#!/usr/bin/env nix-shell
#! nix-shell --pure
#! nix-shell -i bash
#! nix-shell -p curl cacert
#! nix-shell -p git
#! nix-shell -p nix
#! nix-shell -p jq

set -o pipefail

# How the refresher works:
#
# For a given list of <targets>:
# 1. fetch latest successful '.build' job
# 2. fetch oldest evaluation that contained that '.build', extract nixpkgs commit
# 3. fetch all the '.build' artifacts from '$out/on-server/' directory
# 4. calculate hashes and craft the commit message with the details on
#    how to upload the result to 'tarballs.nixos.org'
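#
# As an illustration (not an exhaustive description), the hydra endpoints
# used below are:
#   https://hydra.nixos.org/job/<jobset>/<job>/latest  (latest successful build)
#   https://hydra.nixos.org/eval/<eval-id>             (evaluation metadata)
# both fetched with curl, requesting JSON.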

scratch_dir=$(mktemp -d)
trap 'rm -rf -- "${scratch_dir}"' EXIT

usage() {
    cat >&2 <<EOF
Usage:
    $0 [ --commit ] --targets=<target>[,<target>,...]

The tool must be run from the root directory of the 'nixpkgs' repository.

Synopsis:
    'refresh-tarballs.bash' fetches the latest bootstrapFiles built
    by hydra, registers them in 'nixpkgs' and prints the commands to
    upload the seed files to 'tarballs.nixos.org'.

    This is usually done in the following cases:

    1. Single target fix: current bootstrap files for a single target
       are problematic for some reason (target-specific bug). In this
       case we can refresh just that target as:

       \$ $0 --commit --targets=i686-unknown-linux-gnu

    2. Routine refresh: all bootstrap files should be refreshed to avoid
       debugging problems that only occur on very old binaries.

       \$ $0 --commit --all-targets

To get help on uploading refreshed binaries to 'tarballs.nixos.org'
please have a look at <maintainers/scripts/bootstrap-files/README.md>.
EOF
    exit 1
}

# log helpers

die() {
    echo "ERROR: $*" >&2
    exit 1
}

info() {
    echo "INFO: $*" >&2
}

[[ ${#@} -eq 0 ]] && usage

# known targets

NATIVE_TARGETS=(
    aarch64-unknown-linux-gnu
    aarch64-unknown-linux-musl
    i686-unknown-linux-gnu
    x86_64-unknown-linux-gnu
    x86_64-unknown-linux-musl
    aarch64-apple-darwin
    x86_64-apple-darwin
)

is_native() {
    local t target=$1
    for t in "${NATIVE_TARGETS[@]}"; do
        [[ $t == $target ]] && return 0
    done
    return 1
}

CROSS_TARGETS=(
    armv5tel-unknown-linux-gnueabi
    armv6l-unknown-linux-gnueabihf
    armv6l-unknown-linux-musleabihf
    armv7l-unknown-linux-gnueabihf
    mips64el-unknown-linux-gnuabi64
    mips64el-unknown-linux-gnuabin32
    mipsel-unknown-linux-gnu
    powerpc64-unknown-linux-gnuabielfv2
    powerpc64le-unknown-linux-gnu
    riscv64-unknown-linux-gnu
    x86_64-unknown-freebsd
)

is_cross() {
    local t target=$1
    for t in "${CROSS_TARGETS[@]}"; do
        [[ $t == $target ]] && return 0
    done
    return 1
}

# Unpack a nix archive, re-add it to the local store and print the
# store hash of the result in SRI form.
nar_sri_get() {
    local restore_path store_path
    ((${#@} != 2)) && die "nar_sri_get /path/to/name.nar.xz name"
    restore_path="${scratch_dir}/$2"
    xz -d < "$1" | nix-store --restore "${restore_path}"
    [[ $? -ne 0 ]] && die "Failed to unpack '$1'"

    store_path=$(nix-store --add "${restore_path}")
    [[ $? -ne 0 ]] && die "Failed to add '$restore_path' to store"
    rm -rf -- "${restore_path}"

    nix-hash --to-sri "$(nix-store --query --hash "${store_path}")"
}
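
# For illustration (hypothetical invocation): 'nar_sri_get ./unpack.nar.xz unpack'
# would print the SRI hash of the restored tree, a string of the form
# 'sha256-<base64 digest>'.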

# collect passed options

targets=()
commit=no

for arg in "$@"; do
    case "$arg" in
        --all-targets)
            targets+=(
                ${CROSS_TARGETS[@]}
                ${NATIVE_TARGETS[@]}
            )
            ;;
        --targets=*)
            # Convert "--targets=a,b,c" to targets=(a b c) bash array.
            comma_targets=${arg#--targets=}
            targets+=(${comma_targets//,/ })
            ;;
        --commit)
            commit=yes
            ;;
        *)
            usage
            ;;
    esac
done

for target in "${targets[@]}"; do
    # Native and cross jobsets differ a bit. We'll have to pick the
    # right one based on the target name:
    if is_native $target; then
        jobset=nixpkgs/trunk
        job="stdenvBootstrapTools.${target}.build"
    elif is_cross $target; then
        jobset=nixpkgs/cross-trunk
        job="bootstrapTools.${target}.build"
    else
        die "'$target' is not present in either 'NATIVE_TARGETS' or 'CROSS_TARGETS'. Please add it to one of them."
    fi

    # 'nixpkgs' prefix where we will write new tarball hashes
    case "$target" in
        *linux*)   nixpkgs_prefix="pkgs/stdenv/linux" ;;
        *darwin*)  nixpkgs_prefix="pkgs/stdenv/darwin" ;;
        *freebsd*) nixpkgs_prefix="pkgs/stdenv/freebsd" ;;
        *) die "don't know where to put '$target'" ;;
    esac

    # We enforce the 'stdenv/<target>' s3 prefix for all targets here. This
    # slightly differs from the manually uploaded targets, whose names were
    # chosen inconsistently.
    s3_prefix="stdenv/$target"
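
    # With this layout a hypothetical refresh of 'x86_64-unknown-linux-gnu'
    # lands under 's3://nixpkgs-tarballs/stdenv/x86_64-unknown-linux-gnu/<nixpkgs revision>/'
    # (see the upload commands appended to the commit message below).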

    # resolve the 'latest' build to the build 'id', construct the link.
    latest_build_uri="https://hydra.nixos.org/job/$jobset/$job/latest"
    latest_build="$target.latest-build"
    info "Fetching latest successful build from '${latest_build_uri}'"
    curl -s -H "Content-Type: application/json" -L "$latest_build_uri" > "$latest_build"
    [[ $? -ne 0 ]] && die "Failed to fetch latest successful build"
    latest_build_id=$(jq '.id' < "$latest_build")
    [[ $? -ne 0 ]] && die "Did not find 'id' in latest build"
    build_uri="https://hydra.nixos.org/build/${latest_build_id}"
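
    # Note: the fetched build JSON is reused below for its 'jobsetevals',
    # 'drvpath', 'buildoutputs' and 'timestamp' fields.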

    # We pick the oldest jobset evaluation and extract the 'nixpkgs' commit.
    #
    # We use the oldest instead of the latest to make the result more stable
    # across unrelated 'nixpkgs' updates. Ideally two subsequent runs of
    # this refresher should produce the same output (provided there are
    # no bootstrapTools updates committed between the two runs).
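    #
    # 'jobsetevals' in the build metadata is a list of evaluation ids;
    # 'min' below picks the numerically smallest, i.e. the oldest one.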
    oldest_eval_id=$(jq '.jobsetevals|min' < "$latest_build")
    [[ $? -ne 0 ]] && die "Did not find 'jobsetevals' in latest build"
    eval_uri="https://hydra.nixos.org/eval/${oldest_eval_id}"
    eval_meta="$target.eval-meta"
    info "Fetching oldest eval details from '${eval_uri}' (can take a minute)"
    curl -s -H "Content-Type: application/json" -L "${eval_uri}" > "$eval_meta"
    [[ $? -ne 0 ]] && die "Failed to fetch eval metadata"
    nixpkgs_revision=$(jq --raw-output ".jobsetevalinputs.nixpkgs.revision" < "$eval_meta")
    [[ $? -ne 0 ]] && die "Failed to extract the 'nixpkgs' revision"

    # Extract the build paths out of the build metadata
    drvpath=$(jq --raw-output '.drvpath' < "${latest_build}")
    [[ $? -ne 0 ]] && die "Did not find 'drvpath' in latest build"
    outpath=$(jq --raw-output '.buildoutputs.out.path' < "${latest_build}")
    [[ $? -ne 0 ]] && die "Did not find 'buildoutputs' in latest build"
    build_timestamp=$(jq --raw-output '.timestamp' < "${latest_build}")
    [[ $? -ne 0 ]] && die "Did not find 'timestamp' in latest build"
    build_time=$(TZ=UTC LANG=C date --date="@${build_timestamp}" --rfc-email)
    [[ $? -ne 0 ]] && die "Failed to format timestamp"

    info "Fetching bootstrap tools to calculate hashes from '${outpath}'"
    nix-store --realize "$outpath"
    [[ $? -ne 0 ]] && die "Failed to fetch '${outpath}' from hydra"

    fnames=()

    target_file="${nixpkgs_prefix}/bootstrap-files/${target}.nix"
    info "Writing '${target_file}'"
    {
    # header
    cat <<EOF
# Autogenerated by maintainers/scripts/bootstrap-files/refresh-tarballs.bash as:
#   $ ./refresh-tarballs.bash --targets=${target}
#
# Metadata:
# - nixpkgs revision: ${nixpkgs_revision}
# - hydra build: ${latest_build_uri}
# - resolved hydra build: ${build_uri}
# - instantiated derivation: ${drvpath}
# - output directory: ${outpath}
# - build time: ${build_time}
{
EOF

    for p in "${outpath}/on-server"/*; do
        fname=$(basename "$p")
        fnames+=("$fname")
        case "$fname" in
            bootstrap-tools.tar.xz) attr=bootstrapTools ;;
            busybox) attr=$fname ;;
            unpack.nar.xz) attr=unpack ;;
            *) die "Don't know how to map '$fname' to an attribute name. Please update me."
        esac

        executable_arg=
        executable_nix=
        if [[ -x "$p" ]]; then
            executable_arg="--executable"
            executable_nix="executable = true;"
        fi

        unpack_nix=
        name_nix=
        if [[ $fname = *.nar.xz ]]; then
            unpack_nix="unpack = true;"
            name_nix="name = \"${fname%.nar.xz}\";"
            sri=$(nar_sri_get "$p" "${fname%.nar.xz}")
            [[ $? -ne 0 ]] && die "Failed to get hash of '$p'"
        else
            sha256=$(nix-prefetch-url $executable_arg --name "$fname" "file://$p")
            [[ $? -ne 0 ]] && die "Failed to get the hash for '$p'"
            sri=$(nix-hash --to-sri "sha256:$sha256")
            [[ $? -ne 0 ]] && die "Failed to convert '$sha256' hash to an SRI form"
        fi

        # individual file entries
        cat <<EOF
  $attr = import <nix/fetchurl.nix> {
    url = "http://tarballs.nixos.org/${s3_prefix}/${nixpkgs_revision}/$fname";
    hash = "${sri}";$(
        [[ -n ${executable_nix} ]] && printf "\n    %s" "${executable_nix}"
        [[ -n ${name_nix} ]] && printf "\n    %s" "${name_nix}"
        [[ -n ${unpack_nix} ]] && printf "\n    %s" "${unpack_nix}"
    )
  };
EOF
    done

    # footer
    cat <<EOF
}
EOF
    } > "${target_file}"
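
    # For illustration only, the generated file has roughly this shape
    # (target, revision and hashes are placeholders):
    #
    #   {
    #     bootstrapTools = import <nix/fetchurl.nix> {
    #       url = "http://tarballs.nixos.org/stdenv/<target>/<rev>/bootstrap-tools.tar.xz";
    #       hash = "sha256-...";
    #     };
    #     busybox = import <nix/fetchurl.nix> { ... };
    #   }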

    target_file_commit_msg=${target}.commit_message
    cat > "$target_file_commit_msg" <<EOF
${nixpkgs_prefix}: update ${target} bootstrap-files

sha256sum of files to be uploaded:

$(
    echo "$ sha256sum ${outpath}/on-server/*"
    sha256sum ${outpath}/on-server/*
)

Suggested commands to upload files to 'tarballs.nixos.org':

$ nix-store --realize ${outpath}
$ aws s3 cp --recursive --acl public-read ${outpath}/on-server/ s3://nixpkgs-tarballs/${s3_prefix}/${nixpkgs_revision}
$ aws s3 cp --recursive s3://nixpkgs-tarballs/${s3_prefix}/${nixpkgs_revision} ./
$ sha256sum ${fnames[*]}
$ sha256sum ${outpath}/on-server/*
EOF

    cat "$target_file_commit_msg"
    if [[ $commit == yes ]]; then
        git commit "${target_file}" -F "$target_file_commit_msg"
    else
        info "DRY RUN: git commit ${target_file} -F $target_file_commit_msg"
    fi
    rm -- "$target_file_commit_msg"

    # delete temp files
    rm -- "$latest_build" "$eval_meta"
done