#!/usr/bin/env nix-shell
#! nix-shell -i bash
#! nix-shell -p curl cacert jq
# How the refresher works:
#
# For a given list of <targets>:
# 1. fetch the latest successful '.build' job
# 2. fetch the oldest evaluation that contained that '.build', extract the nixpkgs commit
# 3. fetch all the '.build' artifacts from the '$out/on-server/' directory
# 4. calculate hashes and craft the commit message with the details on
#    how to upload the result to 'tarballs.nixos.org'
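#
# As a rough illustration, steps 1. and 2. boil down to Hydra JSON API calls of
# the following shape (example native target; '<oldest-eval-id>' is a
# placeholder taken from the first response):
#
#   curl -s -H "Content-Type: application/json" -L \
#     "https://hydra.nixos.org/job/nixpkgs/trunk/stdenvBootstrapTools.x86_64-unknown-linux-gnu.build/latest"
#   # -> JSON with '.id', '.jobsetevals', '.drvpath', '.buildoutputs.out.path', '.timestamp'
#
#   curl -s -H "Content-Type: application/json" -L \
#     "https://hydra.nixos.org/eval/<oldest-eval-id>"
#   # -> JSON with '.jobsetevalinputs.nixpkgs.revision'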
scratch_dir=$(mktemp -d)
trap 'rm -rf -- "${scratch_dir}"' EXIT
$0 [ --commit ] --targets=<target>[,<target>,...]

The tool must be run from the root directory of the 'nixpkgs' repository.

The 'refresh-tarballs.bash' script fetches the latest bootstrapFiles built
by hydra, registers them in 'nixpkgs' and provides the commands to
upload the seed files to 'tarballs.nixos.org'.

This is usually done in the following cases:

1. Single target fix: the current bootstrap files for a single target
   are problematic for some reason (a target-specific bug). In this
   case we can refresh just that target as:

   \$ $0 --commit --targets=i686-unknown-linux-gnu

2. Routine refresh: all bootstrap files should be refreshed to avoid
   debugging problems that only occur on very old binaries.

   \$ $0 --commit --all-targets
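
Multiple targets can be refreshed at once by passing a comma-separated
list, for example:

   \$ $0 --commit --targets=x86_64-unknown-linux-musl,aarch64-unknown-linux-musl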

To get help on uploading refreshed binaries to 'tarballs.nixos.org'
please have a look at <maintainers/scripts/bootstrap-files/README.md>.
[[ ${#@} -eq 0 ]] && usage
NATIVE_TARGETS=(
    aarch64-unknown-linux-gnu
    aarch64-unknown-linux-musl
    i686-unknown-linux-gnu
    x86_64-unknown-linux-gnu
    x86_64-unknown-linux-musl
)

is_native() {
    local t target=$1
    for t in "${NATIVE_TARGETS[@]}"; do
        [[ $t == $target ]] && return 0
    done
    return 1
}
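
# Targets whose bootstrap files are cross-built; these come from the
# 'nixpkgs/cross-trunk' hydra jobset (see the jobset selection in the main
# loop below).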
CROSS_TARGETS=(
    armv5tel-unknown-linux-gnueabi
    armv6l-unknown-linux-gnueabihf
    armv6l-unknown-linux-musleabihf
    armv7l-unknown-linux-gnueabihf
    mips64el-unknown-linux-gnuabi64
    mips64el-unknown-linux-gnuabin32
    mipsel-unknown-linux-gnu
    powerpc64-unknown-linux-gnuabielfv2
    powerpc64le-unknown-linux-gnu
    riscv64-unknown-linux-gnu
)

is_cross() {
    local t target=$1
    for t in "${CROSS_TARGETS[@]}"; do
        [[ $t == $target ]] && return 0
    done
    return 1
}
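
# nar_sri_get <file.nar.xz> <name>:
#   unpack a NAR archive into the scratch directory, add the result to the
#   local nix store and print its hash in SRI form ("sha256-...") to stdout.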
nar_sri_get() {
    local restore_path store_path
    ((${#@} != 2)) && die "nar_sri_get /path/to/name.nar.xz name"
    restore_path="${scratch_dir}/$2"
    xz -d < "$1" | nix-store --restore "${restore_path}"
    [[ $? -ne 0 ]] && die "Failed to unpack '$1'"

    store_path=$(nix-store --add "${restore_path}")
    [[ $? -ne 0 ]] && die "Failed to add '$restore_path' to store"
    rm -rf -- "${restore_path}"

    nix-hash --to-sri "$(nix-store --query --hash "${store_path}")"
}
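
# A typical call mirrors the use in the main loop below (path shown is
# illustrative):
#   sri=$(nar_sri_get "${outpath}/on-server/unpack.nar.xz" unpack)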
# collect passed options
# Convert "--targets=a,b,c" to targets=(a b c) bash array.
comma_targets=${arg#--targets=}
targets+=(${comma_targets//,/ })
for target in "${targets[@]}"; do
    # Native and cross jobsets differ a bit. We'll have to pick the
    # one based on the target name:
    if is_native $target; then
        jobset=nixpkgs/trunk
        job="stdenvBootstrapTools.${target}.build"
    elif is_cross $target; then
        jobset=nixpkgs/cross-trunk
        job="bootstrapTools.${target}.build"
    else
        die "'$target' is present in neither 'NATIVE_TARGETS' nor 'CROSS_TARGETS'. Please add it to one of them."
    fi

    # 'nixpkgs' prefix where we will write the new tarball hashes
    case "$target" in
        *linux*)  nixpkgs_prefix="pkgs/stdenv/linux" ;;
        *darwin*) nixpkgs_prefix="pkgs/stdenv/darwin" ;;
        *) die "don't know where to put '$target'" ;;
    esac

    # We enforce the 'stdenv/' s3 prefix for all targets here. This differs
    # slightly from the earlier manual uploads, whose names were chosen
    # inconsistently.
    s3_prefix="stdenv/$target"

    # resolve 'latest' build to the build 'id', construct the link.
    latest_build_uri="https://hydra.nixos.org/job/$jobset/$job/latest"
    latest_build="$target.latest-build"
    info "Fetching latest successful build from '${latest_build_uri}'"
    curl -s -H "Content-Type: application/json" -L "$latest_build_uri" > "$latest_build"
    [[ $? -ne 0 ]] && die "Failed to fetch latest successful build"
    latest_build_id=$(jq '.id' < "$latest_build")
    [[ $? -ne 0 ]] && die "Did not find 'id' in latest build"
    build_uri="https://hydra.nixos.org/build/${latest_build_id}"

    # We pick the oldest jobset evaluation and extract the 'nixpkgs' commit.
    #
    # We use the oldest instead of the latest to make the result more stable
    # across unrelated 'nixpkgs' updates. Ideally two subsequent runs of
    # this refresher should produce the same output (provided there are
    # no bootstrapTools updates committed between the two runs).
    oldest_eval_id=$(jq '.jobsetevals|min' < "$latest_build")
    [[ $? -ne 0 ]] && die "Did not find 'jobsetevals' in latest build"
    eval_uri="https://hydra.nixos.org/eval/${oldest_eval_id}"
    eval_meta="$target.eval-meta"
    info "Fetching oldest eval details from '${eval_uri}' (can take a minute)"
    curl -s -H "Content-Type: application/json" -L "${eval_uri}" > "$eval_meta"
    [[ $? -ne 0 ]] && die "Failed to fetch eval metadata"
    nixpkgs_revision=$(jq --raw-output ".jobsetevalinputs.nixpkgs.revision" < "$eval_meta")
    [[ $? -ne 0 ]] && die "Failed to fetch revision"

    # Extract the build paths out of the build metadata
    drvpath=$(jq --raw-output '.drvpath' < "${latest_build}")
    [[ $? -ne 0 ]] && die "Did not find 'drvpath' in latest build"
    outpath=$(jq --raw-output '.buildoutputs.out.path' < "${latest_build}")
    [[ $? -ne 0 ]] && die "Did not find 'buildoutputs' in latest build"
    build_timestamp=$(jq --raw-output '.timestamp' < "${latest_build}")
    [[ $? -ne 0 ]] && die "Did not find 'timestamp' in latest build"
    build_time=$(TZ=UTC LANG=C date --date="@${build_timestamp}" --rfc-email)
    [[ $? -ne 0 ]] && die "Failed to format timestamp"

    info "Fetching bootstrap tools to calculate hashes from '${outpath}'"
    nix-store --realize "$outpath"
    [[ $? -ne 0 ]] && die "Failed to fetch '${outpath}' from hydra"

    target_file="${nixpkgs_prefix}/bootstrap-files/${target}.nix"
    info "Writing '${target_file}'"

# Autogenerated by maintainers/scripts/bootstrap-files/refresh-tarballs.bash as:
# $ ./refresh-tarballs.bash --targets=${target}
#
# - nixpkgs revision: ${nixpkgs_revision}
# - hydra build: ${latest_build_uri}
# - resolved hydra build: ${build_uri}
# - instantiated derivation: ${drvpath}
# - output directory: ${outpath}
# - build time: ${build_time}

    for p in "${outpath}/on-server"/*; do
        fname=$(basename "$p")
        case "$fname" in
            bootstrap-tools.tar.xz) attr=bootstrapTools ;;
            busybox)                attr=$fname ;;
            unpack.nar.xz)          attr=unpack ;;
            *) die "Don't know how to map '$fname' to an attribute name. Please update me." ;;
        esac

        executable_arg=
        executable_nix=
        if [[ -x "$p" ]]; then
            executable_arg="--executable"
            executable_nix="executable = true;"
        fi
        name_nix=
        unpack_nix=
        if [[ $fname = *.nar.xz ]]; then
            unpack_nix="unpack = true;"
            name_nix="name = \"${fname%.nar.xz}\";"
            sri=$(nar_sri_get "$p" "${fname%.nar.xz}")
            [[ $? -ne 0 ]] && die "Failed to get hash of '$p'"
        else
            sha256=$(nix-prefetch-url $executable_arg --name "$fname" "file://$p")
            [[ $? -ne 0 ]] && die "Failed to get the hash for '$p'"
            sri=$(nix-hash --to-sri "sha256:$sha256")
            [[ $? -ne 0 ]] && die "Failed to convert '$sha256' hash to an SRI form"
        fi

        # individual file entries
        cat <<EOF
  $attr = import <nix/fetchurl.nix> {
    url = "http://tarballs.nixos.org/${s3_prefix}/${nixpkgs_revision}/$fname";
EOF
        [[ -n ${executable_nix} ]] && printf "\n %s" "${executable_nix}"
        [[ -n ${name_nix} ]] && printf "\n %s" "${name_nix}"
        [[ -n ${unpack_nix} ]] && printf "\n %s" "${unpack_nix}"
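
        # Together with the SRI hash computed above and the optional
        # attributes appended right above, a generated entry ends up roughly
        # like this (values are illustrative):
        #
        #   bootstrapTools = import <nix/fetchurl.nix> {
        #     url = "http://tarballs.nixos.org/stdenv/x86_64-unknown-linux-gnu/<nixpkgs-revision>/bootstrap-tools.tar.xz";
        #     hash = "sha256-<base64-digest>";
        #   };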

    target_file_commit_msg=${target}.commit_message
    cat > "$target_file_commit_msg" <<EOF
${nixpkgs_prefix}: update ${target} bootstrap-files

sha256sum of files to be uploaded:

$(
echo "$ sha256sum ${outpath}/on-server/*"
sha256sum ${outpath}/on-server/*
)

Suggested commands to upload files to 'tarballs.nixos.org':

$ nix-store --realize ${outpath}
$ aws s3 cp --recursive --acl public-read ${outpath}/on-server/ s3://nixpkgs-tarballs/${s3_prefix}/${nixpkgs_revision}
$ aws s3 cp --recursive s3://nixpkgs-tarballs/${s3_prefix}/${nixpkgs_revision} ./
$ sha256sum ${fnames[*]}
$ sha256sum ${outpath}/on-server/*
EOF

    cat "$target_file_commit_msg"
    if [[ $commit == yes ]]; then
        git commit "${target_file}" -F "$target_file_commit_msg"
    else
        info "DRY RUN: git commit ${target_file} -F $target_file_commit_msg"
    fi
    rm -- "$target_file_commit_msg"

    rm -- "$latest_build" "$eval_meta"
done