#!/usr/bin/env nix-shell
#! nix-shell --pure
#! nix-shell -i bash
#! nix-shell -p curl cacert
#! nix-shell -p git
#! nix-shell -p nix
#! nix-shell -p jq

# Fail a pipeline when any stage fails. The rest of the script relies on
# this for checks like 'xz -d < f | nix-store --restore ...'.
set -o pipefail

# How the refresher works:
#
# For a given list of <targets>:
# 1. fetch latest successful '.build' job
# 2. fetch oldest evaluation that contained that '.build', extract nixpkgs commit
# 3. fetch all the '.build' artifacts from '$out/on-server/' directory
# 4. calculate hashes and craft the commit message with the details on
#    how to upload the result to 'tarballs.nixos.org'

# Scratch space used by nar_sri_get() to unpack NAR archives.
# Removed on any exit path by the EXIT trap.
scratch_dir=$(mktemp -d) || { echo "ERROR: failed to create temporary directory" >&2; exit 1; }
trap 'rm -rf -- "${scratch_dir}"' EXIT
# Print CLI help to stderr and exit with failure status 1.
usage() {
    cat >&2 <<EOF
Usage:
    $0 [ --commit ] --targets=<target>[,<target>,...]

    The tool must be run from the root directory of 'nixpkgs' repository.

Synopsis:
    'refresh-tarballs.bash' script fetches latest bootstrapFiles built
    by hydra, registers them in 'nixpkgs' and provides commands to
    upload seed files to 'tarballs.nixos.org'.

    This is usually done in the following cases:

    1. Single target fix: current bootstrap files for a single target
       are problematic for some reason (target-specific bug). In this
       case we can refresh just that target as:

       \$ $0 --commit --targets=i686-unknown-linux-gnu

    2. Routine refresh: all bootstrap files should be refreshed to avoid
       debugging problems that only occur on very old binaries.

       \$ $0 --commit --all-targets

To get help on uploading refreshed binaries to 'tarballs.nixos.org'
please have a look at <maintainers/scripts/bootstrap-files/README.md>.
EOF
    exit 1
}

# log helpers

# Emit an error message on stderr and terminate the script with status 1.
die() {
    printf 'ERROR: %s\n' "$*" >&2
    exit 1
}

# Emit an informational message on stderr.
info() {
    printf 'INFO: %s\n' "$*" >&2
}

# Called with no arguments at all: print help and bail out.
(( $# == 0 )) && usage

# known targets

# Targets whose bootstrap tools hydra builds natively
# (jobset 'nixpkgs/trunk', job 'stdenvBootstrapTools.<target>.build').
declare -a NATIVE_TARGETS=(
  aarch64-unknown-linux-gnu
  aarch64-unknown-linux-musl
  i686-unknown-linux-gnu
  x86_64-unknown-linux-gnu
  x86_64-unknown-linux-musl
  aarch64-apple-darwin
  x86_64-apple-darwin
)

# is_native <target>: succeed (status 0) if <target> is listed in
# NATIVE_TARGETS, fail (status 1) otherwise.
is_native() {
    local t target=$1
    for t in "${NATIVE_TARGETS[@]}"; do
        # Quote the right-hand side: an unquoted operand of '==' inside
        # [[ ]] is a glob pattern, so e.g. target='*' would match anything.
        [[ $t == "$target" ]] && return 0
    done
    return 1
}

# Targets whose bootstrap tools hydra cross-compiles
# (jobset 'nixpkgs/cross-trunk', job 'bootstrapTools.<target>.build').
declare -a CROSS_TARGETS=(
  armv5tel-unknown-linux-gnueabi
  armv6l-unknown-linux-gnueabihf
  armv6l-unknown-linux-musleabihf
  armv7l-unknown-linux-gnueabihf
  mips64el-unknown-linux-gnuabi64
  mips64el-unknown-linux-gnuabin32
  mipsel-unknown-linux-gnu
  powerpc64-unknown-linux-gnuabielfv2
  powerpc64le-unknown-linux-gnu
  riscv64-unknown-linux-gnu
)
99
100is_cross() {
101 local t target=$1
102 for t in "${CROSS_TARGETS[@]}"; do
103 [[ $t == $target ]] && return 0
104 done
105 return 1
106}

# nar_sri_get <path/to/name.nar.xz> <name>:
#   Unpack an xz-compressed NAR archive into "${scratch_dir}/<name>",
#   add the result to the local nix store, and print the SRI hash of
#   the resulting store path on stdout. Dies on any failure.
nar_sri_get() {
    local restore_path store_path
    (( $# != 2 )) && die "nar_sri_get /path/to/name.nar.xz name"
    restore_path="${scratch_dir}/$2"

    # 'set -o pipefail' (enabled at the top of the script) makes this
    # pipeline fail if either 'xz' or 'nix-store' fails. Checking via
    # '|| die' directly is safer than a separate '[[ $? -ne 0 ]]' line,
    # which breaks silently if a statement is ever inserted in between.
    xz -d < "$1" | nix-store --restore "${restore_path}" \
        || die "Failed to unpack '$1'"

    store_path=$(nix-store --add "${restore_path}") \
        || die "Failed to add '$restore_path' to store"
    rm -rf -- "${restore_path}"

    nix-hash --to-sri "$(nix-store --query --hash "${store_path}")"
}
121
122# collect passed options
123
124targets=()
125commit=no
126
127for arg in "$@"; do
128 case "$arg" in
129 --all-targets)
130 targets+=(
131 ${CROSS_TARGETS[@]}
132 ${NATIVE_TARGETS[@]}
133 )
134 ;;
135 --targets=*)
136 # Convert "--targets=a,b,c" to targets=(a b c) bash array.
137 comma_targets=${arg#--targets=}
138 targets+=(${comma_targets//,/ })
139 ;;
140 --commit)
141 commit=yes
142 ;;
143 *)
144 usage
145 ;;
146 esac
147done

# Main driver: for every requested target, resolve the latest successful
# hydra build, pin the nixpkgs revision of its oldest evaluation, fetch
# the bootstrap artifacts, and (re)generate
# '<nixpkgs_prefix>/bootstrap-files/<target>.nix' plus a commit message
# describing how to upload the files to 'tarballs.nixos.org'.
#
# NOTE(review): the '[[ $? -ne 0 ]] && die' checks below must stay
# immediately after the command they guard; inserting any statement in
# between would silently disable the check.
for target in "${targets[@]}"; do
    # Native and cross jobsets differ a bit. We'll have to pick the
    # one based on target name:
    if is_native $target; then
        jobset=nixpkgs/trunk
        job="stdenvBootstrapTools.${target}.build"
    elif is_cross $target; then
        jobset=nixpkgs/cross-trunk
        job="bootstrapTools.${target}.build"
    else
        die "'$target' is not present in either of 'NATIVE_TARGETS' or 'CROSS_TARGETS'. Please add one."
    fi

    # 'nixpkgs' prefix where we will write new tarball hashes
    case "$target" in
        *linux*) nixpkgs_prefix="pkgs/stdenv/linux" ;;
        *darwin*) nixpkgs_prefix="pkgs/stdenv/darwin" ;;
        *) die "don't know where to put '$target'" ;;
    esac

    # We enforce s3 prefix for all targets here. This slightly differs
    # from manual uploads targets where names were chosen inconsistently.
    s3_prefix="stdenv/$target"

    # resolve 'latest' build to the build 'id', construct the link.
    # NOTE(review): 'curl -s' without '-f' returns 0 on HTTP error
    # responses; a 404 would only be caught later by jq — confirm this
    # is acceptable.
    latest_build_uri="https://hydra.nixos.org/job/$jobset/$job/latest"
    latest_build="$target.latest-build"
    info "Fetching latest successful build from '${latest_build_uri}'"
    curl -s -H "Content-Type: application/json" -L "$latest_build_uri" > "$latest_build"
    [[ $? -ne 0 ]] && die "Failed to fetch latest successful build"
    latest_build_id=$(jq '.id' < "$latest_build")
    [[ $? -ne 0 ]] && die "Did not find 'id' in latest build"
    build_uri="https://hydra.nixos.org/build/${latest_build_id}"

    # We pick oldest jobset evaluation and extract the 'nixpkgs' commit.
    #
    # We use oldest instead of latest to make the result more stable
    # across unrelated 'nixpkgs' updates. Ideally two subsequent runs of
    # this refresher should produce the same output (provided there are
    # no bootstrapTools updates committed between the two runs).
    oldest_eval_id=$(jq '.jobsetevals|min' < "$latest_build")
    [[ $? -ne 0 ]] && die "Did not find 'jobsetevals' in latest build"
    eval_uri="https://hydra.nixos.org/eval/${oldest_eval_id}"
    eval_meta="$target.eval-meta"
    info "Fetching oldest eval details from '${eval_uri}' (can take a minute)"
    curl -s -H "Content-Type: application/json" -L "${eval_uri}" > "$eval_meta"
    [[ $? -ne 0 ]] && die "Failed to fetch eval metadata"
    nixpkgs_revision=$(jq --raw-output ".jobsetevalinputs.nixpkgs.revision" < "$eval_meta")
    [[ $? -ne 0 ]] && die "Failed to fetch revision"

    # Extract the build paths out of the build metadata
    drvpath=$(jq --raw-output '.drvpath' < "${latest_build}")
    [[ $? -ne 0 ]] && die "Did not find 'drvpath' in latest build"
    outpath=$(jq --raw-output '.buildoutputs.out.path' < "${latest_build}")
    [[ $? -ne 0 ]] && die "Did not find 'buildoutputs' in latest build"
    build_timestamp=$(jq --raw-output '.timestamp' < "${latest_build}")
    [[ $? -ne 0 ]] && die "Did not find 'timestamp' in latest build"
    # Render a human-readable UTC timestamp for the generated header.
    build_time=$(TZ=UTC LANG=C date --date="@${build_timestamp}" --rfc-email)
    [[ $? -ne 0 ]] && die "Failed to format timestamp"

    # Substitute the build output from hydra's binary cache so we can
    # hash the individual files under "${outpath}/on-server/".
    info "Fetching bootstrap tools to calculate hashes from '${outpath}'"
    nix-store --realize "$outpath"
    [[ $? -ne 0 ]] && die "Failed to fetch '${outpath}' from hydra"

    # Basenames of the artifacts; referenced later in the commit message.
    fnames=()

    target_file="${nixpkgs_prefix}/bootstrap-files/${target}.nix"
    info "Writing '${target_file}'"
    # The whole group's stdout becomes the generated .nix file.
    {
        # header
        cat <<EOF
# Autogenerated by maintainers/scripts/bootstrap-files/refresh-tarballs.bash as:
# $ ./refresh-tarballs.bash --targets=${target}
#
# Metadata:
# - nixpkgs revision: ${nixpkgs_revision}
# - hydra build: ${latest_build_uri}
# - resolved hydra build: ${build_uri}
# - instantiated derivation: ${drvpath}
# - output directory: ${outpath}
# - build time: ${build_time}
{
EOF
        for p in "${outpath}/on-server"/*; do
            fname=$(basename "$p")
            fnames+=("$fname")
            # Map artifact file names to nix attribute names.
            case "$fname" in
                bootstrap-tools.tar.xz) attr=bootstrapTools ;;
                busybox) attr=$fname ;;
                unpack.nar.xz) attr=unpack ;;
                *) die "Don't know how to map '$fname' to attribute name. Please update me."
            esac

            executable_arg=
            executable_nix=
            if [[ -x "$p" ]]; then
                executable_arg="--executable"
                executable_nix="executable = true;"
            fi
            unpack_nix=
            name_nix=
            # NAR archives are hashed via nar_sri_get (unpack + re-add to
            # the store); plain files via nix-prefetch-url.
            if [[ $fname = *.nar.xz ]]; then
                unpack_nix="unpack = true;"
                name_nix="name = \"${fname%.nar.xz}\";"
                sri=$(nar_sri_get "$p" "${fname%.nar.xz}")
                [[ $? -ne 0 ]] && die "Failed to get hash of '$p'"
            else
                # $executable_arg is intentionally unquoted: when empty it
                # must disappear from the command line entirely.
                sha256=$(nix-prefetch-url $executable_arg --name "$fname" "file://$p")
                [[ $? -ne 0 ]] && die "Failed to get the hash for '$p'"
                sri=$(nix-hash --to-sri "sha256:$sha256")
                [[ $? -ne 0 ]] && die "Failed to convert '$sha256' hash to an SRI form"
            fi

            # individual file entries; the inline $( ) appends the
            # optional attributes only when the corresponding variable
            # is non-empty.
            cat <<EOF
  $attr = import <nix/fetchurl.nix> {
    url = "http://tarballs.nixos.org/${s3_prefix}/${nixpkgs_revision}/$fname";
    hash = "${sri}";$(
      [[ -n ${executable_nix} ]] && printf "\n    %s" "${executable_nix}"
      [[ -n ${name_nix} ]] && printf "\n    %s" "${name_nix}"
      [[ -n ${unpack_nix} ]] && printf "\n    %s" "${unpack_nix}"
)
  };
EOF
        done
        # footer
        cat <<EOF
}
EOF
    } > "${target_file}"

    # Craft the commit message with upload instructions and checksums.
    target_file_commit_msg=${target}.commit_message
    cat > "$target_file_commit_msg" <<EOF
${nixpkgs_prefix}: update ${target} bootstrap-files

sha256sum of files to be uploaded:

$(
echo "$ sha256sum ${outpath}/on-server/*"
sha256sum ${outpath}/on-server/*
)

Suggested commands to upload files to 'tarballs.nixos.org':

    $ nix-store --realize ${outpath}
    $ aws s3 cp --recursive --acl public-read ${outpath}/on-server/ s3://nixpkgs-tarballs/${s3_prefix}/${nixpkgs_revision}
    $ aws s3 cp --recursive s3://nixpkgs-tarballs/${s3_prefix}/${nixpkgs_revision} ./
    $ sha256sum ${fnames[*]}
    $ sha256sum ${outpath}/on-server/*
EOF

    cat "$target_file_commit_msg"
    # Only commit when explicitly asked to via '--commit'.
    if [[ $commit == yes ]]; then
        git commit "${target_file}" -F "$target_file_commit_msg"
    else
        info "DRY RUN: git commit ${target_file} -F $target_file_commit_msg"
    fi
    rm -- "$target_file_commit_msg"

    # delete temp files
    rm -- "$latest_build" "$eval_meta"
done