nixpkgs mirror (for testing)
github.com/NixOS/nixpkgs
nix
1#!/usr/bin/env nix-shell
2#!nix-shell -i bash -p curl jq git gnused nix-prefetch-github rustup python3 nix prefetch-yarn-deps coreutils nix-prefetch-git
3
4set -euo pipefail
5
6# Some Facebook package make use of nightly Rust features
7rustup toolchain install nightly --force
8
# Paths
nixpkgs="$(git rev-parse --show-toplevel)"
pkgdir="$nixpkgs/pkgs/by-name/sa/sapling"
pkgfile="$pkgdir/package.nix"

# Query the latest GitHub release. -f makes curl fail on HTTP errors
# (e.g. API rate limiting) instead of handing an error document to jq.
latest_json="$(curl -fsSL https://api.github.com/repos/facebook/sapling/releases/latest)"
latest_tag="$(jq -r .tag_name <<<"$latest_json")"
tarball_url="$(jq -r .tarball_url <<<"$latest_json")"

# Guard against a malformed/empty API response: jq -r prints the literal
# string "null" for a missing key, which would otherwise be written
# verbatim into package.nix by the seds below.
if [[ -z "$latest_tag" || "$latest_tag" == "null" || "$tarball_url" == "null" ]]; then
  echo "error: could not determine latest sapling release from the GitHub API" >&2
  exit 1
fi
16
# Update version. Match (and preserve) any leading indentation instead of a
# single hard-coded space: nixpkgs uses two-space indents, so the original
# pattern '^ version' silently matched nothing.
sed -i -e 's|^\([[:space:]]*\)version = "[^"]*";|\1version = "'"$latest_tag"'";|' "$pkgfile"
19
# Prefetch source tarball and get unpacked path.
# nix-prefetch-url --print-path emits exactly two lines: the hash, then the
# store path of the unpacked tree; consume them with paired reads.
{
  read -r _tarball_hash
  read -r source_dir
} < <(nix-prefetch-url --print-path --unpack "$tarball_url")
23
# Update Cargo.lock by running cargo fetch in a writable copy of the source
tmpdir="$(mktemp -d)"
# ${tmpdir:?} aborts expansion if the variable is ever unset/empty, so the
# trap can never degenerate into 'rm -rf' of an unintended path; -- stops
# option parsing for the (unlikely) dash-prefixed temp dir.
trap 'rm -rf -- "${tmpdir:?}"' EXIT
# Nix store paths are read-only; cargo needs a writable tree.
cp -R "$source_dir" "$tmpdir/src"
chmod -R u+w "$tmpdir/src"
# cargo fetch re-resolves dependencies and rewrites Cargo.lock for the new release.
rustup run nightly cargo fetch --manifest-path "$tmpdir/src/eden/scm/Cargo.toml"
cp "$tmpdir/src/eden/scm/Cargo.lock" "$pkgdir/Cargo.lock"
31
# Parse Cargo.lock and prefetch git sources, emitting one
#   "name-version" = "sri-hash";
# line per allowed crate, ready to splice into outputHashes.
cargo_output_hashes="$(python3 -c '
import json
import subprocess
import sys
import tomllib

cargo_lock_path = sys.argv[1]

# Crates vendored from git that need an entry in outputHashes.
allowed_packages = {
    "abomonation",
    "cloned",
    "fb303_core",
    "fbthrift",
    "serde_bser",
    "watchman_client"
}

with open(cargo_lock_path, "rb") as f:
    lock = tomllib.load(f)

# Memoize by (url, rev): several crates can come from the same repository
# revision, and nix-prefetch-git is by far the slowest step here.
prefetched = {}

for pkg in lock.get("package", []):
    source = pkg.get("source", "")
    if not source.startswith("git+"):
        continue
    name = pkg["name"]
    if name not in allowed_packages:
        continue
    version = pkg["version"]
    # source format: git+https://url?rev=...#commit-hash
    parts = source.split("#")
    if len(parts) != 2:
        continue
    rev = parts[1]
    url = parts[0][4:].split("?")[0]  # strip "git+" prefix and query string
    key = (url, rev)
    if key not in prefetched:
        out = subprocess.check_output(
            ["nix-prefetch-git", "--url", url, "--rev", rev, "--quiet"],
            text=True
        )
        # NOTE(review): assumes nix-prefetch-git emits an SRI "hash" key
        # (true for current nixpkgs; older versions only had "sha256").
        prefetched[key] = json.loads(out)["hash"]
    hash_val = prefetched[key]
    print(f"    \"{name}-{version}\" = \"{hash_val}\";")
' "$pkgdir/Cargo.lock")"
77
78# First clear existing hashes
79sed -i '/outputHashes = {/,/};/ {
80 /outputHashes = {/n
81 /};/!d
82}' "$pkgfile"
83
84# Then insert new hashes
85echo "$cargo_output_hashes" > "$tmpdir/hashes.txt"
86sed -i '/outputHashes = {/r '"$tmpdir/hashes.txt" "$pkgfile"
87
# Prefetch source hash for fetchFromGitHub
src_hash="$(nix-prefetch-github facebook sapling --rev "$latest_tag" | jq -r '.hash')"

# Refuse to write a bogus hash: jq -r prints the literal string "null" when
# the prefetch output has no .hash key.
if [[ -z "$src_hash" || "$src_hash" == "null" ]]; then
  echo "error: nix-prefetch-github did not return a hash for tag $latest_tag" >&2
  exit 1
fi

# Update the fetchFromGitHub src block's hash. The address range closes at
# the first line containing "}", i.e. the end of the src attrset, so only
# that block's hash attribute is rewritten.
sed -i -e '/src = fetchFromGitHub {/,/}/{s|hash = "[^"]*";|hash = "'"$src_hash"'";|}' "$pkgfile"
93
# Compute yarn offline cache hash without building
yarn_lock="$source_dir/addons/yarn.lock"

# Fail with a clear message if the lockfile moved in a new release instead of
# letting prefetch-yarn-deps error out obscurely.
if [[ ! -f "$yarn_lock" ]]; then
  echo "error: $yarn_lock not found; did addons/ move in this release?" >&2
  exit 1
fi

yarn_hash_raw="$(prefetch-yarn-deps "$yarn_lock")"
# Normalize to SRI form, which is what fetchYarnDeps expects in package.nix.
yarn_hash_sri="$(nix hash convert --hash-algo sha256 --to sri "$yarn_hash_raw")"
sed -i -e '/yarnOfflineCache = fetchYarnDeps {/,/};/{s|sha256 = "[^"]*";|sha256 = "'"$yarn_hash_sri"'";|}' "$pkgfile"