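"""Generate a Nix expression describing the artifacts of a Julia package closure.

For each package UUID listed in the dependencies file, a Julia helper script is
run to extract the package's artifact metadata (parsed as TOML). The resulting
Nix file maps `uuid-<UUID>` attributes to artifact entries whose `path` is
either a plain `fetchurl` (for non-archive downloads) or an unpacked
`stdenv.mkDerivation` that, on non-Darwin systems, patchelf-fixes the extracted
binaries against their dependencies.

Positional arguments (see `main`): dependencies YAML, closure YAML, path to
`julia`, path to the artifact-extraction Julia script, extra-libs JSON,
is-darwin JSON, output .nix path.
"""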
import json
from pathlib import Path
import multiprocessing
import subprocess
import sys
import toml
from urllib.parse import urlparse
import yaml

import dag

# This should match the behavior of the default unpackPhase.
# See https://github.com/NixOS/nixpkgs/blob/59fa082abdbf462515facc8800d517f5728c909d/pkgs/stdenv/generic/setup.sh#L1044
archive_extensions = [
    # xz extensions
    ".tar.xz",
    ".tar.lzma",
    ".txz",

    # *.tar or *.tar.*
    ".tar",
    ".tar.Z",
    ".tar.bz2",
    ".tar.gz",

    # Other tar extensions
    ".tgz",
    ".tbz2",
    ".tbz",

    ".zip"
]

def get_archive_derivation(uuid, artifact_name, url, sha256, closure_dependencies_dag, dependency_uuids, extra_libs, is_darwin):
    depends_on = set()
    if closure_dependencies_dag.has_node(uuid):
        depends_on = set(closure_dependencies_dag.get_dependencies(uuid)).intersection(dependency_uuids)

    other_libs = extra_libs.get(uuid, [])

    if is_darwin:
        fixup = f"""fixupPhase = let
          libs = lib.concatMap (lib.mapAttrsToList (k: v: v.path))
            [{" ".join(["uuid-" + x for x in depends_on])}];
        in ''

        ''"""
    else:
        fixup = f"""fixupPhase = let
          libs = lib.concatMap (lib.mapAttrsToList (k: v: v.path))
            [{" ".join(["uuid-" + x for x in depends_on])}];
        in ''
          find $out -type f -executable -exec \
            patchelf --set-rpath \$ORIGIN:\$ORIGIN/../lib:${{lib.makeLibraryPath (["$out" glibc] ++ libs ++ (with pkgs; [{" ".join(other_libs)}]))}} {{}} \;
          find $out -type f -executable -exec \
            patchelf --set-interpreter ${{glibc}}/lib/ld-linux-x86-64.so.2 {{}} \;
        ''"""

    return f"""stdenv.mkDerivation {{
      name = "{artifact_name}";
      src = fetchurl {{
        url = "{url}";
        sha256 = "{sha256}";
      }};
      preUnpack = ''
        mkdir unpacked
        cd unpacked
      '';
      sourceRoot = ".";
      dontConfigure = true;
      dontBuild = true;
      installPhase = "cp -r . $out";
      {fixup};
    }}"""

def get_plain_derivation(url, sha256):
    return f"""fetchurl {{
      url = "{url}";
      sha256 = "{sha256}";
    }}"""

def process_item(args):
    item, julia_path, extract_artifacts_script, closure_dependencies_dag, dependency_uuids, extra_libs, is_darwin = args
    uuid, src = item
    lines = []

    artifacts = toml.loads(subprocess.check_output([julia_path, extract_artifacts_script, uuid, src]).decode())
    if not artifacts:
        return f'  uuid-{uuid} = {{}};\n'

    lines.append(f'  uuid-{uuid} = {{')

    for artifact_name, details in artifacts.items():
        if len(details["download"]) == 0:
            continue
        download = details["download"][0]
        url = download["url"]
        sha256 = download["sha256"]

        git_tree_sha1 = details["git-tree-sha1"]

        parsed_url = urlparse(url)
        if any(parsed_url.path.endswith(x) for x in archive_extensions):
            derivation = get_archive_derivation(uuid, artifact_name, url, sha256, closure_dependencies_dag, dependency_uuids, extra_libs, is_darwin)
        else:
            derivation = get_plain_derivation(url, sha256)

        lines.append(f"""    "{artifact_name}" = {{
      sha1 = "{git_tree_sha1}";
      path = {derivation};
    }};\n""")

    lines.append('  };\n')

    return "\n".join(lines)

def main():
    dependencies_path = Path(sys.argv[1])
    closure_yaml_path = Path(sys.argv[2])
    julia_path = Path(sys.argv[3])
    extract_artifacts_script = Path(sys.argv[4])
    extra_libs = json.loads(sys.argv[5])
    is_darwin = json.loads(sys.argv[6])
    out_path = Path(sys.argv[7])

    with open(dependencies_path, "r") as f:
        dependencies = yaml.safe_load(f)
    dependency_uuids = list(dependencies.keys())  # Convert dict_keys to list

    with open(closure_yaml_path, "r") as f:
        # Build up a map of UUID -> closure information
        closure_yaml_list = yaml.safe_load(f) or []
        closure_yaml = {}
        for item in closure_yaml_list:
            closure_yaml[item["uuid"]] = item

    # Build up a dependency graph of UUIDs
    closure_dependencies_dag = dag.DAG()
    for uuid, contents in closure_yaml.items():
        if contents.get("depends_on"):
            closure_dependencies_dag.add_node(uuid, dependencies=contents["depends_on"].values())

    with open(out_path, "w") as f:
        if is_darwin:
            f.write("{ lib, fetchurl, pkgs, stdenv }:\n\n")
        else:
            f.write("{ lib, fetchurl, glibc, pkgs, stdenv }:\n\n")

        f.write("rec {\n")

        with multiprocessing.Pool(10) as pool:
            # Create args tuples for each item
            process_args = [
                (item, julia_path, extract_artifacts_script, closure_dependencies_dag, dependency_uuids, extra_libs, is_darwin)
                for item in dependencies.items()
            ]
            for s in pool.map(process_item, process_args):
                f.write(s)

        f.write(f"""
}}\n""")

if __name__ == "__main__":
    main()