nixpkgs mirror (for testing) github.com/NixOS/nixpkgs
nix
at python-updates 223 lines 7.5 kB view raw
#!/usr/bin/env node
'use strict'

// prefetch-yarn-deps: download every dependency listed in a yarn.lock into
// the current directory (builder mode), or download into a temp dir and
// print its recursive nix sha256 (default mode).

const fs = require('fs')
const crypto = require('crypto')
const process = require('process')
const https = require('https')
const child_process = require('child_process')
const path = require('path')
const lockfile = require('./yarnpkg-lockfile.js')
const { promisify } = require('util')
const url = require('url')
const { URL } = url;
const { urlToName } = require('./common.js')

const execFile = promisify(child_process.execFile)

// Run an external command, surfacing stderr in the thrown error.
// Note: promisify(execFile) already rejects on non-zero exit; the
// `res.error` check only guards exotic failure shapes.
const exec = async (...args) => {
  const res = await execFile(...args)
  if (res.error) throw new Error(res.stderr)
  return res
}

// Download `url` over https into `fileName`, following up to 10 redirects,
// verifying the stream against `expectedHash` (hex digest of `hashType`).
// Resolves only after the file has been fully flushed to disk.
const downloadFileHttps = (fileName, url, expectedHash, verbose, hashType = 'sha1') => {
  return new Promise((resolve, reject) => {
    const get = (url, redirects = 0) => https.get(url, (res) => {
      if (redirects > 10) {
        // Reject with an Error (not a bare string) so callers get a stack trace.
        reject(new Error('Too many redirects!'));
        return;
      }
      // Follow permanent and temporary redirects, including the 307/308
      // variants some registries/CDNs emit; Location may be relative.
      if (res.statusCode === 301 || res.statusCode === 302 ||
          res.statusCode === 307 || res.statusCode === 308) {
        const location = new URL(res.headers.location, url);
        if (verbose) console.log('following redirect to ' + location);
        return get(location, redirects + 1);
      }
      const file = fs.createWriteStream(fileName)
      const hash = crypto.createHash(hashType)
      res.pipe(file)
      res.pipe(hash).setEncoding('hex')
      res.on('end', () => {
        // file.close() is asynchronous — wait for its callback so the file
        // is fully on disk before the caller continues (the directory is
        // hashed afterwards, so a partial flush would corrupt the result).
        file.close(() => {
          const h = hash.read()
          if (expectedHash === undefined){
            console.log(`Warning: lockfile url ${url} doesn't end in "#<hash>" to validate against.
Downloaded file had hash ${h}.`);
          } else if (h !== expectedHash) {
            return reject(new Error(`hash mismatch, expected ${expectedHash}, got ${h} for ${url}`))
          }
          resolve()
        })
      })
      res.on('error', e => reject(e))
    })
    get(url)
  })
}

// Fetch a git revision via nix-prefetch-git and pack it into a
// deterministic tarball at `fileName`.
const downloadGit = async (fileName, url, rev) => {
  await exec('nix-prefetch-git', [
    '--out', fileName + '.tmp',
    '--url', url,
    '--rev', rev,
    '--builder'
  ])

  await exec('tar', [
    // hopefully make it reproducible across runs and systems
    '--owner=0', '--group=0', '--numeric-owner', '--format=gnu', '--sort=name', '--mtime=@1',

    // Set u+w because tar-fs can't unpack archives with read-only dirs: https://github.com/mafintosh/tar-fs/issues/79
    '--mode', 'u+w',

    '-C', fileName + '.tmp',
    '-cf', fileName, '.'
  ])

  await exec('rm', [ '-rf', fileName + '.tmp', ])
}

// Heuristic (copied from yarn) deciding whether a lockfile URL points at a
// git repository rather than a plain tarball:
// https://github.com/yarnpkg/yarn/blob/3119382885ea373d3c13d6a846de743eca8c914b/src/resolvers/exotics/git-resolver.js#L15-L47
const isGitUrl = pattern => {
  const GIT_HOSTS = ['github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org']
  const GIT_PATTERN_MATCHERS = [/^git:/, /^git\+.+:/, /^ssh:/, /^https?:.+\.git$/, /^https?:.+\.git#.+/]

  for (const matcher of GIT_PATTERN_MATCHERS) if (matcher.test(pattern)) return true

  // url.parse is legacy, but unlike `new URL` it tolerates the loose
  // patterns yarn accepts. `path` here shadows the path module locally.
  const {hostname, path} = url.parse(pattern)
  if (hostname && path && GIT_HOSTS.indexOf(hostname) >= 0
    // only if dependency is pointing to a git repo,
    // e.g. facebook/flow and not file in a git repo facebook/flow/archive/v1.0.0.tar.gz
    && path.split('/').filter(p => !!p).length === 2
  ) return true

  return false
}

// Download one lockfile entry into the current directory, dispatching on
// the shape of its `resolved` URL. Returns a promise for real downloads,
// undefined for ignored (file:/link:) entries. Throws on malformed entries.
const downloadPkg = (pkg, verbose) => {
  for (const marker of ['@file:', '@link:']) {
    const split = pkg.key.split(marker)
    if (split.length === 2) {
      console.info(`ignoring lockfile entry "${split[0]}" which points at path "${split[1]}"`)
      return
    } else if (split.length > 2) {
      throw new Error(`The lockfile entry key "${pkg.key}" contains "${marker}" more than once. Processing is not implemented.`)
    }
  }

  if (pkg.resolved === undefined) {
    throw new Error(`The lockfile entry with key "${pkg.key}" cannot be downloaded because it is missing the "resolved" attribute, which should contain the URL to download from. The lockfile might be invalid.`)
  }

  const [ url, hash ] = pkg.resolved.split('#')
  if (verbose) console.log('downloading ' + url)
  const fileName = urlToName(url)
  const s = url.split('/')
  if (url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')) {
    return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1])
  } else if (url.startsWith('https://github.com/') && url.endsWith('.tar.gz') &&
    (
      s.length <= 5 || // https://github.com/owner/repo.tgz#feedface...
      s[5] === "archive" // https://github.com/owner/repo/archive/refs/tags/v0.220.1.tar.gz
    )) {
    // Dots are escaped so the suffix match cannot fire on e.g. "footargz".
    return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1].replace(/\.tar\.gz$/, ''))
  } else if (isGitUrl(url)) {
    return downloadGit(fileName, url.replace(/^git\+/, ''), hash)
  } else if (url.startsWith('https://')) {
    if (typeof pkg.integrity === 'string' || pkg.integrity instanceof String) {
      // Prefer the SRI integrity field ("<type>-<base64>") over the URL hash.
      const [ type, checksum ] = pkg.integrity.split('-')
      return downloadFileHttps(fileName, url, Buffer.from(checksum, 'base64').toString('hex'), verbose, type)
    }
    return downloadFileHttps(fileName, url, hash, verbose)
  } else if (url.startsWith('file:')) {
    console.warn(`ignoring unsupported file:path url "${url}"`)
  } else {
    throw new Error('don\'t know how to download "' + url + '"')
  }
}

// Drain the given array of thunks with a bounded number of concurrent
// workers (default 4, matching the original hardcoded value).
const performParallel = (tasks, concurrency = 4) => {
  const worker = async () => {
    while (tasks.length > 0) await tasks.shift()()
  }

  const workers = []
  for (let i = 0; i < concurrency; i++) {
    workers.push(worker())
  }

  return Promise.all(workers)
}

// This could be implemented using [`Map.groupBy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/groupBy),
// but that method is only supported starting with Node 21
const uniqueBy = (arr, callback) => {
  const map = new Map()
  for (const elem of arr) {
    map.set(callback(elem), elem)
  }
  return [...map.values()]
}

// Parse the lockfile, download every unique `resolved` URL into the current
// directory, then write the lockfile itself alongside the downloads.
const prefetchYarnDeps = async (lockContents, verbose) => {
  const lockData = lockfile.parse(lockContents)
  await performParallel(
    uniqueBy(Object.entries(lockData.object), ([_, value]) => value.resolved)
      .map(([key, value]) => () => downloadPkg({ key, ...value }, verbose))
  )
  await fs.promises.writeFile('yarn.lock', lockContents)
  if (verbose) console.log('Done')
}

// Print usage to stderr and exit with a failure status.
const showUsage = async () => {
  process.stderr.write(`
syntax: prefetch-yarn-deps [path to yarn.lock] [options]

Options:
  -h --help         Show this help
  -v --verbose      Verbose output
  --builder         Only perform the download to current directory, then exit
`)
  process.exit(1)
}

const main = async () => {
  const args = process.argv.slice(2)
  let next, lockFile, verbose, isBuilder
  // Minimal hand-rolled flag parsing; first non-flag argument is the lockfile.
  while (next = args.shift()) {
    if (next === '--builder') {
      isBuilder = true
    } else if (next === '--verbose' || next === '-v') {
      verbose = true
    } else if (next === '--help' || next === '-h') {
      showUsage()
    } else if (!lockFile) {
      lockFile = next
    } else {
      showUsage()
    }
  }
  let lockContents
  try {
    lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8')
  } catch {
    showUsage()
  }

  if (isBuilder) {
    // Builder mode: we are already inside the output directory.
    await prefetchYarnDeps(lockContents, verbose)
  } else {
    // Default mode: download into a temp dir, hash it, and clean up.
    const { stdout: tmpDir } = await exec('mktemp', [ '-d' ])

    try {
      process.chdir(tmpDir.trim())
      await prefetchYarnDeps(lockContents, verbose)
      const { stdout: hash } = await exec('nix-hash', [ '--type', 'sha256', '--base32', tmpDir.trim() ])
      console.log(hash)
    } finally {
      await exec('rm', [ '-rf', tmpDir.trim() ])
    }
  }
}

main()
  .catch(e => {
    console.error(e)
    process.exit(1)
  })