# node-env.nix — node2nix build environment (nixpkgs 22.05-pre vintage, ~20 kB)
# This file originates from node2nix

{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile}:

let
  # Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
  utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;

  python = if nodejs ? python then nodejs.python else python2;

  # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
  tarWrapper = runCommand "tarWrapper" {} ''
    mkdir -p $out/bin

    cat > $out/bin/tar <<EOF
    #! ${stdenv.shell} -e
    $(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
    EOF

    chmod +x $out/bin/tar
  '';

  # Function that generates a TGZ file from a NPM project
  buildNodeSourceDist =
    { name, version, src, ... }:

    stdenv.mkDerivation {
      name = "node-tarball-${name}-${version}";
      inherit src;
      buildInputs = [ nodejs ];
      buildPhase = ''
        export HOME=$TMPDIR
        tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
      '';
      installPhase = ''
        mkdir -p $out/tarballs
        mv $tgzFile $out/tarballs
        mkdir -p $out/nix-support
        echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
      '';
    };

  # Emits shell code that bundles each given dependency into node_modules/
  # (skipping dependencies that are already present, e.g. bundled ones).
  includeDependencies = {dependencies}:
    lib.optionalString (dependencies != [])
      (lib.concatMapStrings (dependency:
        ''
          # Bundle the dependencies of the package
          mkdir -p node_modules
          cd node_modules

          # Only include dependencies if they don't exist. They may also be bundled in the package.
          if [ ! -e "${dependency.name}" ]
          then
              ${composePackage dependency}
          fi

          cd ..
        ''
      ) dependencies);

  # Recursively composes the dependencies of a package
  composePackage = { name, packageName, src, dependencies ? [], ... }@args:
    builtins.addErrorContext "while evaluating node package '${packageName}'" ''
      DIR=$(pwd)
      cd $TMPDIR

      unpackFile ${src}

      # Make the base dir in which the target dependency resides first
      mkdir -p "$(dirname "$DIR/${packageName}")"

      if [ -f "${src}" ]
      then
          # Figure out what directory has been unpacked
          packageDir="$(find . -maxdepth 1 -type d | tail -1)"

          # Restore write permissions to make building work
          find "$packageDir" -type d -exec chmod u+x {} \;
          chmod -R u+w "$packageDir"

          # Move the extracted tarball into the output folder
          mv "$packageDir" "$DIR/${packageName}"
      elif [ -d "${src}" ]
      then
          # Get a stripped name (without hash) of the source directory.
          # On old nixpkgs it's already set internally.
          if [ -z "$strippedName" ]
          then
              strippedName="$(stripHash ${src})"
          fi

          # Restore write permissions to make building work
          chmod -R u+w "$strippedName"

          # Move the extracted directory into the output folder
          mv "$strippedName" "$DIR/${packageName}"
      fi

      # Unset the stripped name to not confuse the next unpack step
      unset strippedName

      # Include the dependencies of the package
      cd "$DIR/${packageName}"
      ${includeDependencies { inherit dependencies; }}
      cd ..
      ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
    '';

  # Emits shell code that rewrites the version specifiers in package.json to
  # the exact versions of the dependencies that were actually deployed.
  pinpointDependencies = {dependencies, production}:
    let
      pinpointDependenciesFromPackageJSON = writeTextFile {
        name = "pinpointDependencies.js";
        text = ''
          var fs = require('fs');
          var path = require('path');

          function resolveDependencyVersion(location, name) {
              if(location == process.env['NIX_STORE']) {
                  return null;
              } else {
                  var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");

                  if(fs.existsSync(dependencyPackageJSON)) {
                      var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));

                      if(dependencyPackageObj.name == name) {
                          return dependencyPackageObj.version;
                      }
                  } else {
                      return resolveDependencyVersion(path.resolve(location, ".."), name);
                  }
              }
          }

          function replaceDependencies(dependencies) {
              if(typeof dependencies == "object" && dependencies !== null) {
                  for(var dependency in dependencies) {
                      var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);

                      if(resolvedVersion === null) {
                          process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
                      } else {
                          dependencies[dependency] = resolvedVersion;
                      }
                  }
              }
          }

          /* Read the package.json configuration */
          var packageObj = JSON.parse(fs.readFileSync('./package.json'));

          /* Pinpoint all dependencies */
          replaceDependencies(packageObj.dependencies);
          if(process.argv[2] == "development") {
              replaceDependencies(packageObj.devDependencies);
          }
          replaceDependencies(packageObj.optionalDependencies);

          /* Write the fixed package.json file */
          fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
        '';
      };
    in
    ''
      node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}

      ${lib.optionalString (dependencies != [])
        ''
          if [ -d node_modules ]
          then
              cd node_modules
              ${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
              cd ..
          fi
        ''}
    '';

  # Recursively traverses all dependencies of a package and pinpoints all
  # dependencies in the package.json file to the versions that are actually
  # being used.

  pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
    ''
      if [ -d "${packageName}" ]
      then
          cd "${packageName}"
          ${pinpointDependencies { inherit dependencies production; }}
          cd ..
          ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
      fi
    '';

  # Extract the Node.js source code which is used to compile packages with
  # native bindings
  nodeSources = runCommand "node-sources" {} ''
    tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
    mv node-* $out
  '';

  # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
  addIntegrityFieldsScript = writeTextFile {
    name = "addintegrityfields.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      function augmentDependencies(baseDir, dependencies) {
          for(var dependencyName in dependencies) {
              var dependency = dependencies[dependencyName];

              // Open package.json and augment metadata fields
              var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
              var packageJSONPath = path.join(packageJSONDir, "package.json");

              if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
                  console.log("Adding metadata fields to: "+packageJSONPath);
                  var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));

                  if(dependency.integrity) {
                      packageObj["_integrity"] = dependency.integrity;
                  } else {
                      packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
                  }

                  if(dependency.resolved) {
                      packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
                  } else {
                      packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
                  }

                  if(dependency.from !== undefined) { // Adopt from property if one has been provided
                      packageObj["_from"] = dependency.from;
                  }

                  fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
              }

              // Augment transitive dependencies
              if(dependency.dependencies !== undefined) {
                  augmentDependencies(packageJSONDir, dependency.dependencies);
              }
          }
      }

      if(fs.existsSync("./package-lock.json")) {
          var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));

          if(![1, 2].includes(packageLock.lockfileVersion)) {
             process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
             process.exit(1);
          }

          if(packageLock.dependencies !== undefined) {
              augmentDependencies(".", packageLock.dependencies);
          }
      }
    '';
  };

  # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
  #
  # NOTE(review): the store-path name below duplicates addIntegrityFieldsScript's
  # ("addintegrityfields.js") instead of a reconstruct-specific name. Harmless,
  # and kept as-is to match the node2nix generator output byte-for-byte.
  # NOTE(review): inside the scope-folder ("@...") branch of the embedded script,
  # `stats.isDirectory()` refers to the scope folder's stats rather than the
  # sub-entry's — matches upstream generator output; not changed here.
  reconstructPackageLock = writeTextFile {
    name = "addintegrityfields.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      var packageObj = JSON.parse(fs.readFileSync("package.json"));

      var lockObj = {
        name: packageObj.name,
        version: packageObj.version,
        lockfileVersion: 1,
        requires: true,
        dependencies: {}
      };

      function augmentPackageJSON(filePath, dependencies) {
          var packageJSON = path.join(filePath, "package.json");
          if(fs.existsSync(packageJSON)) {
              var packageObj = JSON.parse(fs.readFileSync(packageJSON));
              dependencies[packageObj.name] = {
                  version: packageObj.version,
                  integrity: "sha1-000000000000000000000000000=",
                  dependencies: {}
              };
              processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
          }
      }

      function processDependencies(dir, dependencies) {
          if(fs.existsSync(dir)) {
              var files = fs.readdirSync(dir);

              files.forEach(function(entry) {
                  var filePath = path.join(dir, entry);
                  var stats = fs.statSync(filePath);

                  if(stats.isDirectory()) {
                      if(entry.substr(0, 1) == "@") {
                          // When we encounter a namespace folder, augment all packages belonging to the scope
                          var pkgFiles = fs.readdirSync(filePath);

                          pkgFiles.forEach(function(entry) {
                              if(stats.isDirectory()) {
                                  var pkgFilePath = path.join(filePath, entry);
                                  augmentPackageJSON(pkgFilePath, dependencies);
                              }
                          });
                      } else {
                          augmentPackageJSON(filePath, dependencies);
                      }
                  }
              });
          }
      }

      processDependencies("node_modules", lockObj.dependencies);

      fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
    '';
  };

  # Emits the shell fragment that pinpoints dependency versions, patches
  # shebangs, optionally reconstructs the lock file, and invokes
  # `npm rebuild` / `npm install` offline (or against a dead registry).
  prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
    let
      forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
    in
    ''
        # Pinpoint the versions of all dependencies to the ones that are actually being used
        echo "pinpointing versions of dependencies..."
        source $pinpointDependenciesScriptPath

        # Patch the shebangs of the bundled modules to prevent them from
        # calling executables outside the Nix store as much as possible
        patchShebangs .

        # Deploy the Node.js package by running npm install. Since the
        # dependencies have been provided already by ourselves, it should not
        # attempt to install them again, which is good, because we want to make
        # it Nix's responsibility. If it needs to install any dependencies
        # anyway (e.g. because the dependency parameters are
        # incomplete/incorrect), it fails.
        #
        # The other responsibilities of NPM are kept -- version checks, build
        # steps, postprocessing etc.

        export HOME=$TMPDIR
        cd "${packageName}"
        runHook preRebuild

        ${lib.optionalString bypassCache ''
          ${lib.optionalString reconstructLock ''
            if [ -f package-lock.json ]
            then
                echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
                echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
                rm package-lock.json
            else
                echo "No package-lock.json file found, reconstructing..."
            fi

            node ${reconstructPackageLock}
          ''}

          node ${addIntegrityFieldsScript}
        ''}

        npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild

        if [ "''${dontNpmInstall-}" != "1" ]
        then
            # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
            rm -f npm-shrinkwrap.json

            npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
        fi
    '';

  # Builds and composes an NPM package including all its dependencies
  buildNodePackage =
    { name
    , packageName
    , version
    , dependencies ? []
    , buildInputs ? []
    , production ? true
    , npmFlags ? ""
    , dontNpmInstall ? false
    , bypassCache ? false
    , reconstructLock ? false
    , preRebuild ? ""
    , dontStrip ? true
    , unpackPhase ? "true"
    , buildPhase ? "true"
    , meta ? {}
    , ... }@args:

    let
      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
    in
    stdenv.mkDerivation ({
      name = "${name}-${version}";
      buildInputs = [ tarWrapper python nodejs ]
        ++ lib.optional (stdenv.isLinux) utillinux
        ++ lib.optional (stdenv.isDarwin) libtool
        ++ buildInputs;

      inherit nodejs;

      inherit dontStrip; # Stripping may fail a build for some package deployments
      inherit dontNpmInstall preRebuild unpackPhase buildPhase;

      compositionScript = composePackage args;
      pinpointDependenciesScript = pinpointDependenciesOfPackage args;

      passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];

      installPhase = ''
        # Create and enter a root node_modules/ folder
        mkdir -p $out/lib/node_modules
        cd $out/lib/node_modules

        # Compose the package and all its dependencies
        source $compositionScriptPath

        ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}

        # Create symlink to the deployed executable folder, if applicable
        if [ -d "$out/lib/node_modules/.bin" ]
        then
            ln -s $out/lib/node_modules/.bin $out/bin
        fi

        # Create symlinks to the deployed manual page folders, if applicable
        if [ -d "$out/lib/node_modules/${packageName}/man" ]
        then
            mkdir -p $out/share
            for dir in "$out/lib/node_modules/${packageName}/man/"*
            do
                mkdir -p $out/share/man/$(basename "$dir")
                for page in "$dir"/*
                do
                    ln -s $page $out/share/man/$(basename "$dir")
                done
            done
        fi

        # Run post install hook, if provided
        runHook postInstall
      '';

      meta = {
        # default to Node.js' platforms
        platforms = nodejs.meta.platforms;
      } // meta;
    } // extraArgs);

  # Builds a node environment (a node_modules folder and a set of binaries)
  buildNodeDependencies =
    { name
    , packageName
    , version
    , src
    , dependencies ? []
    , buildInputs ? []
    , production ? true
    , npmFlags ? ""
    , dontNpmInstall ? false
    , bypassCache ? false
    , reconstructLock ? false
    , dontStrip ? true
    , unpackPhase ? "true"
    , buildPhase ? "true"
    , ... }@args:

    let
      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
    in
    stdenv.mkDerivation ({
      name = "node-dependencies-${name}-${version}";

      buildInputs = [ tarWrapper python nodejs ]
        ++ lib.optional (stdenv.isLinux) utillinux
        ++ lib.optional (stdenv.isDarwin) libtool
        ++ buildInputs;

      inherit dontStrip; # Stripping may fail a build for some package deployments
      inherit dontNpmInstall unpackPhase buildPhase;

      includeScript = includeDependencies { inherit dependencies; };
      pinpointDependenciesScript = pinpointDependenciesOfPackage args;

      passAsFile = [ "includeScript" "pinpointDependenciesScript" ];

      installPhase = ''
        mkdir -p $out/${packageName}
        cd $out/${packageName}

        source $includeScriptPath

        # Create fake package.json to make the npm commands work properly
        cp ${src}/package.json .
        chmod 644 package.json
        ${lib.optionalString bypassCache ''
          if [ -f ${src}/package-lock.json ]
          then
              cp ${src}/package-lock.json .
          fi
        ''}

        # Go to the parent folder to make sure that all packages are pinpointed
        cd ..
        ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}

        ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}

        # Expose the executables that were installed
        cd ..
        ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}

        mv ${packageName} lib
        ln -s $out/lib/node_modules/.bin $out/bin
      '';
    } // extraArgs);

  # Builds a development shell
  buildNodeShell =
    { name
    , packageName
    , version
    , src
    , dependencies ? []
    , buildInputs ? []
    , production ? true
    , npmFlags ? ""
    , dontNpmInstall ? false
    , bypassCache ? false
    , reconstructLock ? false
    , dontStrip ? true
    , unpackPhase ? "true"
    , buildPhase ? "true"
    , ... }@args:

    let
      nodeDependencies = buildNodeDependencies args;
    in
    stdenv.mkDerivation {
      name = "node-shell-${name}-${version}";

      buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
      buildCommand = ''
        mkdir -p $out/bin
        cat > $out/bin/shell <<EOF
        #! ${stdenv.shell} -e
        $shellHook
        exec ${stdenv.shell}
        EOF
        chmod +x $out/bin/shell
      '';

      # Provide the dependencies in a development shell through the NODE_PATH environment variable
      inherit nodeDependencies;
      shellHook = lib.optionalString (dependencies != []) ''
        export NODE_PATH=${nodeDependencies}/lib/node_modules
        export PATH="${nodeDependencies}/bin:$PATH"
      '';
    };
in
{
  buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
  buildNodePackage = lib.makeOverridable buildNodePackage;
  buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
  buildNodeShell = lib.makeOverridable buildNodeShell;
}