1# This file originates from node2nix
2
3{
4 lib,
5 stdenv,
6 nodejs,
7 pkgs,
8 libtool,
9 runCommand,
10 writeTextFile,
11 writeShellScript,
12}:
13
14let
15 inherit (nodejs) python;
16
  # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
  #
  # The wrapper shadows the real `tar` on PATH. Note the escaping games:
  # `$(type -p tar)` is expanded by the *builder* shell, baking the absolute
  # path of the real tar into the wrapper, while `"\$@"` survives the heredoc
  # so the *generated* script forwards its arguments at run time.
  # --delay-directory-restore postpones directory permission/timestamp
  # restoration until extraction is complete.
  tarWrapper = runCommand "tarWrapper" { } ''
    mkdir -p $out/bin

    cat > $out/bin/tar <<EOF
    #! ${stdenv.shell} -e
    $(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
    EOF

    chmod +x $out/bin/tar
  '';
28
  # Function that generates a TGZ file from a NPM project
  #
  # The resulting derivation places the tarball produced by `npm pack`
  # under $out/tarballs and registers it in
  # $out/nix-support/hydra-build-products so Hydra exposes it as a
  # downloadable source-dist artifact.
  buildNodeSourceDist =
    {
      name,
      version,
      src,
      ...
    }:

    stdenv.mkDerivation {
      name = "node-tarball-${name}-${version}";
      inherit src;
      buildInputs = [ nodejs ];
      buildPhase = ''
        # npm writes its config/cache under $HOME; point it at scratch space
        export HOME=$TMPDIR
        # The tarball name is the last line printed; hooks to the pack
        # command may add extra output before it (https://docs.npmjs.com/misc/scripts)
        tgzFile=$(npm pack | tail -n 1)
      '';
      installPhase = ''
        mkdir -p $out/tarballs
        mv $tgzFile $out/tarballs
        mkdir -p $out/nix-support
        echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
      '';
    };
53
54 # Common shell logic
55 installPackage = writeShellScript "install-package" ''
56 installPackage() {
57 local packageName=$1 src=$2
58
59 local strippedName
60
61 local DIR=$PWD
62 cd $TMPDIR
63
64 unpackFile $src
65
66 # Make the base dir in which the target dependency resides first
67 mkdir -p "$(dirname "$DIR/$packageName")"
68
69 if [ -f "$src" ]
70 then
71 # Figure out what directory has been unpacked
72 packageDir="$(find . -maxdepth 1 -type d | tail -1)"
73
74 # Restore write permissions to make building work
75 find "$packageDir" -type d -exec chmod u+x {} \;
76 chmod -R u+w "$packageDir"
77
78 # Move the extracted tarball into the output folder
79 mv "$packageDir" "$DIR/$packageName"
80 elif [ -d "$src" ]
81 then
82 # Get a stripped name (without hash) of the source directory.
83 # On old nixpkgs it's already set internally.
84 if [ -z "$strippedName" ]
85 then
86 strippedName="$(stripHash $src)"
87 fi
88
89 # Restore write permissions to make building work
90 chmod -R u+w "$strippedName"
91
92 # Move the extracted directory into the output folder
93 mv "$strippedName" "$DIR/$packageName"
94 fi
95
96 # Change to the package directory to install dependencies
97 cd "$DIR/$packageName"
98 }
99 '';
100
101 # Bundle the dependencies of the package
102 #
103 # Only include dependencies if they don't exist. They may also be bundled in the package.
104 includeDependencies =
105 { dependencies }:
106 lib.optionalString (dependencies != [ ]) (
107 ''
108 mkdir -p node_modules
109 cd node_modules
110 ''
111 + (lib.concatMapStrings (dependency: ''
112 if [ ! -e "${dependency.packageName}" ]; then
113 ${composePackage dependency}
114 fi
115 '') dependencies)
116 + ''
117 cd ..
118 ''
119 );
120
121 # Recursively composes the dependencies of a package
122 composePackage =
123 {
124 name,
125 packageName,
126 src,
127 dependencies ? [ ],
128 ...
129 }@args:
130 builtins.addErrorContext "while evaluating node package '${packageName}'" ''
131 installPackage "${packageName}" "${src}"
132 ${includeDependencies { inherit dependencies; }}
133 cd ..
134 ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
135 '';
136
  # Generates shell code that pinpoints the versions in this package's
  # package.json to the versions actually installed under node_modules/,
  # then (if there are dependencies) recurses into each dependency.
  pinpointDependencies =
    { dependencies, production }:
    let
      # Node script that rewrites ./package.json in place: every declared
      # dependency range is replaced with the concrete version found by
      # walking up through node_modules/ folders.
      pinpointDependenciesFromPackageJSON = writeTextFile {
        name = "pinpointDependencies.js";
        text = ''
          var fs = require('fs');
          var path = require('path');

          // Walk upwards through enclosing node_modules/ folders until the
          // dependency's package.json is found; returns null once the search
          // reaches the Nix store root without a match.
          function resolveDependencyVersion(location, name) {
            if(location == process.env['NIX_STORE']) {
              return null;
            } else {
              var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");

              if(fs.existsSync(dependencyPackageJSON)) {
                var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));

                if(dependencyPackageObj.name == name) {
                  return dependencyPackageObj.version;
                }
              } else {
                return resolveDependencyVersion(path.resolve(location, ".."), name);
              }
            }
          }

          // Replace each version specifier in the given dependency map with
          // the resolved installed version (mutates the map in place).
          function replaceDependencies(dependencies) {
            if(typeof dependencies == "object" && dependencies !== null) {
              for(var dependency in dependencies) {
                var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);

                if(resolvedVersion === null) {
                  process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
                } else {
                  dependencies[dependency] = resolvedVersion;
                }
              }
            }
          }

          /* Read the package.json configuration */
          var packageObj = JSON.parse(fs.readFileSync('./package.json'));

          /* Pinpoint all dependencies */
          replaceDependencies(packageObj.dependencies);
          // In production mode devDependencies are dropped entirely;
          // otherwise they are pinpointed like regular dependencies.
          if(process.argv[2] == "development") {
            replaceDependencies(packageObj.devDependencies);
          }
          else {
            packageObj.devDependencies = {};
          }
          replaceDependencies(packageObj.optionalDependencies);
          replaceDependencies(packageObj.peerDependencies);

          /* Write the fixed package.json file */
          fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
        '';
      };
    in
    ''
      node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}

      ${lib.optionalString (dependencies != [ ]) ''
        if [ -d node_modules ]
        then
          cd node_modules
          ${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
          cd ..
        fi
      ''}
    '';
209
210 # Recursively traverses all dependencies of a package and pinpoints all
211 # dependencies in the package.json file to the versions that are actually
212 # being used.
213
214 pinpointDependenciesOfPackage =
215 {
216 packageName,
217 dependencies ? [ ],
218 production ? true,
219 ...
220 }@args:
221 ''
222 if [ -d "${packageName}" ]
223 then
224 cd "${packageName}"
225 ${pinpointDependencies { inherit dependencies production; }}
226 cd ..
227 ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
228 fi
229 '';
230
  # Extract the Node.js source code which is used to compile packages with
  # native bindings (referenced below via npm's --nodedir flag so node-gyp
  # builds against these sources).
  nodeSources = runCommand "node-sources" { } ''
    # Drop upstream ownership/permissions so extraction works in the sandbox
    tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
    mv node-* $out
  '';
237
  # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
  addIntegrityFieldsScript = writeTextFile {
    name = "addintegrityfields.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      // Recursively walks the lock file's nested dependency tree and stamps
      // each installed package's package.json with the _integrity/_resolved/
      // _from metadata that npm would normally record at install time.
      function augmentDependencies(baseDir, dependencies) {
        for(var dependencyName in dependencies) {
          var dependency = dependencies[dependencyName];

          // Open package.json and augment metadata fields
          var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
          var packageJSONPath = path.join(packageJSONDir, "package.json");

          if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
            console.log("Adding metadata fields to: "+packageJSONPath);
            var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));

            if(dependency.integrity) {
              packageObj["_integrity"] = dependency.integrity;
            } else {
              packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
            }

            if(dependency.resolved) {
              packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
            } else {
              packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
            }

            if(dependency.from !== undefined) { // Adopt from property if one has been provided
              packageObj["_from"] = dependency.from;
            }

            fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
          }

          // Augment transitive dependencies
          if(dependency.dependencies !== undefined) {
            augmentDependencies(packageJSONDir, dependency.dependencies);
          }
        }
      }

      // Entry point: only lock file versions 1 and 2 carry the nested
      // "dependencies" structure this script understands.
      if(fs.existsSync("./package-lock.json")) {
        var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));

        if(![1, 2].includes(packageLock.lockfileVersion)) {
          process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
          process.exit(1);
        }

        if(packageLock.dependencies !== undefined) {
          augmentDependencies(".", packageLock.dependencies);
        }
      }
    '';
  };
297
298 # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
299 reconstructPackageLock = writeTextFile {
300 name = "reconstructpackagelock.js";
301 text = ''
302 var fs = require('fs');
303 var path = require('path');
304
305 var packageObj = JSON.parse(fs.readFileSync("package.json"));
306
307 var lockObj = {
308 name: packageObj.name,
309 version: packageObj.version,
310 lockfileVersion: 2,
311 requires: true,
312 packages: {
313 "": {
314 name: packageObj.name,
315 version: packageObj.version,
316 license: packageObj.license,
317 bin: packageObj.bin,
318 dependencies: packageObj.dependencies,
319 engines: packageObj.engines,
320 optionalDependencies: packageObj.optionalDependencies
321 }
322 },
323 dependencies: {}
324 };
325
326 function augmentPackageJSON(filePath, packages, dependencies) {
327 var packageJSON = path.join(filePath, "package.json");
328 if(fs.existsSync(packageJSON)) {
329 var packageObj = JSON.parse(fs.readFileSync(packageJSON));
330 packages[filePath] = {
331 version: packageObj.version,
332 integrity: "sha1-000000000000000000000000000=",
333 dependencies: packageObj.dependencies,
334 engines: packageObj.engines,
335 optionalDependencies: packageObj.optionalDependencies
336 };
337 dependencies[packageObj.name] = {
338 version: packageObj.version,
339 integrity: "sha1-000000000000000000000000000=",
340 dependencies: {}
341 };
342 processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
343 }
344 }
345
346 function processDependencies(dir, packages, dependencies) {
347 if(fs.existsSync(dir)) {
348 var files = fs.readdirSync(dir);
349
350 files.forEach(function(entry) {
351 var filePath = path.join(dir, entry);
352 var stats = fs.statSync(filePath);
353
354 if(stats.isDirectory()) {
355 if(entry.substr(0, 1) == "@") {
356 // When we encounter a namespace folder, augment all packages belonging to the scope
357 var pkgFiles = fs.readdirSync(filePath);
358
359 pkgFiles.forEach(function(entry) {
360 if(stats.isDirectory()) {
361 var pkgFilePath = path.join(filePath, entry);
362 augmentPackageJSON(pkgFilePath, packages, dependencies);
363 }
364 });
365 } else {
366 augmentPackageJSON(filePath, packages, dependencies);
367 }
368 }
369 });
370 }
371 }
372
373 processDependencies("node_modules", lockObj.packages, lockObj.dependencies);
374
375 fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
376 '';
377 };
378
379 # Script that links bins defined in package.json to the node_modules bin directory
380 # NPM does not do this for top-level packages itself anymore as of v7
381 linkBinsScript = writeTextFile {
382 name = "linkbins.js";
383 text = ''
384 var fs = require('fs');
385 var path = require('path');
386
387 var packageObj = JSON.parse(fs.readFileSync("package.json"));
388
389 var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);
390
391 if(packageObj.bin !== undefined) {
392 fs.mkdirSync(path.join(nodeModules, ".bin"))
393
394 if(typeof packageObj.bin == "object") {
395 Object.keys(packageObj.bin).forEach(function(exe) {
396 if(fs.existsSync(packageObj.bin[exe])) {
397 console.log("linking bin '" + exe + "'");
398 fs.symlinkSync(
399 path.join("..", packageObj.name, packageObj.bin[exe]),
400 path.join(nodeModules, ".bin", exe)
401 );
402 }
403 else {
404 console.log("skipping non-existent bin '" + exe + "'");
405 }
406 })
407 }
408 else {
409 if(fs.existsSync(packageObj.bin)) {
410 console.log("linking bin '" + packageObj.bin + "'");
411 fs.symlinkSync(
412 path.join("..", packageObj.name, packageObj.bin),
413 path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
414 );
415 }
416 else {
417 console.log("skipping non-existent bin '" + packageObj.bin + "'");
418 }
419 }
420 }
421 else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
422 fs.mkdirSync(path.join(nodeModules, ".bin"))
423
424 fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
425 if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
426 console.log("linking bin '" + exe + "'");
427 fs.symlinkSync(
428 path.join("..", packageObj.name, packageObj.directories.bin, exe),
429 path.join(nodeModules, ".bin", exe)
430 );
431 }
432 else {
433 console.log("skipping non-existent bin '" + exe + "'");
434 }
435 })
436 }
437 '';
438 };
439
  # Generates the shell fragment that pinpoints dependency versions,
  # patches shebangs, and runs `npm rebuild` / `npm install` against the
  # pre-composed node_modules/ tree. Expects $pinpointDependenciesScriptPath
  # to be provided via passAsFile by the calling derivation.
  prepareAndInvokeNPM =
    {
      packageName,
      bypassCache,
      reconstructLock,
      npmFlags,
      production,
    }:
    let
      # With a bypassed cache npm can run fully offline; otherwise point it
      # at an unreachable registry so any accidental download attempt fails
      # loudly instead of leaving the sandbox.
      forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
    in
    ''
      # Pinpoint the versions of all dependencies to the ones that are actually being used
      echo "pinpointing versions of dependencies..."
      source $pinpointDependenciesScriptPath

      # Patch the shebangs of the bundled modules to prevent them from
      # calling executables outside the Nix store as much as possible
      patchShebangs .

      # Deploy the Node.js package by running npm install. Since the
      # dependencies have been provided already by ourselves, it should not
      # attempt to install them again, which is good, because we want to make
      # it Nix's responsibility. If it needs to install any dependencies
      # anyway (e.g. because the dependency parameters are
      # incomplete/incorrect), it fails.
      #
      # The other responsibilities of NPM are kept -- version checks, build
      # steps, postprocessing etc.

      export HOME=$TMPDIR
      cd "${packageName}"
      runHook preRebuild

      ${lib.optionalString bypassCache ''
        ${lib.optionalString reconstructLock ''
          if [ -f package-lock.json ]
          then
            echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
            echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
            rm package-lock.json
          else
            echo "No package-lock.json file found, reconstructing..."
          fi

          node ${reconstructPackageLock}
        ''}

        node ${addIntegrityFieldsScript}
      ''}

      npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild

      runHook postRebuild

      # The ''$ escape makes Nix pass the variable reference through to the
      # build-time shell, so the builder (not Nix) tests dontNpmInstall.
      if [ "''${dontNpmInstall-}" != "1" ]
      then
        # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
        rm -f npm-shrinkwrap.json

        npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
      fi

      # Link executables defined in package.json
      node ${linkBinsScript}
    '';
506
  # Builds and composes an NPM package including all its dependencies
  #
  # The result contains the package under $out/lib/node_modules, with its
  # executables symlinked into $out/bin and man pages into $out/share/man.
  # Attributes in `args` not consumed here are forwarded to mkDerivation.
  buildNodePackage =
    {
      name,
      packageName,
      version ? null,
      dependencies ? [ ],
      buildInputs ? [ ],
      production ? true, # when true, devDependencies are dropped during pinpointing
      npmFlags ? "",
      dontNpmInstall ? false,
      bypassCache ? false,
      reconstructLock ? false,
      preRebuild ? "",
      dontStrip ? true,
      unpackPhase ? "true", # all real work happens in installPhase, so unpack/build are no-ops by default
      buildPhase ? "true",
      meta ? { },
      ...
    }@args:

    let
      # Attributes that are handled (or replaced) explicitly above must not
      # be passed to mkDerivation verbatim.
      extraArgs = removeAttrs args [
        "name"
        "dependencies"
        "buildInputs"
        "dontStrip"
        "dontNpmInstall"
        "preRebuild"
        "unpackPhase"
        "buildPhase"
        "meta"
      ];
    in
    stdenv.mkDerivation (
      {
        name = "${name}${if version == null then "" else "-${version}"}";
        buildInputs =
          [
            tarWrapper
            python
            nodejs
          ]
          ++ lib.optional (stdenv.hostPlatform.isLinux) pkgs.util-linux
          ++ lib.optional (stdenv.hostPlatform.isDarwin) libtool
          ++ buildInputs;

        inherit nodejs;

        inherit dontStrip; # Stripping may fail a build for some package deployments
        inherit
          dontNpmInstall
          preRebuild
          unpackPhase
          buildPhase
          ;

        # Generated shell fragments; delivered as files (see passAsFile) to
        # keep the derivation environment small, and sourced in installPhase.
        compositionScript = composePackage args;
        pinpointDependenciesScript = pinpointDependenciesOfPackage args;

        passAsFile = [
          "compositionScript"
          "pinpointDependenciesScript"
        ];

        installPhase = ''
          source ${installPackage}

          # Create and enter a root node_modules/ folder
          mkdir -p $out/lib/node_modules
          cd $out/lib/node_modules

          # Compose the package and all its dependencies
          source $compositionScriptPath

          ${prepareAndInvokeNPM {
            inherit
              packageName
              bypassCache
              reconstructLock
              npmFlags
              production
              ;
          }}

          # Create symlink to the deployed executable folder, if applicable
          if [ -d "$out/lib/node_modules/.bin" ]
          then
            ln -s $out/lib/node_modules/.bin $out/bin

            # Fixup all executables
            ls $out/bin/* | while read i
            do
              file="$(readlink -f "$i")"
              chmod u+rwx "$file"
              if isScript "$file"
              then
                sed -i 's/\r$//' "$file" # convert crlf to lf
              fi
            done
          fi

          # Create symlinks to the deployed manual page folders, if applicable
          if [ -d "$out/lib/node_modules/${packageName}/man" ]
          then
            mkdir -p $out/share
            for dir in "$out/lib/node_modules/${packageName}/man/"*
            do
              mkdir -p $out/share/man/$(basename "$dir")
              for page in "$dir"/*
              do
                ln -s $page $out/share/man/$(basename "$dir")
              done
            done
          fi

          # Run post install hook, if provided
          runHook postInstall
        '';

        meta = {
          # default to Node.js' platforms
          platforms = nodejs.meta.platforms;
        } // meta;
      }
      // extraArgs
    );
634
  # Builds a node environment (a node_modules folder and a set of binaries)
  #
  # The result has the composed dependency tree under $out/lib/node_modules
  # and the installed executables reachable through $out/bin. Used by
  # buildNodeShell below to provide dependencies to a development shell.
  buildNodeDependencies =
    {
      name,
      packageName,
      version ? null,
      src,
      dependencies ? [ ],
      buildInputs ? [ ],
      production ? true,
      npmFlags ? "",
      dontNpmInstall ? false,
      bypassCache ? false,
      reconstructLock ? false,
      dontStrip ? true,
      unpackPhase ? "true", # all real work happens in installPhase
      buildPhase ? "true",
      ...
    }@args:

    let
      # Attributes consumed here must not reach mkDerivation verbatim
      extraArgs = removeAttrs args [
        "name"
        "dependencies"
        "buildInputs"
      ];
    in
    stdenv.mkDerivation (
      {
        name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";

        buildInputs =
          [
            tarWrapper
            python
            nodejs
          ]
          ++ lib.optional (stdenv.hostPlatform.isLinux) pkgs.util-linux
          ++ lib.optional (stdenv.hostPlatform.isDarwin) libtool
          ++ buildInputs;

        inherit dontStrip; # Stripping may fail a build for some package deployments
        inherit dontNpmInstall unpackPhase buildPhase;

        # Generated shell fragments; delivered as files (see passAsFile)
        # and sourced in installPhase.
        includeScript = includeDependencies { inherit dependencies; };
        pinpointDependenciesScript = pinpointDependenciesOfPackage args;

        passAsFile = [
          "includeScript"
          "pinpointDependenciesScript"
        ];

        installPhase = ''
          source ${installPackage}

          mkdir -p $out/${packageName}
          cd $out/${packageName}

          source $includeScriptPath

          # Create fake package.json to make the npm commands work properly
          cp ${src}/package.json .
          chmod 644 package.json
          ${lib.optionalString bypassCache ''
            if [ -f ${src}/package-lock.json ]
            then
              cp ${src}/package-lock.json .
              chmod 644 package-lock.json
            fi
          ''}

          # Go to the parent folder to make sure that all packages are pinpointed
          cd ..
          ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}

          ${prepareAndInvokeNPM {
            inherit
              packageName
              bypassCache
              reconstructLock
              npmFlags
              production
              ;
          }}

          # Expose the executables that were installed
          cd ..
          ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}

          mv ${packageName} lib
          ln -s $out/lib/node_modules/.bin $out/bin
        '';
      }
      // extraArgs
    );
730
  # Builds a development shell
  #
  # The result provides $out/bin/shell, a script that runs the shell hook
  # (exposing the dependency closure via NODE_PATH and PATH) and then execs
  # an interactive shell. The underlying dependency derivation is exposed
  # through the nodeDependencies attribute.
  buildNodeShell =
    {
      name,
      packageName,
      version ? null,
      src,
      dependencies ? [ ],
      buildInputs ? [ ],
      production ? true,
      npmFlags ? "",
      dontNpmInstall ? false,
      bypassCache ? false,
      reconstructLock ? false,
      dontStrip ? true,
      unpackPhase ? "true",
      buildPhase ? "true",
      ...
    }@args:

    let
      # All arguments are forwarded to buildNodeDependencies, which does
      # the actual composition work.
      nodeDependencies = buildNodeDependencies args;
      extraArgs = removeAttrs args [
        "name"
        "dependencies"
        "buildInputs"
        "dontStrip"
        "dontNpmInstall"
        "unpackPhase"
        "buildPhase"
      ];
    in
    stdenv.mkDerivation (
      {
        name = "node-shell-${name}${if version == null then "" else "-${version}"}";

        buildInputs =
          [
            python
            nodejs
          ]
          ++ lib.optional (stdenv.hostPlatform.isLinux) pkgs.util-linux
          ++ buildInputs;
        buildCommand = ''
          mkdir -p $out/bin
          # The heredoc is unquoted, so the build-time shell expands
          # $shellHook here, baking the hook's text into the script.
          cat > $out/bin/shell <<EOF
          #! ${stdenv.shell} -e
          $shellHook
          exec ${stdenv.shell}
          EOF
          chmod +x $out/bin/shell
        '';

        # Provide the dependencies in a development shell through the NODE_PATH environment variable
        inherit nodeDependencies;
        shellHook = lib.optionalString (dependencies != [ ]) ''
          export NODE_PATH=${nodeDependencies}/lib/node_modules
          export PATH="${nodeDependencies}/bin:$PATH"
        '';
      }
      // extraArgs
    );
in
# Expose each builder wrapped in lib.makeOverridable so call sites can
# refine their arguments later via `.override`.
lib.mapAttrs (_name: builder: lib.makeOverridable builder) {
  inherit
    buildNodeSourceDist
    buildNodePackage
    buildNodeDependencies
    buildNodeShell
    ;
}