1# This file originates from node2nix
2
3{
4 lib,
5 stdenv,
6 nodejs,
7 pkgs,
8 libtool,
9 runCommand,
10 writeTextFile,
11 writeShellScript,
12}:
13
14let
15 inherit (nodejs) python;
16
  # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
  #
  # The wrapper shadows the real tar on PATH (it is added to buildInputs below)
  # and forwards all arguments, appending flags that silence the warnings and
  # delay directory permission restoration until extraction is complete.
  tarWrapper = runCommand "tarWrapper" { } ''
    mkdir -p $out/bin

    cat > $out/bin/tar <<EOF
    #! ${stdenv.shell} -e
    $(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
    EOF

    chmod +x $out/bin/tar
  '';
28
  # Function that generates a TGZ file from a NPM project
  #
  # Runs `npm pack` on the given source tree and exposes the resulting
  # tarball under $out/tarballs, registering it as a Hydra build product.
  buildNodeSourceDist =
    {
      name,
      version,
      src,
      ...
    }:

    stdenv.mkDerivation {
      name = "node-tarball-${name}-${version}";
      inherit src;
      buildInputs = [ nodejs ];
      # npm needs a writable HOME; the last line of `npm pack` output is the tarball name
      buildPhase = ''
        export HOME=$TMPDIR
        tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
      '';
      installPhase = ''
        mkdir -p $out/tarballs
        mv $tgzFile $out/tarballs
        mkdir -p $out/nix-support
        echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
      '';
    };
53
54 # Common shell logic
55 installPackage = writeShellScript "install-package" ''
56 installPackage() {
57 local packageName=$1 src=$2
58
59 local strippedName
60
61 local DIR=$PWD
62 cd $TMPDIR
63
64 unpackFile $src
65
66 # Make the base dir in which the target dependency resides first
67 mkdir -p "$(dirname "$DIR/$packageName")"
68
69 if [ -f "$src" ]
70 then
71 # Figure out what directory has been unpacked
72 packageDir="$(find . -maxdepth 1 -type d | tail -1)"
73
74 # Restore write permissions to make building work
75 find "$packageDir" -type d -exec chmod u+x {} \;
76 chmod -R u+w "$packageDir"
77
78 # Move the extracted tarball into the output folder
79 mv "$packageDir" "$DIR/$packageName"
80 elif [ -d "$src" ]
81 then
82 # Get a stripped name (without hash) of the source directory.
83 # On old nixpkgs it's already set internally.
84 if [ -z "$strippedName" ]
85 then
86 strippedName="$(stripHash $src)"
87 fi
88
89 # Restore write permissions to make building work
90 chmod -R u+w "$strippedName"
91
92 # Move the extracted directory into the output folder
93 mv "$strippedName" "$DIR/$packageName"
94 fi
95
96 # Change to the package directory to install dependencies
97 cd "$DIR/$packageName"
98 }
99 '';
100
  # Bundle the dependencies of the package
  #
  # Only include dependencies if they don't exist. They may also be bundled in the package.
  #
  # Returns a shell snippet (empty when there are no dependencies) that enters
  # node_modules/, composes each dependency that is not already present, and
  # returns to the parent directory afterwards.
  includeDependencies =
    { dependencies }:
    lib.optionalString (dependencies != [ ]) (
      ''
        mkdir -p node_modules
        cd node_modules
      ''
      # Each dependency is composed recursively via composePackage; the
      # existence check skips dependencies already bundled in the package.
      + (lib.concatMapStrings (dependency: ''
        if [ ! -e "${dependency.packageName}" ]; then
          ${composePackage dependency}
        fi
      '') dependencies)
      + ''
        cd ..
      ''
    );
120
  # Recursively composes the dependencies of a package
  #
  # Emits shell code that installs `src` under `packageName` inside the
  # current node_modules/ (via the installPackage shell function), then
  # recursively composes its own dependencies. Scoped packages
  # ("@scope/name") live one directory deeper, hence the extra `cd ..`.
  composePackage =
    {
      name,
      packageName,
      src,
      dependencies ? [ ],
      ...
    }@args:
    # addErrorContext tags evaluation errors with the package name to ease debugging
    builtins.addErrorContext "while evaluating node package '${packageName}'" ''
      installPackage "${packageName}" "${src}"
      ${includeDependencies { inherit dependencies; }}
      cd ..
      ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
    '';
136
137 pinpointDependencies =
138 { dependencies, production }:
139 let
140 pinpointDependenciesFromPackageJSON = writeTextFile {
141 name = "pinpointDependencies.js";
142 text = ''
143 var fs = require('fs');
144 var path = require('path');
145
146 function resolveDependencyVersion(location, name) {
147 if(location == process.env['NIX_STORE']) {
148 return null;
149 } else {
150 var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
151
152 if(fs.existsSync(dependencyPackageJSON)) {
153 var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
154
155 if(dependencyPackageObj.name == name) {
156 return dependencyPackageObj.version;
157 }
158 } else {
159 return resolveDependencyVersion(path.resolve(location, ".."), name);
160 }
161 }
162 }
163
164 function replaceDependencies(dependencies) {
165 if(typeof dependencies == "object" && dependencies !== null) {
166 for(var dependency in dependencies) {
167 var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
168
169 if(resolvedVersion === null) {
170 process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
171 } else {
172 dependencies[dependency] = resolvedVersion;
173 }
174 }
175 }
176 }
177
178 /* Read the package.json configuration */
179 var packageObj = JSON.parse(fs.readFileSync('./package.json'));
180
181 /* Pinpoint all dependencies */
182 replaceDependencies(packageObj.dependencies);
183 if(process.argv[2] == "development") {
184 replaceDependencies(packageObj.devDependencies);
185 }
186 else {
187 packageObj.devDependencies = {};
188 }
189 replaceDependencies(packageObj.optionalDependencies);
190 replaceDependencies(packageObj.peerDependencies);
191
192 /* Write the fixed package.json file */
193 fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
194 '';
195 };
196 in
197 ''
198 node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
199
200 ${lib.optionalString (dependencies != [ ]) ''
201 if [ -d node_modules ]
202 then
203 cd node_modules
204 ${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
205 cd ..
206 fi
207 ''}
208 '';
209
  # Recursively traverses all dependencies of a package and pinpoints all
  # dependencies in the package.json file to the versions that are actually
  # being used.
  #
  # Returns a shell snippet that enters the package directory (if present),
  # runs the pinpointing logic, and steps back out. Scoped packages
  # ("@scope/name") are nested one level deeper, hence the extra `cd ..`.
  pinpointDependenciesOfPackage =
    {
      packageName,
      dependencies ? [ ],
      production ? true,
      ...
    }@args:
    ''
      if [ -d "${packageName}" ]
      then
        cd "${packageName}"
        ${pinpointDependencies { inherit dependencies production; }}
        cd ..
        ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
      fi
    '';
230
  # Extract the Node.js source code which is used to compile packages with
  # native bindings
  #
  # Passed to npm via --nodedir so node-gyp builds against the matching headers.
  nodeSources = runCommand "node-sources" { } ''
    tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
    mv node-* $out
  '';
237
  # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
  #
  # Walks the package-lock.json dependency tree and writes _integrity,
  # _resolved and _from metadata into each installed package's package.json,
  # so npm believes the packages were fetched normally. Only lock file
  # versions 1 and 2 are supported.
  addIntegrityFieldsScript = writeTextFile {
    name = "addintegrityfields.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      function augmentDependencies(baseDir, dependencies) {
        for(var dependencyName in dependencies) {
          var dependency = dependencies[dependencyName];

          // Open package.json and augment metadata fields
          var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
          var packageJSONPath = path.join(packageJSONDir, "package.json");

          if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
            console.log("Adding metadata fields to: "+packageJSONPath);
            var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));

            if(dependency.integrity) {
              packageObj["_integrity"] = dependency.integrity;
            } else {
              packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
            }

            if(dependency.resolved) {
              packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
            } else {
              packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
            }

            if(dependency.from !== undefined) { // Adopt from property if one has been provided
              packageObj["_from"] = dependency.from;
            }

            fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
          }

          // Augment transitive dependencies
          if(dependency.dependencies !== undefined) {
            augmentDependencies(packageJSONDir, dependency.dependencies);
          }
        }
      }

      if(fs.existsSync("./package-lock.json")) {
        var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));

        if(![1, 2].includes(packageLock.lockfileVersion)) {
          process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
          process.exit(1);
        }

        if(packageLock.dependencies !== undefined) {
          augmentDependencies(".", packageLock.dependencies);
        }
      }
    '';
  };
297
298 # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
299 reconstructPackageLock = writeTextFile {
300 name = "reconstructpackagelock.js";
301 text = ''
302 var fs = require('fs');
303 var path = require('path');
304
305 var packageObj = JSON.parse(fs.readFileSync("package.json"));
306
307 var lockObj = {
308 name: packageObj.name,
309 version: packageObj.version,
310 lockfileVersion: 2,
311 requires: true,
312 packages: {
313 "": {
314 name: packageObj.name,
315 version: packageObj.version,
316 license: packageObj.license,
317 bin: packageObj.bin,
318 dependencies: packageObj.dependencies,
319 engines: packageObj.engines,
320 optionalDependencies: packageObj.optionalDependencies
321 }
322 },
323 dependencies: {}
324 };
325
326 function augmentPackageJSON(filePath, packages, dependencies) {
327 var packageJSON = path.join(filePath, "package.json");
328 if(fs.existsSync(packageJSON)) {
329 var packageObj = JSON.parse(fs.readFileSync(packageJSON));
330 packages[filePath] = {
331 version: packageObj.version,
332 integrity: "sha1-000000000000000000000000000=",
333 dependencies: packageObj.dependencies,
334 engines: packageObj.engines,
335 optionalDependencies: packageObj.optionalDependencies
336 };
337 dependencies[packageObj.name] = {
338 version: packageObj.version,
339 integrity: "sha1-000000000000000000000000000=",
340 dependencies: {}
341 };
342 processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
343 }
344 }
345
346 function processDependencies(dir, packages, dependencies) {
347 if(fs.existsSync(dir)) {
348 var files = fs.readdirSync(dir);
349
350 files.forEach(function(entry) {
351 var filePath = path.join(dir, entry);
352 var stats = fs.statSync(filePath);
353
354 if(stats.isDirectory()) {
355 if(entry.substr(0, 1) == "@") {
356 // When we encounter a namespace folder, augment all packages belonging to the scope
357 var pkgFiles = fs.readdirSync(filePath);
358
359 pkgFiles.forEach(function(entry) {
360 if(stats.isDirectory()) {
361 var pkgFilePath = path.join(filePath, entry);
362 augmentPackageJSON(pkgFilePath, packages, dependencies);
363 }
364 });
365 } else {
366 augmentPackageJSON(filePath, packages, dependencies);
367 }
368 }
369 });
370 }
371 }
372
373 processDependencies("node_modules", lockObj.packages, lockObj.dependencies);
374
375 fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
376 '';
377 };
378
379 # Script that links bins defined in package.json to the node_modules bin directory
380 # NPM does not do this for top-level packages itself anymore as of v7
381 linkBinsScript = writeTextFile {
382 name = "linkbins.js";
383 text = ''
384 var fs = require('fs');
385 var path = require('path');
386
387 var packageObj = JSON.parse(fs.readFileSync("package.json"));
388
389 var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);
390
391 if(packageObj.bin !== undefined) {
392 fs.mkdirSync(path.join(nodeModules, ".bin"))
393
394 if(typeof packageObj.bin == "object") {
395 Object.keys(packageObj.bin).forEach(function(exe) {
396 if(fs.existsSync(packageObj.bin[exe])) {
397 console.log("linking bin '" + exe + "'");
398 fs.symlinkSync(
399 path.join("..", packageObj.name, packageObj.bin[exe]),
400 path.join(nodeModules, ".bin", exe)
401 );
402 }
403 else {
404 console.log("skipping non-existent bin '" + exe + "'");
405 }
406 })
407 }
408 else {
409 if(fs.existsSync(packageObj.bin)) {
410 console.log("linking bin '" + packageObj.bin + "'");
411 fs.symlinkSync(
412 path.join("..", packageObj.name, packageObj.bin),
413 path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
414 );
415 }
416 else {
417 console.log("skipping non-existent bin '" + packageObj.bin + "'");
418 }
419 }
420 }
421 else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
422 fs.mkdirSync(path.join(nodeModules, ".bin"))
423
424 fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
425 if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
426 console.log("linking bin '" + exe + "'");
427 fs.symlinkSync(
428 path.join("..", packageObj.name, packageObj.directories.bin, exe),
429 path.join(nodeModules, ".bin", exe)
430 );
431 }
432 else {
433 console.log("skipping non-existent bin '" + exe + "'");
434 }
435 })
436 }
437 '';
438 };
439
  # Generates the shell snippet that pinpoints dependency versions, patches
  # shebangs and invokes `npm rebuild` / `npm install` against the locally
  # composed node_modules/ tree. When bypassCache is disabled, npm is pointed
  # at an unreachable registry so any attempted download fails loudly.
  prepareAndInvokeNPM =
    {
      packageName,
      bypassCache,
      reconstructLock,
      npmFlags,
      production,
    }:
    let
      # With an offline cache npm may run fully offline; otherwise the bogus
      # registry URL ensures npm cannot silently fetch anything.
      forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
    in
    ''
      # Pinpoint the versions of all dependencies to the ones that are actually being used
      echo "pinpointing versions of dependencies..."
      source $pinpointDependenciesScriptPath

      # Patch the shebangs of the bundled modules to prevent them from
      # calling executables outside the Nix store as much as possible
      patchShebangs .

      # Deploy the Node.js package by running npm install. Since the
      # dependencies have been provided already by ourselves, it should not
      # attempt to install them again, which is good, because we want to make
      # it Nix's responsibility. If it needs to install any dependencies
      # anyway (e.g. because the dependency parameters are
      # incomplete/incorrect), it fails.
      #
      # The other responsibilities of NPM are kept -- version checks, build
      # steps, postprocessing etc.

      export HOME=$TMPDIR
      cd "${packageName}"
      runHook preRebuild

      ${lib.optionalString bypassCache ''
        ${lib.optionalString reconstructLock ''
          if [ -f package-lock.json ]
          then
            echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
            echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
            rm package-lock.json
          else
            echo "No package-lock.json file found, reconstructing..."
          fi

          node ${reconstructPackageLock}
        ''}

        node ${addIntegrityFieldsScript}
      ''}

      npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild

      runHook postRebuild

      if [ "''${dontNpmInstall-}" != "1" ]
      then
        # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
        rm -f npm-shrinkwrap.json

        npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
      fi

      # Link executables defined in package.json
      node ${linkBinsScript}
    '';
506
  # Builds and composes an NPM package including all its dependencies
  #
  # Produces a derivation with the package deployed under
  # $out/lib/node_modules, executables symlinked into $out/bin and manual
  # pages into $out/share/man. Unknown arguments are forwarded to
  # mkDerivation via extraArgs.
  buildNodePackage =
    {
      name,
      packageName,
      version ? null,
      dependencies ? [ ],
      buildInputs ? [ ],
      production ? true,
      npmFlags ? "",
      dontNpmInstall ? false,
      bypassCache ? false,
      reconstructLock ? false,
      preRebuild ? "",
      dontStrip ? true,
      unpackPhase ? "true",
      buildPhase ? "true",
      meta ? { },
      ...
    }@args:

    let
      # Arguments consumed by this function; everything else passes through to mkDerivation
      extraArgs = removeAttrs args [
        "name"
        "dependencies"
        "buildInputs"
        "dontStrip"
        "dontNpmInstall"
        "preRebuild"
        "unpackPhase"
        "buildPhase"
        "meta"
      ];
    in
    stdenv.mkDerivation (
      {
        name = "${name}${if version == null then "" else "-${version}"}";
        buildInputs = [
          tarWrapper
          python
          nodejs
        ]
        ++ lib.optional (stdenv.hostPlatform.isLinux) pkgs.util-linux
        ++ lib.optional (stdenv.hostPlatform.isDarwin) libtool
        ++ buildInputs;

        inherit nodejs;

        inherit dontStrip; # Stripping may fail a build for some package deployments
        inherit
          dontNpmInstall
          preRebuild
          unpackPhase
          buildPhase
          ;

        # Shell snippets generated at evaluation time; passAsFile turns them
        # into files ($compositionScriptPath etc.) sourced by installPhase.
        compositionScript = composePackage args;
        pinpointDependenciesScript = pinpointDependenciesOfPackage args;

        passAsFile = [
          "compositionScript"
          "pinpointDependenciesScript"
        ];

        installPhase = ''
          source ${installPackage}

          # Create and enter a root node_modules/ folder
          mkdir -p $out/lib/node_modules
          cd $out/lib/node_modules

          # Compose the package and all its dependencies
          source $compositionScriptPath

          ${prepareAndInvokeNPM {
            inherit
              packageName
              bypassCache
              reconstructLock
              npmFlags
              production
              ;
          }}

          # Create symlink to the deployed executable folder, if applicable
          if [ -d "$out/lib/node_modules/.bin" ]
          then
            ln -s $out/lib/node_modules/.bin $out/bin

            # Fixup all executables
            ls $out/bin/* | while read i
            do
              file="$(readlink -f "$i")"
              chmod u+rwx "$file"
              if isScript "$file"
              then
                sed -i 's/\r$//' "$file" # convert crlf to lf
              fi
            done
          fi

          # Create symlinks to the deployed manual page folders, if applicable
          if [ -d "$out/lib/node_modules/${packageName}/man" ]
          then
            mkdir -p $out/share
            for dir in "$out/lib/node_modules/${packageName}/man/"*
            do
              mkdir -p $out/share/man/$(basename "$dir")
              for page in "$dir"/*
              do
                ln -s $page $out/share/man/$(basename "$dir")
              done
            done
          fi

          # Run post install hook, if provided
          runHook postInstall
        '';

        meta = {
          # default to Node.js' platforms
          platforms = nodejs.meta.platforms;
        }
        // meta;
      }
      // extraArgs
    );
634
  # Builds a node environment (a node_modules folder and a set of binaries)
  #
  # Unlike buildNodePackage, this only deploys the dependency closure plus a
  # copied package.json (and lock file, when bypassing the cache), yielding
  # $out/lib/node_modules and $out/bin for use in development shells.
  buildNodeDependencies =
    {
      name,
      packageName,
      version ? null,
      src,
      dependencies ? [ ],
      buildInputs ? [ ],
      production ? true,
      npmFlags ? "",
      dontNpmInstall ? false,
      bypassCache ? false,
      reconstructLock ? false,
      dontStrip ? true,
      unpackPhase ? "true",
      buildPhase ? "true",
      ...
    }@args:

    let
      # Arguments consumed here; everything else passes through to mkDerivation
      extraArgs = removeAttrs args [
        "name"
        "dependencies"
        "buildInputs"
      ];
    in
    stdenv.mkDerivation (
      {
        name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";

        buildInputs = [
          tarWrapper
          python
          nodejs
        ]
        ++ lib.optional (stdenv.hostPlatform.isLinux) pkgs.util-linux
        ++ lib.optional (stdenv.hostPlatform.isDarwin) libtool
        ++ buildInputs;

        inherit dontStrip; # Stripping may fail a build for some package deployments
        inherit dontNpmInstall unpackPhase buildPhase;

        # Shell snippets generated at evaluation time; passAsFile exposes them
        # as files ($includeScriptPath etc.) sourced by installPhase.
        includeScript = includeDependencies { inherit dependencies; };
        pinpointDependenciesScript = pinpointDependenciesOfPackage args;

        passAsFile = [
          "includeScript"
          "pinpointDependenciesScript"
        ];

        installPhase = ''
          source ${installPackage}

          mkdir -p $out/${packageName}
          cd $out/${packageName}

          source $includeScriptPath

          # Create fake package.json to make the npm commands work properly
          cp ${src}/package.json .
          chmod 644 package.json
          ${lib.optionalString bypassCache ''
            if [ -f ${src}/package-lock.json ]
            then
              cp ${src}/package-lock.json .
              chmod 644 package-lock.json
            fi
          ''}

          # Go to the parent folder to make sure that all packages are pinpointed
          cd ..
          ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}

          ${prepareAndInvokeNPM {
            inherit
              packageName
              bypassCache
              reconstructLock
              npmFlags
              production
              ;
          }}

          # Expose the executables that were installed
          cd ..
          ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}

          mv ${packageName} lib
          ln -s $out/lib/node_modules/.bin $out/bin
        '';
      }
      // extraArgs
    );
729
  # Builds a development shell
  #
  # The resulting derivation builds the dependency closure with
  # buildNodeDependencies and exposes it via NODE_PATH/PATH in a shellHook,
  # plus a $out/bin/shell script that enters an equivalent interactive shell.
  buildNodeShell =
    {
      name,
      packageName,
      version ? null,
      src,
      dependencies ? [ ],
      buildInputs ? [ ],
      production ? true,
      npmFlags ? "",
      dontNpmInstall ? false,
      bypassCache ? false,
      reconstructLock ? false,
      dontStrip ? true,
      unpackPhase ? "true",
      buildPhase ? "true",
      ...
    }@args:

    let
      # All arguments are forwarded so the dependency build matches this shell
      nodeDependencies = buildNodeDependencies args;
      # Arguments consumed here; everything else passes through to mkDerivation
      extraArgs = removeAttrs args [
        "name"
        "dependencies"
        "buildInputs"
        "dontStrip"
        "dontNpmInstall"
        "unpackPhase"
        "buildPhase"
      ];
    in
    stdenv.mkDerivation (
      {
        name = "node-shell-${name}${if version == null then "" else "-${version}"}";

        buildInputs = [
          python
          nodejs
        ]
        ++ lib.optional (stdenv.hostPlatform.isLinux) pkgs.util-linux
        ++ buildInputs;
        buildCommand = ''
          mkdir -p $out/bin
          cat > $out/bin/shell <<EOF
          #! ${stdenv.shell} -e
          $shellHook
          exec ${stdenv.shell}
          EOF
          chmod +x $out/bin/shell
        '';

        # Provide the dependencies in a development shell through the NODE_PATH environment variable
        inherit nodeDependencies;
        shellHook = lib.optionalString (dependencies != [ ]) ''
          export NODE_PATH=${nodeDependencies}/lib/node_modules
          export PATH="${nodeDependencies}/bin:$PATH"
        '';
      }
      // extraArgs
    );
in
# Public interface: each builder is wrapped in makeOverridable so callers
# can adjust its arguments later via `.override`.
{
  buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
  buildNodePackage = lib.makeOverridable buildNodePackage;
  buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
  buildNodeShell = lib.makeOverridable buildNodeShell;
}