# This file originates from node2nix

{stdenv, nodejs, python2, utillinux, libtool, runCommand, writeTextFile}:

let
  python = if nodejs ? python then nodejs.python else python2;

  # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
  tarWrapper = runCommand "tarWrapper" {} ''
    mkdir -p $out/bin

    cat > $out/bin/tar <<EOF
    #! ${stdenv.shell} -e
    $(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
    EOF

    chmod +x $out/bin/tar
  '';

  # Function that generates a TGZ file from an NPM project
  buildNodeSourceDist =
    { name, version, src, ... }:

    stdenv.mkDerivation {
      name = "node-tarball-${name}-${version}";
      inherit src;
      buildInputs = [ nodejs ];
      buildPhase = ''
        export HOME=$TMPDIR
        tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
      '';
      installPhase = ''
        mkdir -p $out/tarballs
        mv $tgzFile $out/tarballs
        mkdir -p $out/nix-support
        echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
      '';
    };
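
  # Illustrative sketch only: buildNodeSourceDist is typically called with the
  # same attributes that node2nix generates for a package. The attribute values
  # below are hypothetical.
  #
  #   buildNodeSourceDist {
  #     name = "example-package";
  #     version = "1.0.0";
  #     src = ./.;
  #   }
  #
  # The result is a derivation whose $out/tarballs directory contains the
  # tarball produced by `npm pack`.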

  includeDependencies = {dependencies}:
    stdenv.lib.optionalString (dependencies != [])
      (stdenv.lib.concatMapStrings (dependency:
        ''
          # Bundle the dependencies of the package
          mkdir -p node_modules
          cd node_modules

          # Only include dependencies if they don't exist. They may also be bundled in the package.
          if [ ! -e "${dependency.name}" ]
          then
              ${composePackage dependency}
          fi

          cd ..
        ''
      ) dependencies);

  # Recursively composes the dependencies of a package
  composePackage = { name, packageName, src, dependencies ? [], ... }@args:
    builtins.addErrorContext "while evaluating node package '${packageName}'" ''
      DIR=$(pwd)
      cd $TMPDIR

      unpackFile ${src}

      # First create the base directory in which the target dependency resides
      mkdir -p "$(dirname "$DIR/${packageName}")"

      if [ -f "${src}" ]
      then
          # Figure out what directory has been unpacked
          packageDir="$(find . -maxdepth 1 -type d | tail -1)"

          # Restore write permissions to make building work
          find "$packageDir" -type d -exec chmod u+x {} \;
          chmod -R u+w "$packageDir"

          # Move the extracted tarball into the output folder
          mv "$packageDir" "$DIR/${packageName}"
      elif [ -d "${src}" ]
      then
          # Get a stripped name (without hash) of the source directory.
          # On old nixpkgs it's already set internally.
          if [ -z "$strippedName" ]
          then
              strippedName="$(stripHash ${src})"
          fi

          # Restore write permissions to make building work
          chmod -R u+w "$strippedName"

          # Move the extracted directory into the output folder
          mv "$strippedName" "$DIR/${packageName}"
      fi

      # Unset the stripped name to not confuse the next unpack step
      unset strippedName

      # Include the dependencies of the package
      cd "$DIR/${packageName}"
      ${includeDependencies { inherit dependencies; }}
      cd ..
      ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
    '';
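
  # A minimal sketch of how composePackage is invoked by node2nix-generated
  # expressions (the package attributes below are made up for illustration):
  #
  #   composePackage {
  #     name = "underscore";
  #     packageName = "underscore";
  #     version = "1.9.1";
  #     src = fetchurl {
  #       url = "https://registry.npmjs.org/underscore/-/underscore-1.9.1.tgz";
  #       sha512 = "...";
  #     };
  #     dependencies = [];
  #   }
  #
  # The resulting shell snippet is spliced into the builder's install phase,
  # where it unpacks the tarball into node_modules/underscore and then recurses
  # into its (here empty) dependency list.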

  pinpointDependencies = {dependencies, production}:
    let
      pinpointDependenciesFromPackageJSON = writeTextFile {
        name = "pinpointDependencies.js";
        text = ''
          var fs = require('fs');
          var path = require('path');

          function resolveDependencyVersion(location, name) {
              if(location == process.env['NIX_STORE']) {
                  return null;
              } else {
                  var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");

                  if(fs.existsSync(dependencyPackageJSON)) {
                      var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));

                      if(dependencyPackageObj.name == name) {
                          return dependencyPackageObj.version;
                      }
                  } else {
                      return resolveDependencyVersion(path.resolve(location, ".."), name);
                  }
              }
          }

          function replaceDependencies(dependencies) {
              if(typeof dependencies == "object" && dependencies !== null) {
                  for(var dependency in dependencies) {
                      var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);

                      if(resolvedVersion === null) {
                          process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
                      } else {
                          dependencies[dependency] = resolvedVersion;
                      }
                  }
              }
          }

          /* Read the package.json configuration */
          var packageObj = JSON.parse(fs.readFileSync('./package.json'));

          /* Pinpoint all dependencies */
          replaceDependencies(packageObj.dependencies);
          if(process.argv[2] == "development") {
              replaceDependencies(packageObj.devDependencies);
          }
          replaceDependencies(packageObj.optionalDependencies);

          /* Write the fixed package.json file */
          fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
        '';
      };
    in
    ''
      node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}

      ${stdenv.lib.optionalString (dependencies != [])
        ''
          if [ -d node_modules ]
          then
              cd node_modules
              ${stdenv.lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
              cd ..
          fi
        ''}
    '';

  # Recursively traverses all dependencies of a package and pinpoints all
  # dependencies in the package.json file to the versions that are actually
  # being used.
  pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
    ''
      if [ -d "${packageName}" ]
      then
          cd "${packageName}"
          ${pinpointDependencies { inherit dependencies production; }}
          cd ..
          ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
      fi
    '';
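
  # To illustrate the effect of pinpointing (the values are hypothetical):
  # a package.json that declares
  #
  #   "dependencies": { "semver": "^5.0.0" }
  #
  # is rewritten so that the version range becomes exactly the version that Nix
  # deployed into node_modules/, e.g.:
  #
  #   "dependencies": { "semver": "5.7.1" }
  #
  # This prevents npm from attempting to fetch a different (newer) version later on.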

  # Extract the Node.js source code which is used to compile packages with
  # native bindings
  nodeSources = runCommand "node-sources" {} ''
    tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
    mv node-* $out
  '';

  # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (which is empty)
  addIntegrityFieldsScript = writeTextFile {
    name = "addintegrityfields.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      function augmentDependencies(baseDir, dependencies) {
          for(var dependencyName in dependencies) {
              var dependency = dependencies[dependencyName];

              // Open package.json and augment metadata fields
              var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
              var packageJSONPath = path.join(packageJSONDir, "package.json");

              if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Development dependencies may be absent in production installs and can safely be ignored.
                  console.log("Adding metadata fields to: "+packageJSONPath);
                  var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));

                  if(dependency.integrity) {
                      packageObj["_integrity"] = dependency.integrity;
                  } else {
                      packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to cause any harm and it bypasses downloads.
                  }

                  if(dependency.resolved) {
                      packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
                  } else {
                      packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
                  }

                  if(dependency.from !== undefined) { // Adopt the from property if one has been provided
                      packageObj["_from"] = dependency.from;
                  }

                  fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
              }

              // Augment transitive dependencies
              if(dependency.dependencies !== undefined) {
                  augmentDependencies(packageJSONDir, dependency.dependencies);
              }
          }
      }

      if(fs.existsSync("./package-lock.json")) {
          var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));

          if(packageLock.lockfileVersion !== 1) {
              process.stderr.write("Sorry, I only understand lock file version 1!\n");
              process.exit(1);
          }

          if(packageLock.dependencies !== undefined) {
              augmentDependencies(".", packageLock.dependencies);
          }
      }
    '';
  };
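
  # As an illustration (with made-up values), a lock file entry such as
  #
  #   "minimist": {
  #     "version": "1.2.5",
  #     "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
  #     "integrity": "sha512-..."
  #   }
  #
  # causes the script to add the following fields to node_modules/minimist/package.json:
  #
  #   "_integrity": "sha512-...",
  #   "_resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz"
  #
  # so that npm treats the package as already installed from the registry and
  # never consults its (empty) cache.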

  # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
  reconstructPackageLock = writeTextFile {
    name = "reconstructpackagelock.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      var packageObj = JSON.parse(fs.readFileSync("package.json"));

      var lockObj = {
          name: packageObj.name,
          version: packageObj.version,
          lockfileVersion: 1,
          requires: true,
          dependencies: {}
      };

      function augmentPackageJSON(filePath, dependencies) {
          var packageJSON = path.join(filePath, "package.json");
          if(fs.existsSync(packageJSON)) {
              var packageObj = JSON.parse(fs.readFileSync(packageJSON));
              dependencies[packageObj.name] = {
                  version: packageObj.version,
                  integrity: "sha1-000000000000000000000000000=",
                  dependencies: {}
              };
              processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
          }
      }

      function processDependencies(dir, dependencies) {
          if(fs.existsSync(dir)) {
              var files = fs.readdirSync(dir);

              files.forEach(function(entry) {
                  var filePath = path.join(dir, entry);
                  var stats = fs.statSync(filePath);

                  if(stats.isDirectory()) {
                      if(entry.substr(0, 1) == "@") {
                          // When we encounter a namespace folder, augment all packages belonging to the scope
                          var pkgFiles = fs.readdirSync(filePath);

                          pkgFiles.forEach(function(entry) {
                              var pkgFilePath = path.join(filePath, entry);

                              if(fs.statSync(pkgFilePath).isDirectory()) {
                                  augmentPackageJSON(pkgFilePath, dependencies);
                              }
                          });
                      } else {
                          augmentPackageJSON(filePath, dependencies);
                      }
                  }
              });
          }
      }

      processDependencies("node_modules", lockObj.dependencies);

      fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
    '';
  };
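
  # For a project with a single dependency, the reconstructed lock file looks
  # roughly like this (names and versions are hypothetical):
  #
  #   {
  #     "name": "example-app",
  #     "version": "1.0.0",
  #     "lockfileVersion": 1,
  #     "requires": true,
  #     "dependencies": {
  #       "semver": {
  #         "version": "5.7.1",
  #         "integrity": "sha1-000000000000000000000000000=",
  #         "dependencies": {}
  #       }
  #     }
  #   }
  #
  # Only the structure matters: the dummy integrity hashes are never verified
  # against a download, because the packages are already present in node_modules/.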

  prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
    let
      forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
    in
    ''
      # Pinpoint the versions of all dependencies to the ones that are actually being used
      echo "pinpointing versions of dependencies..."
      source $pinpointDependenciesScriptPath

      # Patch the shebangs of the bundled modules to prevent them from
      # calling executables outside the Nix store as much as possible
      patchShebangs .

      # Deploy the Node.js package by running npm install. Since the
      # dependencies have been provided already by ourselves, it should not
      # attempt to install them again, which is good, because we want to make
      # it Nix's responsibility. If it needs to install any dependencies
      # anyway (e.g. because the dependency parameters are
      # incomplete/incorrect), it fails.
      #
      # The other responsibilities of NPM are kept -- version checks, build
      # steps, postprocessing etc.

      export HOME=$TMPDIR
      cd "${packageName}"
      runHook preRebuild

      ${stdenv.lib.optionalString bypassCache ''
        ${stdenv.lib.optionalString reconstructLock ''
          if [ -f package-lock.json ]
          then
              echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
              echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
              rm package-lock.json
          else
              echo "No package-lock.json file found, reconstructing..."
          fi

          node ${reconstructPackageLock}
        ''}

        node ${addIntegrityFieldsScript}
      ''}

      npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild

      if [ "''${dontNpmInstall-}" != "1" ]
      then
          # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
          rm -f npm-shrinkwrap.json

          npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
      fi
    '';
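
  # With bypassCache = true and production = true, the generated snippet boils
  # down to commands along these lines (store paths shortened for readability):
  #
  #   npm --offline --nodedir=/nix/store/...-node-sources --production rebuild
  #   npm --offline --nodedir=/nix/store/...-node-sources --production install
  #
  # With bypassCache = false, a bogus registry (http://www.example.com) is
  # passed instead of --offline, so that any attempt to download a package
  # fails instead of silently reaching the real registry.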

  # Builds and composes an NPM package including all its dependencies
  buildNodePackage =
    { name
    , packageName
    , version
    , dependencies ? []
    , buildInputs ? []
    , production ? true
    , npmFlags ? ""
    , dontNpmInstall ? false
    , bypassCache ? false
    , reconstructLock ? false
    , preRebuild ? ""
    , dontStrip ? true
    , unpackPhase ? "true"
    , buildPhase ? "true"
    , ... }@args:

    let
      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ];
    in
    stdenv.mkDerivation ({
      name = "node_${name}-${version}";
      buildInputs = [ tarWrapper python nodejs ]
        ++ stdenv.lib.optional (stdenv.isLinux) utillinux
        ++ stdenv.lib.optional (stdenv.isDarwin) libtool
        ++ buildInputs;

      inherit nodejs;

      inherit dontStrip; # Stripping may fail a build for some package deployments
      inherit dontNpmInstall preRebuild unpackPhase buildPhase;

      compositionScript = composePackage args;
      pinpointDependenciesScript = pinpointDependenciesOfPackage args;

      passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];

      installPhase = ''
        # Create and enter a root node_modules/ folder
        mkdir -p $out/lib/node_modules
        cd $out/lib/node_modules

        # Compose the package and all its dependencies
        source $compositionScriptPath

        ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}

        # Create symlink to the deployed executable folder, if applicable
        if [ -d "$out/lib/node_modules/.bin" ]
        then
            ln -s $out/lib/node_modules/.bin $out/bin
        fi

        # Create symlinks to the deployed manual page folders, if applicable
        if [ -d "$out/lib/node_modules/${packageName}/man" ]
        then
            mkdir -p $out/share
            for dir in "$out/lib/node_modules/${packageName}/man/"*
            do
                mkdir -p $out/share/man/$(basename "$dir")
                for page in "$dir"/*
                do
                    ln -s $page $out/share/man/$(basename "$dir")
                done
            done
        fi

        # Run post install hook, if provided
        runHook postInstall
      '';
    } // extraArgs);
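
  # A sketch of how buildNodePackage is normally called from a node2nix-generated
  # node-packages.nix (all attribute values below are invented for illustration):
  #
  #   buildNodePackage {
  #     name = "example-cli";
  #     packageName = "example-cli";
  #     version = "2.3.4";
  #     src = fetchurl {
  #       url = "https://registry.npmjs.org/example-cli/-/example-cli-2.3.4.tgz";
  #       sha512 = "...";
  #     };
  #     dependencies = [ /* other generated package attribute sets */ ];
  #     production = true;
  #     bypassCache = true;
  #     reconstructLock = true;
  #   }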

  # Builds a development shell
  buildNodeShell =
    { name
    , packageName
    , version
    , src
    , dependencies ? []
    , buildInputs ? []
    , production ? true
    , npmFlags ? ""
    , dontNpmInstall ? false
    , bypassCache ? false
    , reconstructLock ? false
    , dontStrip ? true
    , unpackPhase ? "true"
    , buildPhase ? "true"
    , ... }@args:

    let
      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];

      nodeDependencies = stdenv.mkDerivation ({
        name = "node-dependencies-${name}-${version}";

        buildInputs = [ tarWrapper python nodejs ]
          ++ stdenv.lib.optional (stdenv.isLinux) utillinux
          ++ stdenv.lib.optional (stdenv.isDarwin) libtool
          ++ buildInputs;

        inherit dontStrip; # Stripping may fail a build for some package deployments
        inherit dontNpmInstall unpackPhase buildPhase;

        includeScript = includeDependencies { inherit dependencies; };
        pinpointDependenciesScript = pinpointDependenciesOfPackage args;

        passAsFile = [ "includeScript" "pinpointDependenciesScript" ];

        installPhase = ''
          mkdir -p $out/${packageName}
          cd $out/${packageName}

          source $includeScriptPath

          # Create fake package.json to make the npm commands work properly
          cp ${src}/package.json .
          chmod 644 package.json
          ${stdenv.lib.optionalString bypassCache ''
            if [ -f ${src}/package-lock.json ]
            then
                cp ${src}/package-lock.json .
            fi
          ''}

          # Go to the parent folder to make sure that all packages are pinpointed
          cd ..
          ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}

          ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}

          # Expose the executables that were installed
          cd ..
          ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}

          mv ${packageName} lib
          ln -s $out/lib/node_modules/.bin $out/bin
        '';
      } // extraArgs);
    in
    stdenv.mkDerivation {
      name = "node-shell-${name}-${version}";

      buildInputs = [ python nodejs ] ++ stdenv.lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
      buildCommand = ''
        mkdir -p $out/bin
        cat > $out/bin/shell <<EOF
        #! ${stdenv.shell} -e
        $shellHook
        exec ${stdenv.shell}
        EOF
        chmod +x $out/bin/shell
      '';

      # Provide the dependencies in a development shell through the NODE_PATH environment variable
      inherit nodeDependencies;
      shellHook = stdenv.lib.optionalString (dependencies != []) ''
        export NODE_PATH=${nodeDependencies}/lib/node_modules
        export PATH="${nodeDependencies}/bin:$PATH"
      '';
    };
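
  # Hypothetical usage sketch: a generated shell.nix typically calls
  # buildNodeShell with the project's own source tree, e.g.:
  #
  #   buildNodeShell {
  #     name = "example-app";
  #     packageName = "example-app";
  #     version = "1.0.0";
  #     src = ./.;
  #     dependencies = [ /* generated dependency attribute sets */ ];
  #   }
  #
  # Entering the result with nix-shell yields an environment in which NODE_PATH
  # and PATH point at the pre-built node-dependencies derivation.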
in
{
  buildNodeSourceDist = stdenv.lib.makeOverridable buildNodeSourceDist;
  buildNodePackage = stdenv.lib.makeOverridable buildNodePackage;
  buildNodeShell = stdenv.lib.makeOverridable buildNodeShell;
}
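
# A minimal sketch of how this file is usually imported (paths and attribute
# names are assumptions based on the common node2nix layout):
#
#   let
#     pkgs = import <nixpkgs> {};
#     nodeEnv = import ./node-env.nix {
#       inherit (pkgs) stdenv nodejs python2 utillinux libtool runCommand writeTextFile;
#     };
#   in
#   nodeEnv.buildNodePackage { /* generated package arguments */ }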