# This file originates from node2nix

{lib, stdenv, nodejs, python2, util-linux, libtool, runCommand, writeTextFile}:

let
  # Python interpreter used by node-gyp for native add-on builds.
  # Prefer the interpreter the nodejs package declares; fall back to python2.
  python = if nodejs ? python then nodejs.python else python2;

8 # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
9 tarWrapper = runCommand "tarWrapper" {} ''
10 mkdir -p $out/bin
11
12 cat > $out/bin/tar <<EOF
13 #! ${stdenv.shell} -e
14 $(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
15 EOF
16
17 chmod +x $out/bin/tar
18 '';
19
20 # Function that generates a TGZ file from a NPM project
21 buildNodeSourceDist =
22 { name, version, src, ... }:
23
24 stdenv.mkDerivation {
25 name = "node-tarball-${name}-${version}";
26 inherit src;
27 buildInputs = [ nodejs ];
28 buildPhase = ''
29 export HOME=$TMPDIR
30 tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
31 '';
32 installPhase = ''
33 mkdir -p $out/tarballs
34 mv $tgzFile $out/tarballs
35 mkdir -p $out/nix-support
36 echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
37 '';
38 };
39
40 includeDependencies = {dependencies}:
41 lib.optionalString (dependencies != [])
42 (lib.concatMapStrings (dependency:
43 ''
44 # Bundle the dependencies of the package
45 mkdir -p node_modules
46 cd node_modules
47
48 # Only include dependencies if they don't exist. They may also be bundled in the package.
49 if [ ! -e "${dependency.name}" ]
50 then
51 ${composePackage dependency}
52 fi
53
54 cd ..
55 ''
56 ) dependencies);
57
58 # Recursively composes the dependencies of a package
59 composePackage = { name, packageName, src, dependencies ? [], ... }@args:
60 builtins.addErrorContext "while evaluating node package '${packageName}'" ''
61 DIR=$(pwd)
62 cd $TMPDIR
63
64 unpackFile ${src}
65
66 # Make the base dir in which the target dependency resides first
67 mkdir -p "$(dirname "$DIR/${packageName}")"
68
69 if [ -f "${src}" ]
70 then
71 # Figure out what directory has been unpacked
72 packageDir="$(find . -maxdepth 1 -type d | tail -1)"
73
74 # Restore write permissions to make building work
75 find "$packageDir" -type d -exec chmod u+x {} \;
76 chmod -R u+w "$packageDir"
77
78 # Move the extracted tarball into the output folder
79 mv "$packageDir" "$DIR/${packageName}"
80 elif [ -d "${src}" ]
81 then
82 # Get a stripped name (without hash) of the source directory.
83 # On old nixpkgs it's already set internally.
84 if [ -z "$strippedName" ]
85 then
86 strippedName="$(stripHash ${src})"
87 fi
88
89 # Restore write permissions to make building work
90 chmod -R u+w "$strippedName"
91
92 # Move the extracted directory into the output folder
93 mv "$strippedName" "$DIR/${packageName}"
94 fi
95
96 # Unset the stripped name to not confuse the next unpack step
97 unset strippedName
98
99 # Include the dependencies of the package
100 cd "$DIR/${packageName}"
101 ${includeDependencies { inherit dependencies; }}
102 cd ..
103 ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
104 '';
105
106 pinpointDependencies = {dependencies, production}:
107 let
108 pinpointDependenciesFromPackageJSON = writeTextFile {
109 name = "pinpointDependencies.js";
110 text = ''
111 var fs = require('fs');
112 var path = require('path');
113
114 function resolveDependencyVersion(location, name) {
115 if(location == process.env['NIX_STORE']) {
116 return null;
117 } else {
118 var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
119
120 if(fs.existsSync(dependencyPackageJSON)) {
121 var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
122
123 if(dependencyPackageObj.name == name) {
124 return dependencyPackageObj.version;
125 }
126 } else {
127 return resolveDependencyVersion(path.resolve(location, ".."), name);
128 }
129 }
130 }
131
132 function replaceDependencies(dependencies) {
133 if(typeof dependencies == "object" && dependencies !== null) {
134 for(var dependency in dependencies) {
135 var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
136
137 if(resolvedVersion === null) {
138 process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
139 } else {
140 dependencies[dependency] = resolvedVersion;
141 }
142 }
143 }
144 }
145
146 /* Read the package.json configuration */
147 var packageObj = JSON.parse(fs.readFileSync('./package.json'));
148
149 /* Pinpoint all dependencies */
150 replaceDependencies(packageObj.dependencies);
151 if(process.argv[2] == "development") {
152 replaceDependencies(packageObj.devDependencies);
153 }
154 replaceDependencies(packageObj.optionalDependencies);
155
156 /* Write the fixed package.json file */
157 fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
158 '';
159 };
160 in
161 ''
162 node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
163
164 ${lib.optionalString (dependencies != [])
165 ''
166 if [ -d node_modules ]
167 then
168 cd node_modules
169 ${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
170 cd ..
171 fi
172 ''}
173 '';
174
175 # Recursively traverses all dependencies of a package and pinpoints all
176 # dependencies in the package.json file to the versions that are actually
177 # being used.
178
179 pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
180 ''
181 if [ -d "${packageName}" ]
182 then
183 cd "${packageName}"
184 ${pinpointDependencies { inherit dependencies production; }}
185 cd ..
186 ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
187 fi
188 '';
189
190 # Extract the Node.js source code which is used to compile packages with
191 # native bindings
192 nodeSources = runCommand "node-sources" {} ''
193 tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
194 mv node-* $out
195 '';
196
197 # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
198 addIntegrityFieldsScript = writeTextFile {
199 name = "addintegrityfields.js";
200 text = ''
201 var fs = require('fs');
202 var path = require('path');
203
204 function augmentDependencies(baseDir, dependencies) {
205 for(var dependencyName in dependencies) {
206 var dependency = dependencies[dependencyName];
207
208 // Open package.json and augment metadata fields
209 var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
210 var packageJSONPath = path.join(packageJSONDir, "package.json");
211
212 if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
213 console.log("Adding metadata fields to: "+packageJSONPath);
214 var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
215
216 if(dependency.integrity) {
217 packageObj["_integrity"] = dependency.integrity;
218 } else {
219 packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
220 }
221
222 if(dependency.resolved) {
223 packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
224 } else {
225 packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
226 }
227
228 if(dependency.from !== undefined) { // Adopt from property if one has been provided
229 packageObj["_from"] = dependency.from;
230 }
231
232 fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
233 }
234
235 // Augment transitive dependencies
236 if(dependency.dependencies !== undefined) {
237 augmentDependencies(packageJSONDir, dependency.dependencies);
238 }
239 }
240 }
241
242 if(fs.existsSync("./package-lock.json")) {
243 var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
244
245 if(packageLock.lockfileVersion !== 1) {
246 process.stderr.write("Sorry, I only understand lock file version 1!\n");
247 process.exit(1);
248 }
249
250 if(packageLock.dependencies !== undefined) {
251 augmentDependencies(".", packageLock.dependencies);
252 }
253 }
254 '';
255 };
256
257 # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
258 reconstructPackageLock = writeTextFile {
259 name = "addintegrityfields.js";
260 text = ''
261 var fs = require('fs');
262 var path = require('path');
263
264 var packageObj = JSON.parse(fs.readFileSync("package.json"));
265
266 var lockObj = {
267 name: packageObj.name,
268 version: packageObj.version,
269 lockfileVersion: 1,
270 requires: true,
271 dependencies: {}
272 };
273
274 function augmentPackageJSON(filePath, dependencies) {
275 var packageJSON = path.join(filePath, "package.json");
276 if(fs.existsSync(packageJSON)) {
277 var packageObj = JSON.parse(fs.readFileSync(packageJSON));
278 dependencies[packageObj.name] = {
279 version: packageObj.version,
280 integrity: "sha1-000000000000000000000000000=",
281 dependencies: {}
282 };
283 processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
284 }
285 }
286
287 function processDependencies(dir, dependencies) {
288 if(fs.existsSync(dir)) {
289 var files = fs.readdirSync(dir);
290
291 files.forEach(function(entry) {
292 var filePath = path.join(dir, entry);
293 var stats = fs.statSync(filePath);
294
295 if(stats.isDirectory()) {
296 if(entry.substr(0, 1) == "@") {
297 // When we encounter a namespace folder, augment all packages belonging to the scope
298 var pkgFiles = fs.readdirSync(filePath);
299
300 pkgFiles.forEach(function(entry) {
301 if(stats.isDirectory()) {
302 var pkgFilePath = path.join(filePath, entry);
303 augmentPackageJSON(pkgFilePath, dependencies);
304 }
305 });
306 } else {
307 augmentPackageJSON(filePath, dependencies);
308 }
309 }
310 });
311 }
312 }
313
314 processDependencies("node_modules", lockObj.dependencies);
315
316 fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
317 '';
318 };
319
320 prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
321 let
322 forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
323 in
324 ''
325 # Pinpoint the versions of all dependencies to the ones that are actually being used
326 echo "pinpointing versions of dependencies..."
327 source $pinpointDependenciesScriptPath
328
329 # Patch the shebangs of the bundled modules to prevent them from
330 # calling executables outside the Nix store as much as possible
331 patchShebangs .
332
333 # Deploy the Node.js package by running npm install. Since the
334 # dependencies have been provided already by ourselves, it should not
335 # attempt to install them again, which is good, because we want to make
336 # it Nix's responsibility. If it needs to install any dependencies
337 # anyway (e.g. because the dependency parameters are
338 # incomplete/incorrect), it fails.
339 #
340 # The other responsibilities of NPM are kept -- version checks, build
341 # steps, postprocessing etc.
342
343 export HOME=$TMPDIR
344 cd "${packageName}"
345 runHook preRebuild
346
347 ${lib.optionalString bypassCache ''
348 ${lib.optionalString reconstructLock ''
349 if [ -f package-lock.json ]
350 then
351 echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
352 echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
353 rm package-lock.json
354 else
355 echo "No package-lock.json file found, reconstructing..."
356 fi
357
358 node ${reconstructPackageLock}
359 ''}
360
361 node ${addIntegrityFieldsScript}
362 ''}
363
364 npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
365
366 if [ "''${dontNpmInstall-}" != "1" ]
367 then
368 # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
369 rm -f npm-shrinkwrap.json
370
371 npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
372 fi
373 '';
374
375 # Builds and composes an NPM package including all its dependencies
376 buildNodePackage =
377 { name
378 , packageName
379 , version
380 , dependencies ? []
381 , buildInputs ? []
382 , production ? true
383 , npmFlags ? ""
384 , dontNpmInstall ? false
385 , bypassCache ? false
386 , reconstructLock ? false
387 , preRebuild ? ""
388 , dontStrip ? true
389 , unpackPhase ? "true"
390 , buildPhase ? "true"
391 , meta ? {}
392 , ... }@args:
393
394 let
395 extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
396 in
397 stdenv.mkDerivation ({
398 name = "node_${name}-${version}";
399 buildInputs = [ tarWrapper python nodejs ]
400 ++ lib.optional (stdenv.isLinux) util-linux
401 ++ lib.optional (stdenv.isDarwin) libtool
402 ++ buildInputs;
403
404 inherit nodejs;
405
406 inherit dontStrip; # Stripping may fail a build for some package deployments
407 inherit dontNpmInstall preRebuild unpackPhase buildPhase;
408
409 compositionScript = composePackage args;
410 pinpointDependenciesScript = pinpointDependenciesOfPackage args;
411
412 passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
413
414 installPhase = ''
415 # Create and enter a root node_modules/ folder
416 mkdir -p $out/lib/node_modules
417 cd $out/lib/node_modules
418
419 # Compose the package and all its dependencies
420 source $compositionScriptPath
421
422 ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
423
424 # Create symlink to the deployed executable folder, if applicable
425 if [ -d "$out/lib/node_modules/.bin" ]
426 then
427 ln -s $out/lib/node_modules/.bin $out/bin
428 fi
429
430 # Create symlinks to the deployed manual page folders, if applicable
431 if [ -d "$out/lib/node_modules/${packageName}/man" ]
432 then
433 mkdir -p $out/share
434 for dir in "$out/lib/node_modules/${packageName}/man/"*
435 do
436 mkdir -p $out/share/man/$(basename "$dir")
437 for page in "$dir"/*
438 do
439 ln -s $page $out/share/man/$(basename "$dir")
440 done
441 done
442 fi
443
444 # Run post install hook, if provided
445 runHook postInstall
446 '';
447
448 meta = {
449 # default to Node.js' platforms
450 platforms = nodejs.meta.platforms;
451 } // meta;
452 } // extraArgs);
453
454 # Builds a development shell
455 buildNodeShell =
456 { name
457 , packageName
458 , version
459 , src
460 , dependencies ? []
461 , buildInputs ? []
462 , production ? true
463 , npmFlags ? ""
464 , dontNpmInstall ? false
465 , bypassCache ? false
466 , reconstructLock ? false
467 , dontStrip ? true
468 , unpackPhase ? "true"
469 , buildPhase ? "true"
470 , ... }@args:
471
472 let
473 extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
474
475 nodeDependencies = stdenv.mkDerivation ({
476 name = "node-dependencies-${name}-${version}";
477
478 buildInputs = [ tarWrapper python nodejs ]
479 ++ lib.optional (stdenv.isLinux) util-linux
480 ++ lib.optional (stdenv.isDarwin) libtool
481 ++ buildInputs;
482
483 inherit dontStrip; # Stripping may fail a build for some package deployments
484 inherit dontNpmInstall unpackPhase buildPhase;
485
486 includeScript = includeDependencies { inherit dependencies; };
487 pinpointDependenciesScript = pinpointDependenciesOfPackage args;
488
489 passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
490
491 installPhase = ''
492 mkdir -p $out/${packageName}
493 cd $out/${packageName}
494
495 source $includeScriptPath
496
497 # Create fake package.json to make the npm commands work properly
498 cp ${src}/package.json .
499 chmod 644 package.json
500 ${lib.optionalString bypassCache ''
501 if [ -f ${src}/package-lock.json ]
502 then
503 cp ${src}/package-lock.json .
504 fi
505 ''}
506
507 # Go to the parent folder to make sure that all packages are pinpointed
508 cd ..
509 ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
510
511 ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
512
513 # Expose the executables that were installed
514 cd ..
515 ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
516
517 mv ${packageName} lib
518 ln -s $out/lib/node_modules/.bin $out/bin
519 '';
520 } // extraArgs);
521 in
522 stdenv.mkDerivation {
523 name = "node-shell-${name}-${version}";
524
525 buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) util-linux ++ buildInputs;
526 buildCommand = ''
527 mkdir -p $out/bin
528 cat > $out/bin/shell <<EOF
529 #! ${stdenv.shell} -e
530 $shellHook
531 exec ${stdenv.shell}
532 EOF
533 chmod +x $out/bin/shell
534 '';
535
536 # Provide the dependencies in a development shell through the NODE_PATH environment variable
537 inherit nodeDependencies;
538 shellHook = lib.optionalString (dependencies != []) ''
539 export NODE_PATH=${nodeDependencies}/lib/node_modules
540 export PATH="${nodeDependencies}/bin:$PATH"
541 '';
542 };
in
{
  # Expose the three builders; makeOverridable lets callers .override arguments.
  buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
  buildNodePackage = lib.makeOverridable buildNodePackage;
  buildNodeShell = lib.makeOverridable buildNodeShell;
}