# This file originates from node2nix

{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile}:

let
  # Nixpkgs 20.09 still calls the package "utillinux"; later revisions renamed
  # it to "util-linux". Select whichever attribute the given pkgs set provides.
  utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;

  # Prefer the Python interpreter bundled with the Node.js derivation (needed
  # by node-gyp); fall back to python2 when none is exposed.
  python = if nodejs ? python then nodejs.python else python2;
11 | # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise | |
12 | tarWrapper = runCommand "tarWrapper" {} '' | |
13 | mkdir -p $out/bin | |
14 | ||
15 | cat > $out/bin/tar <<EOF | |
16 | #! ${stdenv.shell} -e | |
17 | $(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore | |
18 | EOF | |
19 | ||
20 | chmod +x $out/bin/tar | |
21 | ''; | |
22 | ||
23 | # Function that generates a TGZ file from a NPM project | |
24 | buildNodeSourceDist = | |
25 | { name, version, src, ... }: | |
26 | ||
27 | stdenv.mkDerivation { | |
28 | name = "node-tarball-${name}-${version}"; | |
29 | inherit src; | |
30 | buildInputs = [ nodejs ]; | |
31 | buildPhase = '' | |
32 | export HOME=$TMPDIR | |
33 | tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts) | |
34 | ''; | |
35 | installPhase = '' | |
36 | mkdir -p $out/tarballs | |
37 | mv $tgzFile $out/tarballs | |
38 | mkdir -p $out/nix-support | |
39 | echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products | |
40 | ''; | |
41 | }; | |
42 | ||
43 | includeDependencies = {dependencies}: | |
44 | lib.optionalString (dependencies != []) | |
45 | (lib.concatMapStrings (dependency: | |
46 | '' | |
47 | # Bundle the dependencies of the package | |
48 | mkdir -p node_modules | |
49 | cd node_modules | |
50 | ||
51 | # Only include dependencies if they don't exist. They may also be bundled in the package. | |
52 | if [ ! -e "${dependency.name}" ] | |
53 | then | |
54 | ${composePackage dependency} | |
55 | fi | |
56 | ||
57 | cd .. | |
58 | '' | |
59 | ) dependencies); | |
60 | ||
61 | # Recursively composes the dependencies of a package | |
62 | composePackage = { name, packageName, src, dependencies ? [], ... }@args: | |
63 | builtins.addErrorContext "while evaluating node package '${packageName}'" '' | |
64 | DIR=$(pwd) | |
65 | cd $TMPDIR | |
66 | ||
67 | unpackFile ${src} | |
68 | ||
69 | # Make the base dir in which the target dependency resides first | |
70 | mkdir -p "$(dirname "$DIR/${packageName}")" | |
71 | ||
72 | if [ -f "${src}" ] | |
73 | then | |
74 | # Figure out what directory has been unpacked | |
75 | packageDir="$(find . -maxdepth 1 -type d | tail -1)" | |
76 | ||
77 | # Restore write permissions to make building work | |
78 | find "$packageDir" -type d -exec chmod u+x {} \; | |
79 | chmod -R u+w "$packageDir" | |
80 | ||
81 | # Move the extracted tarball into the output folder | |
82 | mv "$packageDir" "$DIR/${packageName}" | |
83 | elif [ -d "${src}" ] | |
84 | then | |
85 | # Get a stripped name (without hash) of the source directory. | |
86 | # On old nixpkgs it's already set internally. | |
87 | if [ -z "$strippedName" ] | |
88 | then | |
89 | strippedName="$(stripHash ${src})" | |
90 | fi | |
91 | ||
92 | # Restore write permissions to make building work | |
93 | chmod -R u+w "$strippedName" | |
94 | ||
95 | # Move the extracted directory into the output folder | |
96 | mv "$strippedName" "$DIR/${packageName}" | |
97 | fi | |
98 | ||
99 | # Unset the stripped name to not confuse the next unpack step | |
100 | unset strippedName | |
101 | ||
102 | # Include the dependencies of the package | |
103 | cd "$DIR/${packageName}" | |
104 | ${includeDependencies { inherit dependencies; }} | |
105 | cd .. | |
106 | ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} | |
107 | ''; | |
108 | ||
109 | pinpointDependencies = {dependencies, production}: | |
110 | let | |
111 | pinpointDependenciesFromPackageJSON = writeTextFile { | |
112 | name = "pinpointDependencies.js"; | |
113 | text = '' | |
114 | var fs = require('fs'); | |
115 | var path = require('path'); | |
116 | ||
117 | function resolveDependencyVersion(location, name) { | |
118 | if(location == process.env['NIX_STORE']) { | |
119 | return null; | |
120 | } else { | |
121 | var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json"); | |
122 | ||
123 | if(fs.existsSync(dependencyPackageJSON)) { | |
124 | var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON)); | |
125 | ||
126 | if(dependencyPackageObj.name == name) { | |
127 | return dependencyPackageObj.version; | |
128 | } | |
129 | } else { | |
130 | return resolveDependencyVersion(path.resolve(location, ".."), name); | |
131 | } | |
132 | } | |
133 | } | |
134 | ||
135 | function replaceDependencies(dependencies) { | |
136 | if(typeof dependencies == "object" && dependencies !== null) { | |
137 | for(var dependency in dependencies) { | |
138 | var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency); | |
139 | ||
140 | if(resolvedVersion === null) { | |
141 | process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n"); | |
142 | } else { | |
143 | dependencies[dependency] = resolvedVersion; | |
144 | } | |
145 | } | |
146 | } | |
147 | } | |
148 | ||
149 | /* Read the package.json configuration */ | |
150 | var packageObj = JSON.parse(fs.readFileSync('./package.json')); | |
151 | ||
152 | /* Pinpoint all dependencies */ | |
153 | replaceDependencies(packageObj.dependencies); | |
154 | if(process.argv[2] == "development") { | |
155 | replaceDependencies(packageObj.devDependencies); | |
156 | } | |
157 | replaceDependencies(packageObj.optionalDependencies); | |
158 | ||
159 | /* Write the fixed package.json file */ | |
160 | fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2)); | |
161 | ''; | |
162 | }; | |
163 | in | |
164 | '' | |
165 | node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"} | |
166 | ||
167 | ${lib.optionalString (dependencies != []) | |
168 | '' | |
169 | if [ -d node_modules ] | |
170 | then | |
171 | cd node_modules | |
172 | ${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies} | |
173 | cd .. | |
174 | fi | |
175 | ''} | |
176 | ''; | |
177 | ||
178 | # Recursively traverses all dependencies of a package and pinpoints all | |
179 | # dependencies in the package.json file to the versions that are actually | |
180 | # being used. | |
181 | ||
182 | pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args: | |
183 | '' | |
184 | if [ -d "${packageName}" ] | |
185 | then | |
186 | cd "${packageName}" | |
187 | ${pinpointDependencies { inherit dependencies production; }} | |
188 | cd .. | |
189 | ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} | |
190 | fi | |
191 | ''; | |
192 | ||
193 | # Extract the Node.js source code which is used to compile packages with | |
194 | # native bindings | |
195 | nodeSources = runCommand "node-sources" {} '' | |
196 | tar --no-same-owner --no-same-permissions -xf ${nodejs.src} | |
197 | mv node-* $out | |
198 | ''; | |
199 | ||
200 | # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty) | |
201 | addIntegrityFieldsScript = writeTextFile { | |
202 | name = "addintegrityfields.js"; | |
203 | text = '' | |
204 | var fs = require('fs'); | |
205 | var path = require('path'); | |
206 | ||
207 | function augmentDependencies(baseDir, dependencies) { | |
208 | for(var dependencyName in dependencies) { | |
209 | var dependency = dependencies[dependencyName]; | |
210 | ||
211 | // Open package.json and augment metadata fields | |
212 | var packageJSONDir = path.join(baseDir, "node_modules", dependencyName); | |
213 | var packageJSONPath = path.join(packageJSONDir, "package.json"); | |
214 | ||
215 | if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored | |
216 | console.log("Adding metadata fields to: "+packageJSONPath); | |
217 | var packageObj = JSON.parse(fs.readFileSync(packageJSONPath)); | |
218 | ||
219 | if(dependency.integrity) { | |
220 | packageObj["_integrity"] = dependency.integrity; | |
221 | } else { | |
222 | packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads. | |
223 | } | |
224 | ||
225 | if(dependency.resolved) { | |
226 | packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided | |
227 | } else { | |
228 | packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories. | |
229 | } | |
230 | ||
231 | if(dependency.from !== undefined) { // Adopt from property if one has been provided | |
232 | packageObj["_from"] = dependency.from; | |
233 | } | |
234 | ||
235 | fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2)); | |
236 | } | |
237 | ||
238 | // Augment transitive dependencies | |
239 | if(dependency.dependencies !== undefined) { | |
240 | augmentDependencies(packageJSONDir, dependency.dependencies); | |
241 | } | |
242 | } | |
243 | } | |
244 | ||
245 | if(fs.existsSync("./package-lock.json")) { | |
246 | var packageLock = JSON.parse(fs.readFileSync("./package-lock.json")); | |
247 | ||
248 | if(![1, 2].includes(packageLock.lockfileVersion)) { | |
249 | process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n"); | |
250 | process.exit(1); | |
251 | } | |
252 | ||
253 | if(packageLock.dependencies !== undefined) { | |
254 | augmentDependencies(".", packageLock.dependencies); | |
255 | } | |
256 | } | |
257 | ''; | |
258 | }; | |
259 | ||
260 | # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes | |
261 | reconstructPackageLock = writeTextFile { | |
262 | name = "addintegrityfields.js"; | |
263 | text = '' | |
264 | var fs = require('fs'); | |
265 | var path = require('path'); | |
266 | ||
267 | var packageObj = JSON.parse(fs.readFileSync("package.json")); | |
268 | ||
269 | var lockObj = { | |
270 | name: packageObj.name, | |
271 | version: packageObj.version, | |
272 | lockfileVersion: 1, | |
273 | requires: true, | |
274 | dependencies: {} | |
275 | }; | |
276 | ||
277 | function augmentPackageJSON(filePath, dependencies) { | |
278 | var packageJSON = path.join(filePath, "package.json"); | |
279 | if(fs.existsSync(packageJSON)) { | |
280 | var packageObj = JSON.parse(fs.readFileSync(packageJSON)); | |
281 | dependencies[packageObj.name] = { | |
282 | version: packageObj.version, | |
283 | integrity: "sha1-000000000000000000000000000=", | |
284 | dependencies: {} | |
285 | }; | |
286 | processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies); | |
287 | } | |
288 | } | |
289 | ||
290 | function processDependencies(dir, dependencies) { | |
291 | if(fs.existsSync(dir)) { | |
292 | var files = fs.readdirSync(dir); | |
293 | ||
294 | files.forEach(function(entry) { | |
295 | var filePath = path.join(dir, entry); | |
296 | var stats = fs.statSync(filePath); | |
297 | ||
298 | if(stats.isDirectory()) { | |
299 | if(entry.substr(0, 1) == "@") { | |
300 | // When we encounter a namespace folder, augment all packages belonging to the scope | |
301 | var pkgFiles = fs.readdirSync(filePath); | |
302 | ||
303 | pkgFiles.forEach(function(entry) { | |
304 | if(stats.isDirectory()) { | |
305 | var pkgFilePath = path.join(filePath, entry); | |
306 | augmentPackageJSON(pkgFilePath, dependencies); | |
307 | } | |
308 | }); | |
309 | } else { | |
310 | augmentPackageJSON(filePath, dependencies); | |
311 | } | |
312 | } | |
313 | }); | |
314 | } | |
315 | } | |
316 | ||
317 | processDependencies("node_modules", lockObj.dependencies); | |
318 | ||
319 | fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2)); | |
320 | ''; | |
321 | }; | |
322 | ||
323 | prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}: | |
324 | let | |
325 | forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com"; | |
326 | in | |
327 | '' | |
328 | # Pinpoint the versions of all dependencies to the ones that are actually being used | |
329 | echo "pinpointing versions of dependencies..." | |
330 | source $pinpointDependenciesScriptPath | |
331 | ||
332 | # Patch the shebangs of the bundled modules to prevent them from | |
333 | # calling executables outside the Nix store as much as possible | |
334 | patchShebangs . | |
335 | ||
336 | # Deploy the Node.js package by running npm install. Since the | |
337 | # dependencies have been provided already by ourselves, it should not | |
338 | # attempt to install them again, which is good, because we want to make | |
339 | # it Nix's responsibility. If it needs to install any dependencies | |
340 | # anyway (e.g. because the dependency parameters are | |
341 | # incomplete/incorrect), it fails. | |
342 | # | |
343 | # The other responsibilities of NPM are kept -- version checks, build | |
344 | # steps, postprocessing etc. | |
345 | ||
346 | export HOME=$TMPDIR | |
347 | cd "${packageName}" | |
348 | runHook preRebuild | |
349 | ||
350 | ${lib.optionalString bypassCache '' | |
351 | ${lib.optionalString reconstructLock '' | |
352 | if [ -f package-lock.json ] | |
353 | then | |
354 | echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!" | |
355 | echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!" | |
356 | rm package-lock.json | |
357 | else | |
358 | echo "No package-lock.json file found, reconstructing..." | |
359 | fi | |
360 | ||
361 | node ${reconstructPackageLock} | |
362 | ''} | |
363 | ||
364 | node ${addIntegrityFieldsScript} | |
365 | ''} | |
366 | ||
367 | npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild | |
368 | ||
369 | if [ "''${dontNpmInstall-}" != "1" ] | |
370 | then | |
371 | # NPM tries to download packages even when they already exist if npm-shrinkwrap is used. | |
372 | rm -f npm-shrinkwrap.json | |
373 | ||
374 | npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install | |
375 | fi | |
376 | ''; | |
377 | ||
378 | # Builds and composes an NPM package including all its dependencies | |
379 | buildNodePackage = | |
380 | { name | |
381 | , packageName | |
382 | , version | |
383 | , dependencies ? [] | |
384 | , buildInputs ? [] | |
385 | , production ? true | |
386 | , npmFlags ? "" | |
387 | , dontNpmInstall ? false | |
388 | , bypassCache ? false | |
389 | , reconstructLock ? false | |
390 | , preRebuild ? "" | |
391 | , dontStrip ? true | |
392 | , unpackPhase ? "true" | |
393 | , buildPhase ? "true" | |
394 | , ... }@args: | |
395 | ||
396 | let | |
397 | extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ]; | |
398 | in | |
399 | stdenv.mkDerivation ({ | |
400 | name = "node_${name}-${version}"; | |
401 | buildInputs = [ tarWrapper python nodejs ] | |
402 | ++ lib.optional (stdenv.isLinux) utillinux | |
403 | ++ lib.optional (stdenv.isDarwin) libtool | |
404 | ++ buildInputs; | |
405 | ||
406 | inherit nodejs; | |
407 | ||
408 | inherit dontStrip; # Stripping may fail a build for some package deployments | |
409 | inherit dontNpmInstall preRebuild unpackPhase buildPhase; | |
410 | ||
411 | compositionScript = composePackage args; | |
412 | pinpointDependenciesScript = pinpointDependenciesOfPackage args; | |
413 | ||
414 | passAsFile = [ "compositionScript" "pinpointDependenciesScript" ]; | |
415 | ||
416 | installPhase = '' | |
417 | # Create and enter a root node_modules/ folder | |
418 | mkdir -p $out/lib/node_modules | |
419 | cd $out/lib/node_modules | |
420 | ||
421 | # Compose the package and all its dependencies | |
422 | source $compositionScriptPath | |
423 | ||
424 | ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }} | |
425 | ||
426 | # Create symlink to the deployed executable folder, if applicable | |
427 | if [ -d "$out/lib/node_modules/.bin" ] | |
428 | then | |
429 | ln -s $out/lib/node_modules/.bin $out/bin | |
430 | fi | |
431 | ||
432 | # Create symlinks to the deployed manual page folders, if applicable | |
433 | if [ -d "$out/lib/node_modules/${packageName}/man" ] | |
434 | then | |
435 | mkdir -p $out/share | |
436 | for dir in "$out/lib/node_modules/${packageName}/man/"* | |
437 | do | |
438 | mkdir -p $out/share/man/$(basename "$dir") | |
439 | for page in "$dir"/* | |
440 | do | |
441 | ln -s $page $out/share/man/$(basename "$dir") | |
442 | done | |
443 | done | |
444 | fi | |
445 | ||
446 | # Run post install hook, if provided | |
447 | runHook postInstall | |
448 | ''; | |
449 | } // extraArgs); | |
450 | ||
451 | # Builds a node environment (a node_modules folder and a set of binaries) | |
452 | buildNodeDependencies = | |
453 | { name | |
454 | , packageName | |
455 | , version | |
456 | , src | |
457 | , dependencies ? [] | |
458 | , buildInputs ? [] | |
459 | , production ? true | |
460 | , npmFlags ? "" | |
461 | , dontNpmInstall ? false | |
462 | , bypassCache ? false | |
463 | , reconstructLock ? false | |
464 | , dontStrip ? true | |
465 | , unpackPhase ? "true" | |
466 | , buildPhase ? "true" | |
467 | , ... }@args: | |
468 | ||
469 | let | |
470 | extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ]; | |
471 | in | |
472 | stdenv.mkDerivation ({ | |
473 | name = "node-dependencies-${name}-${version}"; | |
474 | ||
475 | buildInputs = [ tarWrapper python nodejs ] | |
476 | ++ lib.optional (stdenv.isLinux) utillinux | |
477 | ++ lib.optional (stdenv.isDarwin) libtool | |
478 | ++ buildInputs; | |
479 | ||
480 | inherit dontStrip; # Stripping may fail a build for some package deployments | |
481 | inherit dontNpmInstall unpackPhase buildPhase; | |
482 | ||
483 | includeScript = includeDependencies { inherit dependencies; }; | |
484 | pinpointDependenciesScript = pinpointDependenciesOfPackage args; | |
485 | ||
486 | passAsFile = [ "includeScript" "pinpointDependenciesScript" ]; | |
487 | ||
488 | installPhase = '' | |
489 | mkdir -p $out/${packageName} | |
490 | cd $out/${packageName} | |
491 | ||
492 | source $includeScriptPath | |
493 | ||
494 | # Create fake package.json to make the npm commands work properly | |
495 | cp ${src}/package.json . | |
496 | chmod 644 package.json | |
497 | ${lib.optionalString bypassCache '' | |
498 | if [ -f ${src}/package-lock.json ] | |
499 | then | |
500 | cp ${src}/package-lock.json . | |
501 | fi | |
502 | ''} | |
503 | ||
504 | # Go to the parent folder to make sure that all packages are pinpointed | |
505 | cd .. | |
506 | ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} | |
507 | ||
508 | ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }} | |
509 | ||
510 | # Expose the executables that were installed | |
511 | cd .. | |
512 | ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} | |
513 | ||
514 | mv ${packageName} lib | |
515 | ln -s $out/lib/node_modules/.bin $out/bin | |
516 | ''; | |
517 | } // extraArgs); | |
518 | ||
519 | # Builds a development shell | |
520 | buildNodeShell = | |
521 | { name | |
522 | , packageName | |
523 | , version | |
524 | , src | |
525 | , dependencies ? [] | |
526 | , buildInputs ? [] | |
527 | , production ? true | |
528 | , npmFlags ? "" | |
529 | , dontNpmInstall ? false | |
530 | , bypassCache ? false | |
531 | , reconstructLock ? false | |
532 | , dontStrip ? true | |
533 | , unpackPhase ? "true" | |
534 | , buildPhase ? "true" | |
535 | , ... }@args: | |
536 | ||
537 | let | |
538 | nodeDependencies = buildNodeDependencies args; | |
539 | in | |
540 | stdenv.mkDerivation { | |
541 | name = "node-shell-${name}-${version}"; | |
542 | ||
543 | buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs; | |
544 | buildCommand = '' | |
545 | mkdir -p $out/bin | |
546 | cat > $out/bin/shell <<EOF | |
547 | #! ${stdenv.shell} -e | |
548 | $shellHook | |
549 | exec ${stdenv.shell} | |
550 | EOF | |
551 | chmod +x $out/bin/shell | |
552 | ''; | |
553 | ||
554 | # Provide the dependencies in a development shell through the NODE_PATH environment variable | |
555 | inherit nodeDependencies; | |
556 | shellHook = lib.optionalString (dependencies != []) '' | |
557 | export NODE_PATH=${nodeDependencies}/lib/node_modules | |
558 | export PATH="${nodeDependencies}/bin:$PATH" | |
559 | ''; | |
560 | }; | |
in
{
  # Every builder is exposed as overridable so that callers can adjust
  # individual arguments via .override { ... }.
  buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
  buildNodePackage = lib.makeOverridable buildNodePackage;
  buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
  buildNodeShell = lib.makeOverridable buildNodeShell;
}