Diffstat (limited to 'lib/node-env.nix')
 -rw-r--r--  lib/node-env.nix  542
 1 file changed, 542 insertions, 0 deletions
diff --git a/lib/node-env.nix b/lib/node-env.nix
new file mode 100644
index 00000000..720e0cc0
--- /dev/null
+++ b/lib/node-env.nix
@@ -0,0 +1,542 @@
# This file originates from node2nix

{stdenv, nodejs, python2, utillinux, libtool, runCommand, writeTextFile}:

let
  python = if nodejs ? python then nodejs.python else python2;

  # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
  tarWrapper = runCommand "tarWrapper" {} ''
    mkdir -p $out/bin

    cat > $out/bin/tar <<EOF
    #! ${stdenv.shell} -e
    $(type -p tar) "\$@" --warning=no-unknown-keyword
    EOF

    chmod +x $out/bin/tar
  '';

  # Function that generates a TGZ file from a NPM project
  buildNodeSourceDist =
    { name, version, src, ... }:

    stdenv.mkDerivation {
      name = "node-tarball-${name}-${version}";
      inherit src;
      buildInputs = [ nodejs ];
      buildPhase = ''
        export HOME=$TMPDIR
        tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
      '';
      installPhase = ''
        mkdir -p $out/tarballs
        mv $tgzFile $out/tarballs
        mkdir -p $out/nix-support
        echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
      '';
    };

  includeDependencies = {dependencies}:
    stdenv.lib.optionalString (dependencies != [])
      (stdenv.lib.concatMapStrings (dependency:
        ''
          # Bundle the dependencies of the package
          mkdir -p node_modules
          cd node_modules

          # Only include dependencies if they don't exist. They may also be bundled in the package.
          if [ ! -e "${dependency.name}" ]
          then
              ${composePackage dependency}
          fi

          cd ..
        ''
      ) dependencies);

  # Recursively composes the dependencies of a package
  composePackage = { name, packageName, src, dependencies ? [], ... }@args:
    ''
      DIR=$(pwd)
      cd $TMPDIR

      unpackFile ${src}

      # Make the base dir in which the target dependency resides first
      mkdir -p "$(dirname "$DIR/${packageName}")"

      if [ -f "${src}" ]
      then
          # Figure out what directory has been unpacked
          packageDir="$(find . -maxdepth 1 -type d | tail -1)"

          # Restore write permissions to make building work
          find "$packageDir" -type d -print0 | xargs -0 chmod u+x
          chmod -R u+w "$packageDir"

          # Move the extracted tarball into the output folder
          mv "$packageDir" "$DIR/${packageName}"
      elif [ -d "${src}" ]
      then
          # Get a stripped name (without hash) of the source directory.
          # On old nixpkgs it's already set internally.
          if [ -z "$strippedName" ]
          then
              strippedName="$(stripHash ${src})"
          fi

          # Restore write permissions to make building work
          chmod -R u+w "$strippedName"

          # Move the extracted directory into the output folder
          mv "$strippedName" "$DIR/${packageName}"
      fi

      # Unset the stripped name to not confuse the next unpack step
      unset strippedName

      # Include the dependencies of the package
      cd "$DIR/${packageName}"
      ${includeDependencies { inherit dependencies; }}
      cd ..
      ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
    '';

  pinpointDependencies = {dependencies, production}:
    let
      pinpointDependenciesFromPackageJSON = writeTextFile {
        name = "pinpointDependencies.js";
        text = ''
          var fs = require('fs');
          var path = require('path');

          function resolveDependencyVersion(location, name) {
              if(location == process.env['NIX_STORE']) {
                  return null;
              } else {
                  var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");

                  if(fs.existsSync(dependencyPackageJSON)) {
                      var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));

                      if(dependencyPackageObj.name == name) {
                          return dependencyPackageObj.version;
                      }
                  } else {
                      return resolveDependencyVersion(path.resolve(location, ".."), name);
                  }
              }
          }

          function replaceDependencies(dependencies) {
              if(typeof dependencies == "object" && dependencies !== null) {
                  for(var dependency in dependencies) {
                      var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);

                      if(resolvedVersion === null) {
                          process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
                      } else {
                          dependencies[dependency] = resolvedVersion;
                      }
                  }
              }
          }

          /* Read the package.json configuration */
          var packageObj = JSON.parse(fs.readFileSync('./package.json'));

          /* Pinpoint all dependencies */
          replaceDependencies(packageObj.dependencies);
          if(process.argv[2] == "development") {
              replaceDependencies(packageObj.devDependencies);
          }
          replaceDependencies(packageObj.optionalDependencies);

          /* Write the fixed package.json file */
          fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
        '';
      };
    in
    ''
      node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}

      ${stdenv.lib.optionalString (dependencies != [])
        ''
          if [ -d node_modules ]
          then
              cd node_modules
              ${stdenv.lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
              cd ..
          fi
        ''}
    '';

  # Recursively traverses all dependencies of a package and pinpoints all
  # dependencies in the package.json file to the versions that are actually
  # being used.

  pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
    ''
      if [ -d "${packageName}" ]
      then
          cd "${packageName}"
          ${pinpointDependencies { inherit dependencies production; }}
          cd ..
          ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
      fi
    '';
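
  # Illustration (not part of the original node2nix file): pinpointing rewrites
  # loose semver ranges in a package.json to the exact versions that were
  # composed into node_modules/. For a hypothetical dependency this turns
  #   "dependencies": { "semver": "^5.0.0" }
  # into
  #   "dependencies": { "semver": "5.6.0" }
  # so that the npm invocations further below accept the pre-installed tree
  # instead of trying to resolve and download packages themselves.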

  # Extract the Node.js source code which is used to compile packages with
  # native bindings
  nodeSources = runCommand "node-sources" {} ''
    tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
    mv node-* $out
  '';

  # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
  addIntegrityFieldsScript = writeTextFile {
    name = "addintegrityfields.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      function augmentDependencies(baseDir, dependencies) {
          for(var dependencyName in dependencies) {
              var dependency = dependencies[dependencyName];

              // Open package.json and augment metadata fields
              var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
              var packageJSONPath = path.join(packageJSONDir, "package.json");

              if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
                  console.log("Adding metadata fields to: "+packageJSONPath);
                  var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));

                  if(dependency.integrity) {
                      packageObj["_integrity"] = dependency.integrity;
                  } else {
                      packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
                  }

                  packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
                  fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
              }

              // Augment transitive dependencies
              if(dependency.dependencies !== undefined) {
                  augmentDependencies(packageJSONDir, dependency.dependencies);
              }
          }
      }

      if(fs.existsSync("./package-lock.json")) {
          var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));

          if(packageLock.lockfileVersion !== 1) {
              process.stderr.write("Sorry, I only understand lock file version 1!\n");
              process.exit(1);
          }

          if(packageLock.dependencies !== undefined) {
              augmentDependencies(".", packageLock.dependencies);
          }
      }
    '';
  };
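
  # Illustration (not part of the original file, values hypothetical): after the
  # script above runs, each deployed dependency's package.json carries
  # lockfile-derived metadata along the lines of
  #   "_integrity": "sha512-...",   // copied from package-lock.json, or a dummy sha1 for Git dependencies
  #   "_resolved": "1.3.0"          // the plain version string, so npm does not try to fetch or clone anything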

  # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
  reconstructPackageLock = writeTextFile {
    name = "reconstructpackagelock.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      var packageObj = JSON.parse(fs.readFileSync("package.json"));

      var lockObj = {
          name: packageObj.name,
          version: packageObj.version,
          lockfileVersion: 1,
          requires: true,
          dependencies: {}
      };

      function augmentPackageJSON(filePath, dependencies) {
          var packageJSON = path.join(filePath, "package.json");
          if(fs.existsSync(packageJSON)) {
              var packageObj = JSON.parse(fs.readFileSync(packageJSON));
              dependencies[packageObj.name] = {
                  version: packageObj.version,
                  integrity: "sha1-000000000000000000000000000=",
                  dependencies: {}
              };
              processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
          }
      }

      function processDependencies(dir, dependencies) {
          if(fs.existsSync(dir)) {
              var files = fs.readdirSync(dir);

              files.forEach(function(entry) {
                  var filePath = path.join(dir, entry);
                  var stats = fs.statSync(filePath);

                  if(stats.isDirectory()) {
                      if(entry.substr(0, 1) == "@") {
                          // When we encounter a namespace folder, augment all packages belonging to the scope
                          var pkgFiles = fs.readdirSync(filePath);

                          pkgFiles.forEach(function(entry) {
                              var pkgFilePath = path.join(filePath, entry);

                              // Check the stats of the scoped package directory itself
                              // (the outer 'stats' refers to the scope folder)
                              if(fs.statSync(pkgFilePath).isDirectory()) {
                                  augmentPackageJSON(pkgFilePath, dependencies);
                              }
                          });
                      } else {
                          augmentPackageJSON(filePath, dependencies);
                      }
                  }
              });
          }
      }

      processDependencies("node_modules", lockObj.dependencies);

      fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
    '';
  };

  # Builds and composes an NPM package including all its dependencies
  buildNodePackage =
    { name
    , packageName
    , version
    , dependencies ? []
    , buildInputs ? []
    , production ? true
    , npmFlags ? ""
    , dontNpmInstall ? false
    , bypassCache ? false
    , preRebuild ? ""
    , dontStrip ? true
    , unpackPhase ? "true"
    , buildPhase ? "true"
    , ... }@args:

    let
      forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ];
    in
    stdenv.mkDerivation ({
      name = "node-${name}-${version}";
      buildInputs = [ tarWrapper python nodejs ]
        ++ stdenv.lib.optional (stdenv.isLinux) utillinux
        ++ stdenv.lib.optional (stdenv.isDarwin) libtool
        ++ buildInputs;

      inherit dontStrip; # Stripping may fail a build for some package deployments
      inherit dontNpmInstall preRebuild unpackPhase buildPhase;

      compositionScript = composePackage args;
      pinpointDependenciesScript = pinpointDependenciesOfPackage args;

      passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];

      installPhase = ''
        # Create and enter a root node_modules/ folder
        mkdir -p $out/lib/node_modules
        cd $out/lib/node_modules

        # Compose the package and all its dependencies
        source $compositionScriptPath

        # Pinpoint the versions of all dependencies to the ones that are actually being used
        echo "pinpointing versions of dependencies..."
        source $pinpointDependenciesScriptPath

        # Patch the shebangs of the bundled modules to prevent them from
        # calling executables outside the Nix store as much as possible
        patchShebangs .

        # Deploy the Node.js package by running npm install. Since the
        # dependencies have been provided already by ourselves, it should not
        # attempt to install them again, which is good, because we want to make
        # it Nix's responsibility. If it needs to install any dependencies
        # anyway (e.g. because the dependency parameters are
        # incomplete/incorrect), it fails.
        #
        # The other responsibilities of NPM are kept -- version checks, build
        # steps, postprocessing etc.

        export HOME=$TMPDIR
        cd "${packageName}"
        runHook preRebuild

        ${stdenv.lib.optionalString bypassCache ''
          if [ ! -f package-lock.json ]
          then
              echo "No package-lock.json file found, reconstructing..."
              node ${reconstructPackageLock}
          fi

          node ${addIntegrityFieldsScript}
        ''}

        npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild

        if [ "$dontNpmInstall" != "1" ]
        then
            # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
            rm -f npm-shrinkwrap.json

            npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
        fi

        # Create symlink to the deployed executable folder, if applicable
        if [ -d "$out/lib/node_modules/.bin" ]
        then
            ln -s $out/lib/node_modules/.bin $out/bin
        fi

        # Create symlinks to the deployed manual page folders, if applicable
        if [ -d "$out/lib/node_modules/${packageName}/man" ]
        then
            mkdir -p $out/share
            for dir in "$out/lib/node_modules/${packageName}/man/"*
            do
                mkdir -p $out/share/man/$(basename "$dir")
                for page in "$dir"/*
                do
                    ln -s $page $out/share/man/$(basename "$dir")
                done
            done
        fi

        # Run post install hook, if provided
        runHook postInstall
      '';
    } // extraArgs);
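
  # Illustration only (hypothetical, not part of the original file): a
  # node2nix-generated node-packages.nix invokes buildNodePackage roughly like
  # this, with src typically a fetchurl of the registry tarball and
  # dependencies a list of similarly-shaped attribute sets:
  #
  #   nodeEnv.buildNodePackage {
  #     name = "example-package";
  #     packageName = "example-package";
  #     version = "1.0.0";
  #     src = fetchurl {
  #       url = "https://registry.npmjs.org/example-package/-/example-package-1.0.0.tgz";
  #       sha512 = "...";
  #     };
  #     dependencies = [];
  #     production = true;
  #     bypassCache = false;
  #   }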

  # Builds a development shell
  buildNodeShell =
    { name
    , packageName
    , version
    , src
    , dependencies ? []
    , buildInputs ? []
    , production ? true
    , npmFlags ? ""
    , dontNpmInstall ? false
    , bypassCache ? false
    , dontStrip ? true
    , unpackPhase ? "true"
    , buildPhase ? "true"
    , ... }@args:

    let
      forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";

      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];

      nodeDependencies = stdenv.mkDerivation ({
        name = "node-dependencies-${name}-${version}";

        buildInputs = [ tarWrapper python nodejs ]
          ++ stdenv.lib.optional (stdenv.isLinux) utillinux
          ++ stdenv.lib.optional (stdenv.isDarwin) libtool
          ++ buildInputs;

        inherit dontStrip; # Stripping may fail a build for some package deployments
        inherit dontNpmInstall unpackPhase buildPhase;

        includeScript = includeDependencies { inherit dependencies; };
        pinpointDependenciesScript = pinpointDependenciesOfPackage args;

        passAsFile = [ "includeScript" "pinpointDependenciesScript" ];

        installPhase = ''
          mkdir -p $out/${packageName}
          cd $out/${packageName}

          source $includeScriptPath

          # Create fake package.json to make the npm commands work properly
          cp ${src}/package.json .
          chmod 644 package.json
          ${stdenv.lib.optionalString bypassCache ''
            if [ -f ${src}/package-lock.json ]
            then
                cp ${src}/package-lock.json .
            fi
          ''}

          # Pinpoint the versions of all dependencies to the ones that are actually being used
          echo "pinpointing versions of dependencies..."
          cd ..
          ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}

          source $pinpointDependenciesScriptPath
          cd ${packageName}

          # Patch the shebangs of the bundled modules to prevent them from
          # calling executables outside the Nix store as much as possible
          patchShebangs .

          export HOME=$PWD

          ${stdenv.lib.optionalString bypassCache ''
            if [ ! -f package-lock.json ]
            then
                echo "No package-lock.json file found, reconstructing..."
                node ${reconstructPackageLock}
            fi

            node ${addIntegrityFieldsScript}
          ''}

          npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild

          ${stdenv.lib.optionalString (!dontNpmInstall) ''
            # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
            rm -f npm-shrinkwrap.json

            npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
          ''}

          cd ..
          ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}

          mv ${packageName} lib
          ln -s $out/lib/node_modules/.bin $out/bin
        '';
      } // extraArgs);
    in
    stdenv.mkDerivation {
      name = "node-shell-${name}-${version}";

      buildInputs = [ python nodejs ] ++ stdenv.lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
      buildCommand = ''
        mkdir -p $out/bin
        cat > $out/bin/shell <<EOF
        #! ${stdenv.shell} -e
        $shellHook
        exec ${stdenv.shell}
        EOF
        chmod +x $out/bin/shell
      '';

      # Provide the dependencies in a development shell through the NODE_PATH environment variable
      inherit nodeDependencies;
      shellHook = stdenv.lib.optionalString (dependencies != []) ''
        export NODE_PATH=$nodeDependencies/lib/node_modules
      '';
    };
in
{
  buildNodeSourceDist = stdenv.lib.makeOverridable buildNodeSourceDist;
  buildNodePackage = stdenv.lib.makeOverridable buildNodePackage;
  buildNodeShell = stdenv.lib.makeOverridable buildNodeShell;
}
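
Usage sketch (not part of the diff above): node2nix normally emits a default.nix
next to node-env.nix that wires these three functions together. The file name
node-packages.nix and the nodejs attribute below are assumptions about such a
generated setup, not something this change introduces:

  { pkgs ? import <nixpkgs> {}, nodejs ? pkgs.nodejs }:

  let
    nodeEnv = import ./node-env.nix {
      inherit (pkgs) stdenv python2 utillinux runCommand writeTextFile;
      inherit nodejs;
      # libtool is only needed on Darwin; cctools provides it there
      libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
    };
  in
    # The generated package set calls nodeEnv.buildNodePackage and
    # nodeEnv.buildNodeShell for each of its entries.
    import ./node-packages.nix {
      inherit (pkgs) fetchurl fetchgit;
      inherit nodeEnv;
    }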