author     Ismaël Bouya <ismael.bouya@normalesup.org>  2023-10-04 01:35:06 +0200
committer  Ismaël Bouya <ismael.bouya@normalesup.org>  2023-10-04 02:11:48 +0200
commit     1a64deeb894dc95e2645a75771732c6cc53a79ad (patch)
tree       1b9df4838f894577a09b9b260151756272efeb53 /lib
parent     fa25ffd4583cc362075cd5e1b4130f33306103f0 (diff)
Squash changes containing private information

There were many changes since the previous commit, but a lot of them contained personal information about users. All those changes were squashed into a single commit (the history is kept in a different place) and the private information was moved into a separate private repository.
Diffstat (limited to 'lib')
-rw-r--r--  lib/default.nix                16
-rw-r--r--  lib/flake-compat-patched.nix  190
-rw-r--r--  lib/flake-compat.nix            8
-rw-r--r--  lib/node-env.nix              542
-rw-r--r--  lib/private/default.nix        20
5 files changed, 0 insertions, 776 deletions
diff --git a/lib/default.nix b/lib/default.nix
deleted file mode 100644
index 7b392f1..0000000
--- a/lib/default.nix
+++ /dev/null
@@ -1,16 +0,0 @@
-{ pkgs }:
-with pkgs;
-rec {
-  flakeCompat = import ./flake-compat.nix;
-  nodeEnv = import ./node-env.nix;
-
-  fetchedGithub = path:
-    let
-      json = lib.importJSON path;
-    in rec {
-      version = json.tag;
-      pname = json.meta.name;
-      name = "${pname}-${version}";
-      src = fetchFromGitHub json.github;
-    };
-} // (if builtins.pathExists ./private then callPackage ./private {} else {})
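
For reference, 'fetchedGithub' consumed a JSON file whose shape follows directly from the code above: a 'tag', a 'meta.name', and a 'github' attrset passed verbatim to nixpkgs' fetchFromGitHub. A minimal sketch of a call site; the file name and its contents are hypothetical placeholders:

  # some-package.json would contain, e.g.:
  #   { "tag": "v1.2.3",
  #     "meta": { "name": "some-package" },
  #     "github": { "owner": "some-owner", "repo": "some-package",
  #                 "rev": "<commit hash>", "sha256": "<source hash>" } }
  let
    pkgs = import <nixpkgs> {};
    myLib = import ./lib { inherit pkgs; };          # the file deleted above
  in (myLib.fetchedGithub ./some-package.json).src   # a fetchFromGitHub result
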
diff --git a/lib/flake-compat-patched.nix b/lib/flake-compat-patched.nix
deleted file mode 100644
index 217a99f..0000000
--- a/lib/flake-compat-patched.nix
+++ /dev/null
@@ -1,190 +0,0 @@
-# Compatibility function to allow flakes to be used by
-# non-flake-enabled Nix versions. Given a source tree containing a
-# 'flake.nix' and 'flake.lock' file, it fetches the flake inputs and
-# calls the flake's 'outputs' function. It then returns an attrset
-# containing 'defaultNix' (to be used in 'default.nix'), 'shellNix'
-# (to be used in 'shell.nix').
-
-{ src, system ? builtins.currentSystem or "unknown-system" }:
-
-let
-
-  lockFilePath = src + "/flake.lock";
-
-  lockFile = builtins.fromJSON (builtins.readFile lockFilePath);
-
-  fetchTree =
-    info:
-    if info.type == "github" then
-      { outPath = fetchTarball "https://api.${info.host or "github.com"}/repos/${info.owner}/${info.repo}/tarball/${info.rev}";
-        rev = info.rev;
-        shortRev = builtins.substring 0 7 info.rev;
-        lastModified = info.lastModified;
-        lastModifiedDate = formatSecondsSinceEpoch info.lastModified;
-        narHash = info.narHash;
-      }
-    else if info.type == "git" then
-      { outPath =
-          builtins.fetchGit
-            ({ url = info.url; }
-             // (if info ? rev then { inherit (info) rev; } else {})
-             // (if info ? ref then { inherit (info) ref; } else {})
-            );
-        lastModified = info.lastModified;
-        lastModifiedDate = formatSecondsSinceEpoch info.lastModified;
-        narHash = info.narHash;
-      } // (if info ? rev then {
-        rev = info.rev;
-        shortRev = builtins.substring 0 7 info.rev;
-      } else {
-      })
-    else if info.type == "path" then
-      { outPath = builtins.path { path =
-          if builtins.substring 0 1 info.path == "."
-          then builtins.toString src + "/" + info.path
-          else info.path;
-        };
-        narHash = info.narHash;
-      }
-    else if info.type == "tarball" then
-      { outPath = fetchTarball info.url;
-        narHash = info.narHash;
-      }
-    else if info.type == "gitlab" then
-      { inherit (info) rev narHash lastModified;
-        outPath = fetchTarball "https://${info.host or "gitlab.com"}/api/v4/projects/${info.owner}%2F${info.repo}/repository/archive.tar.gz?sha=${info.rev}";
-        shortRev = builtins.substring 0 7 info.rev;
-      }
-    else
-      # FIXME: add Mercurial, tarball inputs.
-      throw "flake input has unsupported input type '${info.type}'";
-
-  callFlake4 = flakeSrc: locks:
-    let
-      flake = import (flakeSrc + "/flake.nix");
-
-      inputs = builtins.mapAttrs (n: v:
-        if v.flake or true
-        then callFlake4 (fetchTree (v.locked // v.info)) v.inputs
-        else fetchTree (v.locked // v.info)) locks;
-
-      outputs = flakeSrc // (flake.outputs (inputs // {self = outputs;}));
-    in
-      assert flake.edition == 201909;
-      outputs;
-
-  callLocklessFlake = flakeSrc:
-    let
-      flake = import (flakeSrc + "/flake.nix");
-      outputs = flakeSrc // (flake.outputs ({ self = outputs; }));
-    in outputs;
-
-  rootSrc = let
-    # Try to clean the source tree by using fetchGit, if this source
-    # tree is a valid git repository.
-    tryFetchGit = src:
-      if isGit && !isShallow
-      then
-        let res = builtins.fetchGit src;
-        in if res.rev == "0000000000000000000000000000000000000000" then removeAttrs res ["rev" "shortRev"] else res
-      else { outPath = src; };
-    # NB git worktrees have a file for .git, so we don't check the type of .git
-    isGit = builtins.pathExists (src + "/.git");
-    isShallow = builtins.pathExists (src + "/.git/shallow");
-
-  in
-    { lastModified = 0; lastModifiedDate = formatSecondsSinceEpoch 0; }
-    // (if src ? outPath then src else tryFetchGit src);
-
-  # Format number of seconds in the Unix epoch as %Y%m%d%H%M%S.
-  formatSecondsSinceEpoch = t:
-    let
-      rem = x: y: x - x / y * y;
-      days = t / 86400;
-      secondsInDay = rem t 86400;
-      hours = secondsInDay / 3600;
-      minutes = (rem secondsInDay 3600) / 60;
-      seconds = rem t 60;
-
-      # Courtesy of https://stackoverflow.com/a/32158604.
-      z = days + 719468;
-      era = (if z >= 0 then z else z - 146096) / 146097;
-      doe = z - era * 146097;
-      yoe = (doe - doe / 1460 + doe / 36524 - doe / 146096) / 365;
-      y = yoe + era * 400;
-      doy = doe - (365 * yoe + yoe / 4 - yoe / 100);
-      mp = (5 * doy + 2) / 153;
-      d = doy - (153 * mp + 2) / 5 + 1;
-      m = mp + (if mp < 10 then 3 else -9);
-      y' = y + (if m <= 2 then 1 else 0);
-
-      pad = s: if builtins.stringLength s < 2 then "0" + s else s;
-    in "${toString y'}${pad (toString m)}${pad (toString d)}${pad (toString hours)}${pad (toString minutes)}${pad (toString seconds)}";
-
-  allNodes =
-    builtins.mapAttrs
-      (key: node:
-        let
-          sourceInfo =
-            if key == lockFile.root
-            then rootSrc
-            else fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);
-
-          subdir = if key == lockFile.root then "" else node.locked.dir or "";
-
-          flake = import (sourceInfo + (if subdir != "" then "/" else "") + subdir + "/flake.nix");
-
-          inputs = builtins.mapAttrs
-            (inputName: inputSpec: allNodes.${resolveInput inputSpec})
-            (node.inputs or {});
-
-          # Resolve an input spec into a node name. An input spec is
-          # either a node name, or a 'follows' path from the root
-          # node.
-          resolveInput = inputSpec:
-            if builtins.isList inputSpec
-            then getInputByPath lockFile.root inputSpec
-            else inputSpec;
-
-          # Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
-          # root node, returning the final node.
-          getInputByPath = nodeName: path:
-            if path == []
-            then nodeName
-            else
-              getInputByPath
-                # Since this could be a 'follows' input, call resolveInput.
-                (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
-                (builtins.tail path);
-
-          outputs = flake.outputs (inputs // { self = result; });
-
-          result = outputs // sourceInfo // { inherit inputs; inherit outputs; inherit sourceInfo; };
-        in
-          if node.flake or true then
-            assert builtins.isFunction flake.outputs;
-            result
-          else
-            sourceInfo
-      )
-      lockFile.nodes;
-
-  result =
-    if !(builtins.pathExists lockFilePath)
-    then callLocklessFlake rootSrc
-    else if lockFile.version == 4
-    then callFlake4 rootSrc (lockFile.inputs)
-    else if lockFile.version >= 5 && lockFile.version <= 7
-    then allNodes.${lockFile.root}
-    else throw "lock file '${lockFilePath}' has unsupported version ${toString lockFile.version}";
-
-in
-  rec {
-    defaultNix =
-      result
-      // (if result ? defaultPackage.${system} then { default = result.defaultPackage.${system}; } else {});
-
-    shellNix =
-      defaultNix
-      // (if result ? devShell.${system} then { default = result.devShell.${system}; } else {});
-  }
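
A quick sanity check on the 'formatSecondsSinceEpoch' algorithm above (the civil-from-days conversion credited to the Stack Overflow answer), worked by hand with Nix's truncating integer division; the function is internal to the file, so this is only an illustration:

  # For t = 0: days = 0, so z = 719468; era = 719468 / 146097 = 4;
  # doe = 719468 - 4 * 146097 = 135080; yoe = (135080 - 92 + 3 - 0) / 365 = 369;
  # y = 369 + 4 * 400 = 1969; doy = 135080 - (365 * 369 + 92 - 3) = 306;
  # mp = (5 * 306 + 2) / 153 = 10; d = 306 - 1532 / 5 + 1 = 1; m = 10 - 9 = 1;
  # y' = 1969 + 1 = 1970, giving:
  #   formatSecondsSinceEpoch 0     => "19700101000000"
  #   formatSecondsSinceEpoch 86399 => "19700101235959"
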
diff --git a/lib/flake-compat.nix b/lib/flake-compat.nix
deleted file mode 100644
index d3daa10..0000000
--- a/lib/flake-compat.nix
+++ /dev/null
@@ -1,8 +0,0 @@
-src:
-(import ./flake-compat-patched.nix { inherit src; }).defaultNix
-# Until https://github.com/edolstra/flake-compat/pull/18 is accepted
-# (import (
-#   fetchTarball {
-#     url = "https://github.com/edolstra/flake-compat/archive/99f1c2157fba4bfe6211a321fd0ee43199025dbf.tar.gz";
-#     sha256 = "0x2jn3vrawwv9xp15674wjz9pixwjyj3j771izayl962zziivbx2";
-#   }) { inherit src; }).defaultNix
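
With this wrapper the compatibility layer reduced to a one-argument function, so a non-flake 'default.nix' at a repository root could be as small as this sketch (assuming a 'flake.nix' sits next to it):

  import ./lib/flake-compat.nix ./.

When the shell outputs were needed instead, the patched file could be used directly, e.g. (import ./lib/flake-compat-patched.nix { src = ./.; }).shellNix in a 'shell.nix'.
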
diff --git a/lib/node-env.nix b/lib/node-env.nix
deleted file mode 100644
index 720e0cc..0000000
--- a/lib/node-env.nix
+++ /dev/null
@@ -1,542 +0,0 @@
1# This file originates from node2nix
2
3{stdenv, nodejs, python2, utillinux, libtool, runCommand, writeTextFile}:
4
5let
6 python = if nodejs ? python then nodejs.python else python2;
7
8 # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
9 tarWrapper = runCommand "tarWrapper" {} ''
10 mkdir -p $out/bin
11
12 cat > $out/bin/tar <<EOF
13 #! ${stdenv.shell} -e
14 $(type -p tar) "\$@" --warning=no-unknown-keyword
15 EOF
16
17 chmod +x $out/bin/tar
18 '';
19
20 # Function that generates a TGZ file from a NPM project
21 buildNodeSourceDist =
22 { name, version, src, ... }:
23
24 stdenv.mkDerivation {
25 name = "node-tarball-${name}-${version}";
26 inherit src;
27 buildInputs = [ nodejs ];
28 buildPhase = ''
29 export HOME=$TMPDIR
30 tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
31 '';
32 installPhase = ''
33 mkdir -p $out/tarballs
34 mv $tgzFile $out/tarballs
35 mkdir -p $out/nix-support
36 echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
37 '';
38 };
39
40 includeDependencies = {dependencies}:
41 stdenv.lib.optionalString (dependencies != [])
42 (stdenv.lib.concatMapStrings (dependency:
43 ''
44 # Bundle the dependencies of the package
45 mkdir -p node_modules
46 cd node_modules
47
48 # Only include dependencies if they don't exist. They may also be bundled in the package.
49 if [ ! -e "${dependency.name}" ]
50 then
51 ${composePackage dependency}
52 fi
53
54 cd ..
55 ''
56 ) dependencies);
57
58 # Recursively composes the dependencies of a package
59 composePackage = { name, packageName, src, dependencies ? [], ... }@args:
60 ''
61 DIR=$(pwd)
62 cd $TMPDIR
63
64 unpackFile ${src}
65
66 # Make the base dir in which the target dependency resides first
67 mkdir -p "$(dirname "$DIR/${packageName}")"
68
69 if [ -f "${src}" ]
70 then
71 # Figure out what directory has been unpacked
72 packageDir="$(find . -maxdepth 1 -type d | tail -1)"
73
74 # Restore write permissions to make building work
75 find "$packageDir" -type d -print0 | xargs -0 chmod u+x
76 chmod -R u+w "$packageDir"
77
78 # Move the extracted tarball into the output folder
79 mv "$packageDir" "$DIR/${packageName}"
80 elif [ -d "${src}" ]
81 then
82 # Get a stripped name (without hash) of the source directory.
83 # On old nixpkgs it's already set internally.
84 if [ -z "$strippedName" ]
85 then
86 strippedName="$(stripHash ${src})"
87 fi
88
89 # Restore write permissions to make building work
90 chmod -R u+w "$strippedName"
91
92 # Move the extracted directory into the output folder
93 mv "$strippedName" "$DIR/${packageName}"
94 fi
95
96 # Unset the stripped name to not confuse the next unpack step
97 unset strippedName
98
99 # Include the dependencies of the package
100 cd "$DIR/${packageName}"
101 ${includeDependencies { inherit dependencies; }}
102 cd ..
103 ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
104 '';
105
106 pinpointDependencies = {dependencies, production}:
107 let
108 pinpointDependenciesFromPackageJSON = writeTextFile {
109 name = "pinpointDependencies.js";
110 text = ''
111 var fs = require('fs');
112 var path = require('path');
113
114 function resolveDependencyVersion(location, name) {
115 if(location == process.env['NIX_STORE']) {
116 return null;
117 } else {
118 var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
119
120 if(fs.existsSync(dependencyPackageJSON)) {
121 var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
122
123 if(dependencyPackageObj.name == name) {
124 return dependencyPackageObj.version;
125 }
126 } else {
127 return resolveDependencyVersion(path.resolve(location, ".."), name);
128 }
129 }
130 }
131
132 function replaceDependencies(dependencies) {
133 if(typeof dependencies == "object" && dependencies !== null) {
134 for(var dependency in dependencies) {
135 var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
136
137 if(resolvedVersion === null) {
138 process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
139 } else {
140 dependencies[dependency] = resolvedVersion;
141 }
142 }
143 }
144 }
145
146 /* Read the package.json configuration */
147 var packageObj = JSON.parse(fs.readFileSync('./package.json'));
148
149 /* Pinpoint all dependencies */
150 replaceDependencies(packageObj.dependencies);
151 if(process.argv[2] == "development") {
152 replaceDependencies(packageObj.devDependencies);
153 }
154 replaceDependencies(packageObj.optionalDependencies);
155
156 /* Write the fixed package.json file */
157 fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
158 '';
159 };
160 in
161 ''
162 node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
163
164 ${stdenv.lib.optionalString (dependencies != [])
165 ''
166 if [ -d node_modules ]
167 then
168 cd node_modules
169 ${stdenv.lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
170 cd ..
171 fi
172 ''}
173 '';
174
175 # Recursively traverses all dependencies of a package and pinpoints all
176 # dependencies in the package.json file to the versions that are actually
177 # being used.
178
179 pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
180 ''
181 if [ -d "${packageName}" ]
182 then
183 cd "${packageName}"
184 ${pinpointDependencies { inherit dependencies production; }}
185 cd ..
186 ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
187 fi
188 '';
189
190 # Extract the Node.js source code which is used to compile packages with
191 # native bindings
192 nodeSources = runCommand "node-sources" {} ''
193 tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
194 mv node-* $out
195 '';
196
197 # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
198 addIntegrityFieldsScript = writeTextFile {
199 name = "addintegrityfields.js";
200 text = ''
201 var fs = require('fs');
202 var path = require('path');
203
204 function augmentDependencies(baseDir, dependencies) {
205 for(var dependencyName in dependencies) {
206 var dependency = dependencies[dependencyName];
207
208 // Open package.json and augment metadata fields
209 var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
210 var packageJSONPath = path.join(packageJSONDir, "package.json");
211
212 if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
213 console.log("Adding metadata fields to: "+packageJSONPath);
214 var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
215
216 if(dependency.integrity) {
217 packageObj["_integrity"] = dependency.integrity;
218 } else {
219 packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
220 }
221
222 packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
223 fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
224 }
225
226 // Augment transitive dependencies
227 if(dependency.dependencies !== undefined) {
228 augmentDependencies(packageJSONDir, dependency.dependencies);
229 }
230 }
231 }
232
233 if(fs.existsSync("./package-lock.json")) {
234 var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
235
236 if(packageLock.lockfileVersion !== 1) {
237 process.stderr.write("Sorry, I only understand lock file version 1!\n");
238 process.exit(1);
239 }
240
241 if(packageLock.dependencies !== undefined) {
242 augmentDependencies(".", packageLock.dependencies);
243 }
244 }
245 '';
246 };
247
248 # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
249 reconstructPackageLock = writeTextFile {
250 name = "addintegrityfields.js";
251 text = ''
252 var fs = require('fs');
253 var path = require('path');
254
255 var packageObj = JSON.parse(fs.readFileSync("package.json"));
256
257 var lockObj = {
258 name: packageObj.name,
259 version: packageObj.version,
260 lockfileVersion: 1,
261 requires: true,
262 dependencies: {}
263 };
264
265 function augmentPackageJSON(filePath, dependencies) {
266 var packageJSON = path.join(filePath, "package.json");
267 if(fs.existsSync(packageJSON)) {
268 var packageObj = JSON.parse(fs.readFileSync(packageJSON));
269 dependencies[packageObj.name] = {
270 version: packageObj.version,
271 integrity: "sha1-000000000000000000000000000=",
272 dependencies: {}
273 };
274 processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
275 }
276 }
277
278 function processDependencies(dir, dependencies) {
279 if(fs.existsSync(dir)) {
280 var files = fs.readdirSync(dir);
281
282 files.forEach(function(entry) {
283 var filePath = path.join(dir, entry);
284 var stats = fs.statSync(filePath);
285
286 if(stats.isDirectory()) {
287 if(entry.substr(0, 1) == "@") {
288 // When we encounter a namespace folder, augment all packages belonging to the scope
289 var pkgFiles = fs.readdirSync(filePath);
290
291 pkgFiles.forEach(function(entry) {
292 if(stats.isDirectory()) {
293 var pkgFilePath = path.join(filePath, entry);
294 augmentPackageJSON(pkgFilePath, dependencies);
295 }
296 });
297 } else {
298 augmentPackageJSON(filePath, dependencies);
299 }
300 }
301 });
302 }
303 }
304
305 processDependencies("node_modules", lockObj.dependencies);
306
307 fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
308 '';
309 };
310
311 # Builds and composes an NPM package including all its dependencies
312 buildNodePackage =
313 { name
314 , packageName
315 , version
316 , dependencies ? []
317 , buildInputs ? []
318 , production ? true
319 , npmFlags ? ""
320 , dontNpmInstall ? false
321 , bypassCache ? false
322 , preRebuild ? ""
323 , dontStrip ? true
324 , unpackPhase ? "true"
325 , buildPhase ? "true"
326 , ... }@args:
327
328 let
329 forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
330 extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ];
331 in
332 stdenv.mkDerivation ({
333 name = "node-${name}-${version}";
334 buildInputs = [ tarWrapper python nodejs ]
335 ++ stdenv.lib.optional (stdenv.isLinux) utillinux
336 ++ stdenv.lib.optional (stdenv.isDarwin) libtool
337 ++ buildInputs;
338
339 inherit dontStrip; # Stripping may fail a build for some package deployments
340 inherit dontNpmInstall preRebuild unpackPhase buildPhase;
341
342 compositionScript = composePackage args;
343 pinpointDependenciesScript = pinpointDependenciesOfPackage args;
344
345 passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
346
347 installPhase = ''
348 # Create and enter a root node_modules/ folder
349 mkdir -p $out/lib/node_modules
350 cd $out/lib/node_modules
351
352 # Compose the package and all its dependencies
353 source $compositionScriptPath
354
355 # Pinpoint the versions of all dependencies to the ones that are actually being used
356 echo "pinpointing versions of dependencies..."
357 source $pinpointDependenciesScriptPath
358
359 # Patch the shebangs of the bundled modules to prevent them from
360 # calling executables outside the Nix store as much as possible
361 patchShebangs .
362
363 # Deploy the Node.js package by running npm install. Since the
364 # dependencies have been provided already by ourselves, it should not
365 # attempt to install them again, which is good, because we want to make
366 # it Nix's responsibility. If it needs to install any dependencies
367 # anyway (e.g. because the dependency parameters are
368 # incomplete/incorrect), it fails.
369 #
370 # The other responsibilities of NPM are kept -- version checks, build
371 # steps, postprocessing etc.
372
373 export HOME=$TMPDIR
374 cd "${packageName}"
375 runHook preRebuild
376
377 ${stdenv.lib.optionalString bypassCache ''
378 if [ ! -f package-lock.json ]
379 then
380 echo "No package-lock.json file found, reconstructing..."
381 node ${reconstructPackageLock}
382 fi
383
384 node ${addIntegrityFieldsScript}
385 ''}
386
387 npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild
388
389 if [ "$dontNpmInstall" != "1" ]
390 then
391 # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
392 rm -f npm-shrinkwrap.json
393
394 npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
395 fi
396
397 # Create symlink to the deployed executable folder, if applicable
398 if [ -d "$out/lib/node_modules/.bin" ]
399 then
400 ln -s $out/lib/node_modules/.bin $out/bin
401 fi
402
403 # Create symlinks to the deployed manual page folders, if applicable
404 if [ -d "$out/lib/node_modules/${packageName}/man" ]
405 then
406 mkdir -p $out/share
407 for dir in "$out/lib/node_modules/${packageName}/man/"*
408 do
409 mkdir -p $out/share/man/$(basename "$dir")
410 for page in "$dir"/*
411 do
412 ln -s $page $out/share/man/$(basename "$dir")
413 done
414 done
415 fi
416
417 # Run post install hook, if provided
418 runHook postInstall
419 '';
420 } // extraArgs);
421
422 # Builds a development shell
423 buildNodeShell =
424 { name
425 , packageName
426 , version
427 , src
428 , dependencies ? []
429 , buildInputs ? []
430 , production ? true
431 , npmFlags ? ""
432 , dontNpmInstall ? false
433 , bypassCache ? false
434 , dontStrip ? true
435 , unpackPhase ? "true"
436 , buildPhase ? "true"
437 , ... }@args:
438
439 let
440 forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
441
442 extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
443
444 nodeDependencies = stdenv.mkDerivation ({
445 name = "node-dependencies-${name}-${version}";
446
447 buildInputs = [ tarWrapper python nodejs ]
448 ++ stdenv.lib.optional (stdenv.isLinux) utillinux
449 ++ stdenv.lib.optional (stdenv.isDarwin) libtool
450 ++ buildInputs;
451
452 inherit dontStrip; # Stripping may fail a build for some package deployments
453 inherit dontNpmInstall unpackPhase buildPhase;
454
455 includeScript = includeDependencies { inherit dependencies; };
456 pinpointDependenciesScript = pinpointDependenciesOfPackage args;
457
458 passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
459
460 installPhase = ''
461 mkdir -p $out/${packageName}
462 cd $out/${packageName}
463
464 source $includeScriptPath
465
466 # Create fake package.json to make the npm commands work properly
467 cp ${src}/package.json .
468 chmod 644 package.json
469 ${stdenv.lib.optionalString bypassCache ''
470 if [ -f ${src}/package-lock.json ]
471 then
472 cp ${src}/package-lock.json .
473 fi
474 ''}
475
476 # Pinpoint the versions of all dependencies to the ones that are actually being used
477 echo "pinpointing versions of dependencies..."
478 cd ..
479 ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
480
481 source $pinpointDependenciesScriptPath
482 cd ${packageName}
483
484 # Patch the shebangs of the bundled modules to prevent them from
485 # calling executables outside the Nix store as much as possible
486 patchShebangs .
487
488 export HOME=$PWD
489
490 ${stdenv.lib.optionalString bypassCache ''
491 if [ ! -f package-lock.json ]
492 then
493 echo "No package-lock.json file found, reconstructing..."
494 node ${reconstructPackageLock}
495 fi
496
497 node ${addIntegrityFieldsScript}
498 ''}
499
500 npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild
501
502 ${stdenv.lib.optionalString (!dontNpmInstall) ''
503 # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
504 rm -f npm-shrinkwrap.json
505
506 npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
507 ''}
508
509 cd ..
510 ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
511
512 mv ${packageName} lib
513 ln -s $out/lib/node_modules/.bin $out/bin
514 '';
515 } // extraArgs);
516 in
517 stdenv.mkDerivation {
518 name = "node-shell-${name}-${version}";
519
520 buildInputs = [ python nodejs ] ++ stdenv.lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
521 buildCommand = ''
522 mkdir -p $out/bin
523 cat > $out/bin/shell <<EOF
524 #! ${stdenv.shell} -e
525 $shellHook
526 exec ${stdenv.shell}
527 EOF
528 chmod +x $out/bin/shell
529 '';
530
531 # Provide the dependencies in a development shell through the NODE_PATH environment variable
532 inherit nodeDependencies;
533 shellHook = stdenv.lib.optionalString (dependencies != []) ''
534 export NODE_PATH=$nodeDependencies/lib/node_modules
535 '';
536 };
537in
538{
539 buildNodeSourceDist = stdenv.lib.makeOverridable buildNodeSourceDist;
540 buildNodePackage = stdenv.lib.makeOverridable buildNodePackage;
541 buildNodeShell = stdenv.lib.makeOverridable buildNodeShell;
542}
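
These three entry points were normally driven by node2nix-generated expressions rather than written by hand. A minimal hand-written sketch of a call, with hypothetical package metadata (note the pre-20.09 'stdenv.lib', 'python2' and 'utillinux' names the file depends on):

  let
    pkgs = import <nixpkgs> {};   # must be old enough to still carry stdenv.lib
    nodeEnv = import ./lib/node-env.nix {
      inherit (pkgs) stdenv python2 utillinux libtool runCommand writeTextFile;
      nodejs = pkgs.nodejs;
    };
  in nodeEnv.buildNodePackage {
    name = "example";          # hypothetical package
    packageName = "example";
    version = "1.0.0";
    src = ./example;
    dependencies = [];         # node2nix generates this list from package-lock.json
  }
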
diff --git a/lib/private/default.nix b/lib/private/default.nix
deleted file mode 100644
index c7d753a..0000000
--- a/lib/private/default.nix
+++ /dev/null
@@ -1,20 +0,0 @@
-{}:
-{
-  # This adds header colors to the builds, but it rebuilds the whole
-  # world from scratch, so only use it to debug!
-  # add it as postHook in derivations
-  immaePostHook = ''
-    header() {
-      echo -ne "\033[1;36m"
-      echo -n "$1"
-      echo -e "\033[0m"
-    }
-
-    echoCmd() {
-      printf "\033[1;34m%s:\033[0m" "$1"
-      shift
-      printf ' %q' "$@"
-      echo
-    }
-  '';
-}
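
As its own comment insists, the hook was debug-only: adding it to stdenv forces every derivation that includes it to rebuild from scratch. Since lib/default.nix merged ./private into its result when present, a consumer would have looked roughly like this sketch ('hello' is just a placeholder derivation):

  let
    pkgs = import <nixpkgs> {};
    myLib = import ./lib { inherit pkgs; };   # includes ./private when present
  in pkgs.hello.overrideAttrs (old: {
    # "add it as postHook in derivations", per the comment above
    postHook = (old.postHook or "") + myLib.immaePostHook;
  })
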