From 1a64deeb894dc95e2645a75771732c6cc53a79ad Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Isma=C3=ABl=20Bouya?=
Date: Wed, 4 Oct 2023 01:35:06 +0200
Subject: Squash changes containing private information

There were a lot of changes since the previous commit, and many of them
contained personal information about users. All those changes were
squashed into a single commit (the history is kept in a different place)
and the private information was moved into a separate private repository.
---
 lib/default.nix              |  16 --
 lib/flake-compat-patched.nix | 190 ---------------
 lib/flake-compat.nix         |   8 -
 lib/node-env.nix             | 542 -------------------------------------------
 lib/private/default.nix      |  20 --
 5 files changed, 776 deletions(-)
 delete mode 100644 lib/default.nix
 delete mode 100644 lib/flake-compat-patched.nix
 delete mode 100644 lib/flake-compat.nix
 delete mode 100644 lib/node-env.nix
 delete mode 100644 lib/private/default.nix

diff --git a/lib/default.nix b/lib/default.nix
deleted file mode 100644
index 7b392f1..0000000
--- a/lib/default.nix
+++ /dev/null
@@ -1,16 +0,0 @@
-{ pkgs }:
-with pkgs;
-rec {
-  flakeCompat = import ./flake-compat.nix;
-  nodeEnv = import ./node-env.nix;
-
-  fetchedGithub = path:
-    let
-      json = lib.importJSON path;
-    in rec {
-      version = json.tag;
-      pname = json.meta.name;
-      name = "${pname}-${version}";
-      src = fetchFromGitHub json.github;
-    };
-} // (if builtins.pathExists ./private then callPackage ./private {} else {})
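The fetchedGithub helper above turned a checked-in JSON description of a
GitHub revision into fetchFromGitHub arguments. As a reading aid, here is a
minimal sketch of how it was presumably consumed; the JSON shape follows the
attribute accesses in the code (json.tag, json.meta.name, json.github), but
the file name and all values are invented for illustration:

    # Hypothetical pkgs/some-tool.json (values invented):
    #   { "tag": "v1.2.3",
    #     "meta": { "name": "some-tool" },
    #     "github": { "owner": "someone", "repo": "some-tool",
    #                 "rev": "<40-char commit hash>", "sha256": "<hash>" } }
    let
      pkgs = import <nixpkgs> {};
      mylib = import ./lib { inherit pkgs; };  # the deleted lib/default.nix
    in
    pkgs.stdenv.mkDerivation (mylib.fetchedGithub ./pkgs/some-tool.json // {
      # fetchedGithub supplies pname, version, name and src;
      # real build steps would go here.
    })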
diff --git a/lib/flake-compat-patched.nix b/lib/flake-compat-patched.nix
deleted file mode 100644
index 217a99f..0000000
--- a/lib/flake-compat-patched.nix
+++ /dev/null
@@ -1,190 +0,0 @@
-# Compatibility function to allow flakes to be used by
-# non-flake-enabled Nix versions. Given a source tree containing a
-# 'flake.nix' and 'flake.lock' file, it fetches the flake inputs and
-# calls the flake's 'outputs' function. It then returns an attrset
-# containing 'defaultNix' (to be used in 'default.nix'), 'shellNix'
-# (to be used in 'shell.nix').
-
-{ src, system ? builtins.currentSystem or "unknown-system" }:
-
-let
-
-  lockFilePath = src + "/flake.lock";
-
-  lockFile = builtins.fromJSON (builtins.readFile lockFilePath);
-
-  fetchTree =
-    info:
-    if info.type == "github" then
-      { outPath = fetchTarball "https://api.${info.host or "github.com"}/repos/${info.owner}/${info.repo}/tarball/${info.rev}";
-        rev = info.rev;
-        shortRev = builtins.substring 0 7 info.rev;
-        lastModified = info.lastModified;
-        lastModifiedDate = formatSecondsSinceEpoch info.lastModified;
-        narHash = info.narHash;
-      }
-    else if info.type == "git" then
-      { outPath =
-          builtins.fetchGit
-            ({ url = info.url; }
-             // (if info ? rev then { inherit (info) rev; } else {})
-             // (if info ? ref then { inherit (info) ref; } else {})
-            );
-        lastModified = info.lastModified;
-        lastModifiedDate = formatSecondsSinceEpoch info.lastModified;
-        narHash = info.narHash;
-      } // (if info ? rev then {
-        rev = info.rev;
-        shortRev = builtins.substring 0 7 info.rev;
-      } else {
-      })
-    else if info.type == "path" then
-      { outPath = builtins.path { path =
-          if builtins.substring 0 1 info.path == "."
-          then builtins.toString src + "/" + info.path
-          else info.path;
-        };
-        narHash = info.narHash;
-      }
-    else if info.type == "tarball" then
-      { outPath = fetchTarball info.url;
-        narHash = info.narHash;
-      }
-    else if info.type == "gitlab" then
-      { inherit (info) rev narHash lastModified;
-        outPath = fetchTarball "https://${info.host or "gitlab.com"}/api/v4/projects/${info.owner}%2F${info.repo}/repository/archive.tar.gz?sha=${info.rev}";
-        shortRev = builtins.substring 0 7 info.rev;
-      }
-    else
-      # FIXME: add Mercurial, tarball inputs.
-      throw "flake input has unsupported input type '${info.type}'";
-
-  callFlake4 = flakeSrc: locks:
-    let
-      flake = import (flakeSrc + "/flake.nix");
-
-      inputs = builtins.mapAttrs (n: v:
-        if v.flake or true
-        then callFlake4 (fetchTree (v.locked // v.info)) v.inputs
-        else fetchTree (v.locked // v.info)) locks;
-
-      outputs = flakeSrc // (flake.outputs (inputs // {self = outputs;}));
-    in
-      assert flake.edition == 201909;
-      outputs;
-
-  callLocklessFlake = flakeSrc:
-    let
-      flake = import (flakeSrc + "/flake.nix");
-      outputs = flakeSrc // (flake.outputs ({ self = outputs; }));
-    in outputs;
-
-  rootSrc = let
-    # Try to clean the source tree by using fetchGit, if this source
-    # tree is a valid git repository.
-    tryFetchGit = src:
-      if isGit && !isShallow
-      then
-        let res = builtins.fetchGit src;
-        in if res.rev == "0000000000000000000000000000000000000000" then removeAttrs res ["rev" "shortRev"] else res
-      else { outPath = src; };
-    # NB git worktrees have a file for .git, so we don't check the type of .git
-    isGit = builtins.pathExists (src + "/.git");
-    isShallow = builtins.pathExists (src + "/.git/shallow");
-
-  in
-    { lastModified = 0; lastModifiedDate = formatSecondsSinceEpoch 0; }
-    // (if src ? outPath then src else tryFetchGit src);
-
-  # Format number of seconds in the Unix epoch as %Y%m%d%H%M%S.
-  formatSecondsSinceEpoch = t:
-    let
-      rem = x: y: x - x / y * y;
-      days = t / 86400;
-      secondsInDay = rem t 86400;
-      hours = secondsInDay / 3600;
-      minutes = (rem secondsInDay 3600) / 60;
-      seconds = rem t 60;
-
-      # Courtesy of https://stackoverflow.com/a/32158604.
-      z = days + 719468;
-      era = (if z >= 0 then z else z - 146096) / 146097;
-      doe = z - era * 146097;
-      yoe = (doe - doe / 1460 + doe / 36524 - doe / 146096) / 365;
-      y = yoe + era * 400;
-      doy = doe - (365 * yoe + yoe / 4 - yoe / 100);
-      mp = (5 * doy + 2) / 153;
-      d = doy - (153 * mp + 2) / 5 + 1;
-      m = mp + (if mp < 10 then 3 else -9);
-      y' = y + (if m <= 2 then 1 else 0);
-
-      pad = s: if builtins.stringLength s < 2 then "0" + s else s;
-    in "${toString y'}${pad (toString m)}${pad (toString d)}${pad (toString hours)}${pad (toString minutes)}${pad (toString seconds)}";
-
-  allNodes =
-    builtins.mapAttrs
-      (key: node:
-        let
-          sourceInfo =
-            if key == lockFile.root
-            then rootSrc
-            else fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);
-
-          subdir = if key == lockFile.root then "" else node.locked.dir or "";
-
-          flake = import (sourceInfo + (if subdir != "" then "/" else "") + subdir + "/flake.nix");
-
-          inputs = builtins.mapAttrs
-            (inputName: inputSpec: allNodes.${resolveInput inputSpec})
-            (node.inputs or {});
-
-          # Resolve an input spec into a node name. An input spec is
-          # either a node name, or a 'follows' path from the root
-          # node.
-          resolveInput = inputSpec:
-            if builtins.isList inputSpec
-            then getInputByPath lockFile.root inputSpec
-            else inputSpec;
-
-          # Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
-          # root node, returning the final node.
-          getInputByPath = nodeName: path:
-            if path == []
-            then nodeName
-            else
-              getInputByPath
-                # Since this could be a 'follows' input, call resolveInput.
-                (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
-                (builtins.tail path);
-
-          outputs = flake.outputs (inputs // { self = result; });
-
-          result = outputs // sourceInfo // { inherit inputs; inherit outputs; inherit sourceInfo; };
-        in
-          if node.flake or true then
-            assert builtins.isFunction flake.outputs;
-            result
-          else
-            sourceInfo
-      )
-      lockFile.nodes;
-
-  result =
-    if !(builtins.pathExists lockFilePath)
-    then callLocklessFlake rootSrc
-    else if lockFile.version == 4
-    then callFlake4 rootSrc (lockFile.inputs)
-    else if lockFile.version >= 5 && lockFile.version <= 7
-    then allNodes.${lockFile.root}
-    else throw "lock file '${lockFilePath}' has unsupported version ${toString lockFile.version}";
-
-in
-  rec {
-    defaultNix =
-      result
-      // (if result ? defaultPackage.${system} then { default = result.defaultPackage.${system}; } else {});
-
-    shellNix =
-      defaultNix
-      // (if result ? devShell.${system} then { default = result.devShell.${system}; } else {});
-  }
diff --git a/lib/flake-compat.nix b/lib/flake-compat.nix
deleted file mode 100644
index d3daa10..0000000
--- a/lib/flake-compat.nix
+++ /dev/null
@@ -1,8 +0,0 @@
-src:
-(import ./flake-compat-patched.nix { inherit src; }).defaultNix
-# Until https://github.com/edolstra/flake-compat/pull/18 is accepted
-# (import (
-#   fetchTarball {
-#     url = "https://github.com/edolstra/flake-compat/archive/99f1c2157fba4bfe6211a321fd0ee43199025dbf.tar.gz";
-#     sha256 = "0x2jn3vrawwv9xp15674wjz9pixwjyj3j771izayl962zziivbx2";
-#   }) { inherit src; }).defaultNix
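Taken together, the two deleted files implement the usual flake-compat
pattern: flake-compat.nix is a thin wrapper that evaluates the flake's
outputs on non-flake Nix. A minimal sketch of the intended call site,
assuming a default.nix at the repository root (the call site itself is not
part of this commit):

    # default.nix (sketch): evaluate this repository's flake.nix through the
    # deleted compatibility shim and expose its defaultNix outputs.
    (import ./lib/flake-compat.nix) ./.

With that one-liner, plain nix-build keeps working on Nix versions without
flake support.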
diff --git a/lib/node-env.nix b/lib/node-env.nix
deleted file mode 100644
index 720e0cc..0000000
--- a/lib/node-env.nix
+++ /dev/null
@@ -1,542 +0,0 @@
-# This file originates from node2nix
-
-{stdenv, nodejs, python2, utillinux, libtool, runCommand, writeTextFile}:
-
-let
-  python = if nodejs ? python then nodejs.python else python2;
-
-  # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
-  tarWrapper = runCommand "tarWrapper" {} ''
-    mkdir -p $out/bin
-
-    cat > $out/bin/tar <<EOF
-    #! ${stdenv.shell} -e
-    $(type -p tar) "\$@" --warning=no-unknown-keyword
-    EOF
-
-    chmod +x $out/bin/tar
-  '';
-
-  # Function that generates a TGZ file from a NPM project
-  buildNodeSourceDist =
-    { name, version, src, ... }:
-
-    stdenv.mkDerivation {
-      name = "node-tarball-${name}-${version}";
-      inherit src;
-      buildInputs = [ nodejs ];
-      buildPhase = ''
-        export HOME=$TMPDIR
-        tgzFile=$(npm pack | tail -n 1)
-      '';
-      installPhase = ''
-        mkdir -p $out/tarballs
-        mv $tgzFile $out/tarballs
-        mkdir -p $out/nix-support
-        echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
-      '';
-    };
-
-  includeDependencies = {dependencies}:
-    stdenv.lib.optionalString (dependencies != [])
-      (stdenv.lib.concatMapStrings (dependency:
-        ''
-          # Bundle the dependencies of the package
-          mkdir -p node_modules
-          cd node_modules
-
-          # Only include dependencies if they don't exist. They may also be bundled in the package.
-          if [ ! -e "${dependency.name}" ]
-          then
-              ${composePackage dependency}
-          fi
-
-          cd ..
-        ''
-      ) dependencies);
-
-  # Recursively composes the dependencies of a package
-  composePackage = { name, packageName, src, dependencies ? [], ... }@args:
-    ''
-      DIR=$(pwd)
-      cd $TMPDIR
-
-      unpackFile ${src}
-
-      # Make the base dir in which the target dependency resides first
-      mkdir -p "$(dirname "$DIR/${packageName}")"
-
-      if [ -f "${src}" ]
-      then
-          # Figure out what directory has been unpacked
-          packageDir="$(find . -maxdepth 1 -type d | tail -1)"
-
-          # Restore write permissions to make building work
-          find "$packageDir" -type d -print0 | xargs -0 chmod u+x
-          chmod -R u+w "$packageDir"
-
-          # Move the extracted tarball into the output folder
-          mv "$packageDir" "$DIR/${packageName}"
-      elif [ -d "${src}" ]
-      then
-          # Get a stripped name (without hash) of the source directory.
-          # On old nixpkgs it's already set internally.
-          if [ -z "$strippedName" ]
-          then
-              strippedName="$(stripHash ${src})"
-          fi
-
-          # Restore write permissions to make building work
-          chmod -R u+w "$strippedName"
-
-          # Move the extracted directory into the output folder
-          mv "$strippedName" "$DIR/${packageName}"
-      fi
-
-      # Unset the stripped name to not confuse the next unpack step
-      unset strippedName
-
-      # Include the dependencies of the package
-      cd "$DIR/${packageName}"
-      ${includeDependencies { inherit dependencies; }}
-      cd ..
-      ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
-    '';
-
-  pinpointDependencies = {dependencies, production}:
-    let
-      pinpointDependenciesFromPackageJSON = writeTextFile {
-        name = "pinpointDependencies.js";
-        text = ''
-          var fs = require('fs');
-          var path = require('path');
-
-          function resolveDependencyVersion(location, name) {
-              if(location == process.env['NIX_STORE']) {
-                  return null;
-              } else {
-                  var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
-
-                  if(fs.existsSync(dependencyPackageJSON)) {
-                      var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
-
-                      if(dependencyPackageObj.name == name) {
-                          return dependencyPackageObj.version;
-                      }
-                  } else {
-                      return resolveDependencyVersion(path.resolve(location, ".."), name);
-                  }
-              }
-          }
-
-          function replaceDependencies(dependencies) {
-              if(typeof dependencies == "object" && dependencies !== null) {
-                  for(var dependency in dependencies) {
-                      var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
-
-                      if(resolvedVersion === null) {
-                          process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
-                      } else {
-                          dependencies[dependency] = resolvedVersion;
-                      }
-                  }
-              }
-          }
-
-          /* Read the package.json configuration */
-          var packageObj = JSON.parse(fs.readFileSync('./package.json'));
-
-          /* Pinpoint all dependencies */
-          replaceDependencies(packageObj.dependencies);
-          if(process.argv[2] == "development") {
-              replaceDependencies(packageObj.devDependencies);
-          }
-          replaceDependencies(packageObj.optionalDependencies);
-
-          /* Write the fixed package.json file */
-          fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
-        '';
-      };
-    in
-    ''
-      node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
-
-      ${stdenv.lib.optionalString (dependencies != [])
-        ''
-          if [ -d node_modules ]
-          then
-              cd node_modules
-              ${stdenv.lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
-              cd ..
-          fi
-        ''}
-    '';
-
-  # Recursively traverses all dependencies of a package and pinpoints all
-  # dependencies in the package.json file to the versions that are actually
-  # being used.
-
-  pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
-    ''
-      if [ -d "${packageName}" ]
-      then
-          cd "${packageName}"
-          ${pinpointDependencies { inherit dependencies production; }}
-          cd ..
-          ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
-      fi
-    '';
-
-  # Extract the Node.js source code which is used to compile packages with
-  # native bindings
-  nodeSources = runCommand "node-sources" {} ''
-    tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
-    mv node-* $out
-  '';
-
-  # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
-  addIntegrityFieldsScript = writeTextFile {
-    name = "addintegrityfields.js";
-    text = ''
-      var fs = require('fs');
-      var path = require('path');
-
-      function augmentDependencies(baseDir, dependencies) {
-          for(var dependencyName in dependencies) {
-              var dependency = dependencies[dependencyName];
-
-              // Open package.json and augment metadata fields
-              var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
-              var packageJSONPath = path.join(packageJSONDir, "package.json");
-
-              if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
-                  console.log("Adding metadata fields to: "+packageJSONPath);
-                  var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
-
-                  if(dependency.integrity) {
-                      packageObj["_integrity"] = dependency.integrity;
-                  } else {
-                      packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
-                  }
-
-                  packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
-                  fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
-              }
-
-              // Augment transitive dependencies
-              if(dependency.dependencies !== undefined) {
-                  augmentDependencies(packageJSONDir, dependency.dependencies);
-              }
-          }
-      }
-
-      if(fs.existsSync("./package-lock.json")) {
-          var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
-
-          if(packageLock.lockfileVersion !== 1) {
-              process.stderr.write("Sorry, I only understand lock file version 1!\n");
-              process.exit(1);
-          }
-
-          if(packageLock.dependencies !== undefined) {
-              augmentDependencies(".", packageLock.dependencies);
-          }
-      }
-    '';
-  };
-
-  # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
-  reconstructPackageLock = writeTextFile {
-    name = "addintegrityfields.js";
-    text = ''
-      var fs = require('fs');
-      var path = require('path');
-
-      var packageObj = JSON.parse(fs.readFileSync("package.json"));
-
-      var lockObj = {
-          name: packageObj.name,
-          version: packageObj.version,
-          lockfileVersion: 1,
-          requires: true,
-          dependencies: {}
-      };
-
-      function augmentPackageJSON(filePath, dependencies) {
-          var packageJSON = path.join(filePath, "package.json");
-          if(fs.existsSync(packageJSON)) {
-              var packageObj = JSON.parse(fs.readFileSync(packageJSON));
-              dependencies[packageObj.name] = {
-                  version: packageObj.version,
-                  integrity: "sha1-000000000000000000000000000=",
-                  dependencies: {}
-              };
-              processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
-          }
-      }
-
-      function processDependencies(dir, dependencies) {
-          if(fs.existsSync(dir)) {
-              var files = fs.readdirSync(dir);
-
-              files.forEach(function(entry) {
-                  var filePath = path.join(dir, entry);
-                  var stats = fs.statSync(filePath);
-
-                  if(stats.isDirectory()) {
-                      if(entry.substr(0, 1) == "@") {
-                          // When we encounter a namespace folder, augment all packages belonging to the scope
-                          var pkgFiles = fs.readdirSync(filePath);
-
-                          pkgFiles.forEach(function(entry) {
-                              if(stats.isDirectory()) {
-                                  var pkgFilePath = path.join(filePath, entry);
-                                  augmentPackageJSON(pkgFilePath, dependencies);
-                              }
-                          });
-                      } else {
-                          augmentPackageJSON(filePath, dependencies);
-                      }
-                  }
-              });
-          }
-      }
-
-      processDependencies("node_modules", lockObj.dependencies);
-
-      fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
-    '';
-  };
-
-  # Builds and composes an NPM package including all its dependencies
-  buildNodePackage =
-    { name
-    , packageName
-    , version
-    , dependencies ? []
-    , buildInputs ? []
-    , production ? true
-    , npmFlags ? ""
-    , dontNpmInstall ? false
-    , bypassCache ? false
-    , preRebuild ? ""
-    , dontStrip ? true
-    , unpackPhase ? "true"
-    , buildPhase ? "true"
-    , ... }@args:
-
-    let
-      forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
-      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ];
-    in
-    stdenv.mkDerivation ({
-      name = "node-${name}-${version}";
-      buildInputs = [ tarWrapper python nodejs ]
-        ++ stdenv.lib.optional (stdenv.isLinux) utillinux
-        ++ stdenv.lib.optional (stdenv.isDarwin) libtool
-        ++ buildInputs;
-
-      inherit dontStrip; # Stripping may fail a build for some package deployments
-      inherit dontNpmInstall preRebuild unpackPhase buildPhase;
-
-      compositionScript = composePackage args;
-      pinpointDependenciesScript = pinpointDependenciesOfPackage args;
-
-      passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
-
-      installPhase = ''
-        # Create and enter a root node_modules/ folder
-        mkdir -p $out/lib/node_modules
-        cd $out/lib/node_modules
-
-        # Compose the package and all its dependencies
-        source $compositionScriptPath
-
-        # Pinpoint the versions of all dependencies to the ones that are actually being used
-        echo "pinpointing versions of dependencies..."
-        source $pinpointDependenciesScriptPath
-
-        # Patch the shebangs of the bundled modules to prevent them from
-        # calling executables outside the Nix store as much as possible
-        patchShebangs .
-
-        # Deploy the Node.js package by running npm install. Since the
-        # dependencies have been provided already by ourselves, it should not
-        # attempt to install them again, which is good, because we want to make
-        # it Nix's responsibility. If it needs to install any dependencies
-        # anyway (e.g. because the dependency parameters are
-        # incomplete/incorrect), it fails.
-        #
-        # The other responsibilities of NPM are kept -- version checks, build
-        # steps, postprocessing etc.
-
-        export HOME=$TMPDIR
-        cd "${packageName}"
-        runHook preRebuild
-
-        ${stdenv.lib.optionalString bypassCache ''
-          if [ ! -f package-lock.json ]
-          then
-              echo "No package-lock.json file found, reconstructing..."
-              node ${reconstructPackageLock}
-          fi
-
-          node ${addIntegrityFieldsScript}
-        ''}
-
-        npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild
-
-        if [ "$dontNpmInstall" != "1" ]
-        then
-            # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
-            rm -f npm-shrinkwrap.json
-
-            npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
-        fi
-
-        # Create symlink to the deployed executable folder, if applicable
-        if [ -d "$out/lib/node_modules/.bin" ]
-        then
-            ln -s $out/lib/node_modules/.bin $out/bin
-        fi
-
-        # Create symlinks to the deployed manual page folders, if applicable
-        if [ -d "$out/lib/node_modules/${packageName}/man" ]
-        then
-            mkdir -p $out/share
-            for dir in "$out/lib/node_modules/${packageName}/man/"*
-            do
-                mkdir -p $out/share/man/$(basename "$dir")
-                for page in "$dir"/*
-                do
-                    ln -s $page $out/share/man/$(basename "$dir")
-                done
-            done
-        fi
-
-        # Run post install hook, if provided
-        runHook postInstall
-      '';
-    } // extraArgs);
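buildNodePackage composes the pre-fetched dependencies into node_modules/
and then runs npm rebuild/install offline; the fake registry URL
http://www.example.com points npm at a non-registry so any attempted
download fails instead of silently bypassing Nix. A hypothetical invocation
for illustration only — in this repository such calls were presumably
generated by node2nix, and every name and version below is invented:

    let
      pkgs = import <nixpkgs> {};
      nodeEnv = import ./lib/node-env.nix {
        inherit (pkgs) stdenv nodejs python2 utillinux libtool runCommand writeTextFile;
      };
    in
    nodeEnv.buildNodePackage {
      name = "example";
      packageName = "example";
      version = "1.0.0";
      src = ./example;   # directory containing package.json
      # Dependencies are attrsets with name/packageName/version/src,
      # normally emitted by node2nix from a lock file.
      dependencies = [];
    }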
-  # Builds a development shell
-  buildNodeShell =
-    { name
-    , packageName
-    , version
-    , src
-    , dependencies ? []
-    , buildInputs ? []
-    , production ? true
-    , npmFlags ? ""
-    , dontNpmInstall ? false
-    , bypassCache ? false
-    , dontStrip ? true
-    , unpackPhase ? "true"
-    , buildPhase ? "true"
-    , ... }@args:
-
-    let
-      forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
-
-      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
-
-      nodeDependencies = stdenv.mkDerivation ({
-        name = "node-dependencies-${name}-${version}";
-
-        buildInputs = [ tarWrapper python nodejs ]
-          ++ stdenv.lib.optional (stdenv.isLinux) utillinux
-          ++ stdenv.lib.optional (stdenv.isDarwin) libtool
-          ++ buildInputs;
-
-        inherit dontStrip; # Stripping may fail a build for some package deployments
-        inherit dontNpmInstall unpackPhase buildPhase;
-
-        includeScript = includeDependencies { inherit dependencies; };
-        pinpointDependenciesScript = pinpointDependenciesOfPackage args;
-
-        passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
-
-        installPhase = ''
-          mkdir -p $out/${packageName}
-          cd $out/${packageName}
-
-          source $includeScriptPath
-
-          # Create fake package.json to make the npm commands work properly
-          cp ${src}/package.json .
-          chmod 644 package.json
-          ${stdenv.lib.optionalString bypassCache ''
-            if [ -f ${src}/package-lock.json ]
-            then
-                cp ${src}/package-lock.json .
-            fi
-          ''}
-
-          # Pinpoint the versions of all dependencies to the ones that are actually being used
-          echo "pinpointing versions of dependencies..."
-          cd ..
-          ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
-
-          source $pinpointDependenciesScriptPath
-          cd ${packageName}
-
-          # Patch the shebangs of the bundled modules to prevent them from
-          # calling executables outside the Nix store as much as possible
-          patchShebangs .
-
-          export HOME=$PWD
-
-          ${stdenv.lib.optionalString bypassCache ''
-            if [ ! -f package-lock.json ]
-            then
-                echo "No package-lock.json file found, reconstructing..."
-                node ${reconstructPackageLock}
-            fi
-
-            node ${addIntegrityFieldsScript}
-          ''}
-
-          npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild
-
-          ${stdenv.lib.optionalString (!dontNpmInstall) ''
-            # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
-            rm -f npm-shrinkwrap.json
-
-            npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
-          ''}
-
-          cd ..
-          ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
-
-          mv ${packageName} lib
-          ln -s $out/lib/node_modules/.bin $out/bin
-        '';
-      } // extraArgs);
-    in
-    stdenv.mkDerivation {
-      name = "node-shell-${name}-${version}";
-
-      buildInputs = [ python nodejs ] ++ stdenv.lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
-      buildCommand = ''
-        mkdir -p $out/bin
-        cat > $out/bin/shell <<EOF
-        #! ${stdenv.shell} -e
-        $shellHook
-        exec ${stdenv.shell}
-        EOF
-        chmod +x $out/bin/shell
-      '';
-
-      # Provide the dependencies in a development shell through the NODE_PATH environment variable
-      inherit nodeDependencies;
-      shellHook = stdenv.lib.optionalString (dependencies != []) ''
-        export NODE_PATH=$nodeDependencies/lib/node_modules
-      '';
-    };
-in
-{
-  buildNodeSourceDist = stdenv.lib.makeOverridable buildNodeSourceDist;
-  buildNodePackage = stdenv.lib.makeOverridable buildNodePackage;
-  buildNodeShell = stdenv.lib.makeOverridable buildNodeShell;
-}
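buildNodeShell builds the composed dependencies as a separate derivation
(nodeDependencies) and exposes them to an interactive shell through
NODE_PATH via the shellHook. A sketch of the intended use, with the same
caveats as above (all names invented):

    # shell.nix (sketch)
    let
      pkgs = import <nixpkgs> {};
      nodeEnv = import ./lib/node-env.nix {
        inherit (pkgs) stdenv nodejs python2 utillinux libtool runCommand writeTextFile;
      };
    in
    nodeEnv.buildNodeShell {
      name = "example";
      packageName = "example";
      version = "1.0.0";
      src = ./.;          # project containing package.json
      dependencies = [];  # node2nix-generated in practice
    }

Running nix-shell against such a file would enter a shell where the composed
node_modules is on NODE_PATH, without npm installing anything into the
working tree.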