nodejs-granular-v3: init (copy from nodejs-granular)

DavHau 2023-09-05 18:44:00 +02:00
parent d7a037e723
commit 0d3be890a7
10 changed files with 898 additions and 0 deletions

View File: buildPhase.nix

@@ -0,0 +1,30 @@
{
  jq,
  moreutils,
}: ''
  echo "executing buildPhaseNodejs"
  # execute the custom build script if one is defined
  if [ -n "$buildScript" ]; then
    if [ -f "$buildScript" ]; then
      $buildScript
    else
      eval "$buildScript"
    fi
  # by default, `npm run build` is executed only for top-level packages
  elif [ -n "$runBuild" ] && [ "$(jq '.scripts.build' ./package.json)" != "null" ]; then
    npm run build
  else
    if [ "$(jq '.scripts.preinstall' ./package.json)" != "null" ]; then
      npm --production --offline --nodedir=$nodeSources run preinstall
    fi
    if [ "$(jq '.scripts.install' ./package.json)" != "null" ]; then
      npm --production --offline --nodedir=$nodeSources run install
    fi
    if [ "$(jq '.scripts.postinstall' ./package.json)" != "null" ]; then
      npm --production --offline --nodedir=$nodeSources run postinstall
    fi
  fi
''

View File: configurePhase.nix

@@ -0,0 +1,39 @@
{
  lib,
  nodeDeps,
}: ''
  runHook preConfigure

  # symlink sub-dependencies as well, as this imitates npm more closely
  python $installDeps

  echo "Symlinking transitive executables to $nodeModules/.bin"
  for dep in ${toString nodeDeps}; do
    binDir=$dep/lib/node_modules/.bin
    if [ -e $binDir ]; then
      for bin in $(ls $binDir/); do
        if [ ! -e $nodeModules/.bin ]; then
          mkdir -p $nodeModules/.bin
        fi
        # the symlink might already have been created by install-deps.py
        # if installMethod=copy was selected
        if [ ! -L $nodeModules/.bin/$bin ]; then
          ln -s $binDir/$bin $nodeModules/.bin/$bin
        else
          echo "won't overwrite existing symlink $nodeModules/.bin/$bin. current target: $(readlink $nodeModules/.bin/$bin)"
        fi
      done
    fi
  done

  # add bin path entries collected by the python script
  export PATH="$PATH:$nodeModules/.bin"

  # add dependencies to NODE_PATH
  export NODE_PATH="$NODE_PATH:$nodeModules/$packageName/node_modules"

  export HOME=$TMPDIR

  runHook postConfigure
''

View File: default.nix

@@ -0,0 +1,301 @@
{
config,
lib,
dream2nix,
...
}: let
l = lib // builtins;
cfg = config.nodejs-granular;
fetchDreamLockSources =
import ../../../lib/internal/fetchDreamLockSources.nix
{inherit lib;};
getDreamLockSource = import ../../../lib/internal/getDreamLockSource.nix {inherit lib;};
readDreamLock = import ../../../lib/internal/readDreamLock.nix {inherit lib;};
hashPath = import ../../../lib/internal/hashPath.nix {
inherit lib;
inherit (config.deps) runCommandLocal nix;
};
hashFile = import ../../../lib/internal/hashFile.nix {
inherit lib;
inherit (config.deps) runCommandLocal nix;
};
# fetchers
fetchers = {
git = import ../../../lib/internal/fetchers/git {
inherit hashPath;
inherit (config.deps) fetchgit;
};
http = import ../../../lib/internal/fetchers/http {
inherit hashFile lib;
inherit (config.deps.stdenv) mkDerivation;
inherit (config.deps) fetchurl;
};
};
dreamLockLoaded =
readDreamLock {inherit (config.nodejs-package-lock) dreamLock;};
dreamLockInterface = dreamLockLoaded.interface;
inherit (dreamLockInterface) defaultPackageName defaultPackageVersion;
fetchedSources = fetchDreamLockSources {
inherit (dreamLockInterface) defaultPackageName defaultPackageVersion;
inherit (dreamLockLoaded.lock) sources;
inherit fetchers;
};
# name: version: -> store-path
getSource = getDreamLockSource fetchedSources;
inherit
(dreamLockInterface)
getDependencies # name: version: -> [ {name=; version=; } ]
# Attributes
subsystemAttrs # attrset
packageVersions
;
isMainPackage = name: version:
(dreamLockInterface.packages."${name}" or null) == version;
nodejs = config.deps.nodejs;
nodeSources = config.deps.runCommandLocal "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
nodejsDeps =
lib.mapAttrs
(name: versions:
lib.genAttrs
versions
(version:
makeDependencyModule name version))
packageVersions;
# Generates a derivation for a specific package name + version
makeDependencyModule = name: version: {config, ...}: {
imports = [
(commonModule name version)
];
name = lib.replaceStrings ["@" "/"] ["__at__" "__slash__"] name;
inherit version;
env = {
packageName = name;
};
mkDerivation = {
src = getSource name version;
/*
This prevents nixpkgs' setup.sh from running make during the build and
install phases.
Dependencies from npmjs.org are delivered pre-built and cleaned up,
therefore running `make` usually leads to errors.
The problem with this hack is that it can prevent setup hooks from
setting buildPhase and installPhase, because those are already defined
here.
*/
buildPhase = "runHook preBuild && runHook postBuild";
installPhase = "runHook preInstall && runHook postInstall";
};
};
commonModule = name: version: {config, ...}: let
deps = getDependencies name version;
nodeDeps =
lib.forEach
deps
(dep: cfg.deps."${dep.name}"."${dep.version}".public);
passthruDeps =
l.listToAttrs
(l.forEach deps
(dep:
l.nameValuePair
dep.name
cfg.deps."${dep.name}"."${dep.version}".public));
dependenciesJson =
l.toJSON
(lib.listToAttrs
(l.map
(dep: lib.nameValuePair dep.name dep.version)
deps));
in {
deps = {nixpkgs, ...}:
l.mapAttrs (_: l.mkDefault) {
inherit
(nixpkgs)
jq
makeWrapper
moreutils
nodejs
python3
stdenv
;
};
mkDerivation = {
meta =
subsystemAttrs.meta
// {
license =
l.map (name: l.licenses.${name}) subsystemAttrs.meta.license;
};
passthru.dependencies = passthruDeps;
# prevents running into ulimits
passAsFile = ["dependenciesJson" "nodeDeps"];
nativeBuildInputs = [
config.deps.makeWrapper
config.deps.jq
config.deps.nodejs
];
buildInputs = with config.deps; [jq nodejs python3];
preConfigurePhases = ["patchPhaseNodejs"];
preBuildPhases = ["buildPhaseNodejs"];
preInstallPhases = ["installPhaseNodejs"];
# stripping costs performance and doesn't seem beneficial in most scenarios
dontStrip = true;
# TODO: upstream fix to nixpkgs
# example which requires this:
# https://registry.npmjs.org/react-window-infinite-loader/-/react-window-infinite-loader-1.0.7.tgz
unpackCmd =
if
(config.mkDerivation.src or null != null)
&& (lib.hasSuffix ".tgz" config.mkDerivation.src)
then "tar --delay-directory-restore -xf $src"
else null;
unpackPhase = import ./unpackPhase.nix {};
# - installs dependencies into the node_modules directory
# - adds executables of direct node module dependencies to PATH
# - adds the current node module to NODE_PATH
# - sets HOME=$TMPDIR, as this is required by some npm scripts
# TODO: don't install dev dependencies. Load into NODE_PATH instead
configurePhase = import ./configurePhase.nix {
inherit lib nodeDeps;
};
};
env = {
inherit
dependenciesJson
nodeDeps
nodeSources
;
inherit
(config.nodejs-granular)
buildScript
installMethod
runBuild
;
# The python script which is executed in this phase:
#   - ensures that the package is compatible with the current system
#   - ensures the main version in package.json matches the expected one
#   - pins dependency versions in package.json
#     (some npm commands might otherwise trigger networking)
#   - creates symlinks for executables declared in package.json
# Apart from that:
#   - any usage of 'link:' in package.json is replaced with 'file:'
#   - if package-lock.json exists, it is deleted, as it might conflict
#     with the parent package-lock.json
patchPhaseNodejs = ''
# delete package-lock.json as it can lead to conflicts
rm -f package-lock.json
# repair 'link:' -> 'file:'
mv $nodeModules/$packageName/package.json $nodeModules/$packageName/package.json.old
cat $nodeModules/$packageName/package.json.old | sed 's!link:!file\:!g' > $nodeModules/$packageName/package.json
rm $nodeModules/$packageName/package.json.old
# run python script (see comment above):
cp package.json package.json.bak
python $fixPackage \
|| \
# exit code 3 -> the package is incompatible to the current platform
# -> Let the build succeed, but don't create lib/node_modules
if [ "$?" == "3" ]; then
mkdir -p $out
echo "Not compatible with system $system" > $out/error
exit 0
else
exit 1
fi
'';
# Runs the build/install scripts. By default this executes the package's
# preinstall, install and postinstall scripts, or `npm run build` for
# top-level packages.
# A custom command can be used instead by overriding 'buildScript'.
buildPhaseNodejs = import ./buildPhase.nix {
inherit (config.deps) jq moreutils;
};
# Symlinks executables and manual pages to correct directories
installPhaseNodejs = import ./installPhase.nix {
inherit (config.deps) stdenv;
};
# python script to modify some metadata to support installation
# (see the comments on patchPhaseNodejs above)
fixPackage = "${./fix-package.py}";
# script to install (symlink or copy) dependencies.
installDeps = "${./install-deps.py}";
# python script to link bin entries from package.json
linkBins = "${./link-bins.py}";
};
nodejs-granular = {
/*
Install dependencies via symlinks by default.
For the top-level package this is overridden with full copies further
down, as copying reduces errors with build tooling that doesn't cope
well with symlinking.
*/
installMethod = l.mkOptionDefault "symlink";
# only run build on the main package
runBuild = l.mkOptionDefault (isMainPackage name config.version);
# can be overridden to define alternative install command
# (defaults to 'npm run postinstall')
buildScript = l.mkOptionDefault null;
};
};
in {
imports = [
./interface.nix
dream2nix.modules.dream2nix.mkDerivation
(commonModule defaultPackageName defaultPackageVersion)
];
deps = {nixpkgs, ...}:
l.mapAttrs (_: l.mkDefault) {
inherit (nixpkgs) mkShell;
};
env = {
packageName = config.name;
};
mkDerivation = {
passthru.devShell = import ./devShell.nix {
inherit (config.deps) nodejs mkShell;
inherit (config.env) packageName;
pkg = config.public;
};
};
nodejs-granular = {
deps = nodejsDeps;
runBuild = l.mkDefault true;
installMethod = l.mkDefault "copy";
};
}
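The per-dependency derivations generated by makeDependencyModule above are exposed through the `nodejs-granular.deps` option (typed in interface.nix below). A rough sketch of how a consuming module might override a single locked dependency — the package name, version, and patch command here are hypothetical:

{
  nodejs-granular.deps."node-gyp-build"."4.6.0" = {
    # hypothetical fix-up applied to one locked dependency's derivation
    mkDerivation.postPatch = ''
      substituteInPlace package.json --replace '"node": ">=10"' '"node": ">=14"'
    '';
  };
}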

View File: devShell.nix

@@ -0,0 +1,69 @@
/*
devShell allowing for good interop with npm
The shellHook always overwrites existing ./node_modules with a full
flat copy of all transitive dependencies produced by dream2nix from
the lock file.
This allows good interop with npm. npm is still needed to update or
add dependencies. npm can write to the ./node_modules without
any issues and add or replace dependencies.
If npm modifies ./node_modules, then its contents will be a mix of
dream2nix installed packages and npm installed packages until the
devShell is re-entered and dream2nix overwrites the ./node_modules
with a fully reproducible copy again.
*/
{
mkShell,
nodejs,
packageName,
pkg,
}:
mkShell {
buildInputs = [
nodejs
];
shellHook = let
/*
This uses the existing package derivation and modifies it to
disable all phases except the one which creates ./node_modules.
The result is a derivation only generating the node_modules and
.bin directories.
TODO: This is a bit hacky and could be abstracted better.
TODO: Don't always delete all of ./node_modules. Only overwrite
missing or changed modules.
*/
nodeModulesDrv = pkg.overrideAttrs (old: {
installMethod = "copy";
dontPatch = true;
dontBuild = true;
dontInstall = true;
dontFixup = true;
# the configurePhase fails if these variables are not set
d2nPatchPhase = ''
nodeModules=$out/lib/node_modules
mkdir -p $nodeModules/$packageName
cd $nodeModules/$packageName
'';
});
nodeModulesDir = "${nodeModulesDrv}/lib/node_modules/${packageName}/node_modules";
binDir = "${nodeModulesDrv}/lib/node_modules/.bin";
in ''
# re-create the ./node_modules directory
rm -rf ./node_modules
mkdir -p ./node_modules/.bin
cp -r ${nodeModulesDir}/* ./node_modules/
for executablePath in ${binDir}/*; do
binaryName=$(basename $executablePath)
target=$(realpath $executablePath)
echo linking binary $binaryName to nix store: $target
ln -s $target ./node_modules/.bin/$binaryName
done
chmod -R +w ./node_modules
export PATH="$PATH:$(realpath ./node_modules)/.bin"
'';
}
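The devShell defined here is exposed as `passthru.devShell` in default.nix. A minimal sketch of how it might be wired into a flake — the flake layout and the `myPackage` name are assumptions:

{
  # hypothetical flake output; assumes `myPackage` was evaluated with this module set
  devShells.x86_64-linux.default = myPackage.devShell;
}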

View File: fix-package.py

@@ -0,0 +1,74 @@
import json
import os
import sys

with open(os.environ.get("dependenciesJsonPath")) as f:
    available_deps = json.load(f)

with open("package.json", encoding="utf-8-sig") as f:
    package_json = json.load(f)

changed = False

# fail if the package is incompatible with the current platform
if "os" in package_json:
    platform = sys.platform
    if platform not in package_json["os"] or f"!{platform}" in package_json["os"]:
        print(
            f"Package is not compatible with current platform '{platform}'",
            file=sys.stderr,
        )
        exit(3)

# replace the version
# If it is a github dependency referred to by revision,
# we cannot rely on the version inside the package.json.
# In case of an 'unknown' version coming from the dream lock,
# do not override the version from package.json.
version = os.environ.get("version")
if version not in ["unknown", package_json.get("version")]:
    print(
        "WARNING: The version of this package defined by its package.json "
        "doesn't match the version expected by dream2nix."
        "\n -> Replacing version in package.json: "
        f"{package_json.get('version')} -> {version}",
        file=sys.stderr,
    )
    changed = True
    package_json["version"] = version

# pin exact versions
# This is mostly needed to replace git references with exact versions,
# as npm install would otherwise re-fetch these.
if "dependencies" in package_json:
    dependencies = package_json["dependencies"]
    # dependencies can be a list or a dict
    for pname in dependencies:
        if (
            "bundledDependencies" in package_json
            and pname in package_json["bundledDependencies"]
        ):
            continue
        if pname not in available_deps:
            print(
                f"WARNING: Dependency {pname} wanted but not available. Ignoring.",
                file=sys.stderr,
            )
            continue
        version = "unknown" if isinstance(dependencies, list) else dependencies[pname]
        if available_deps[pname] != version:
            changed = True
            print(
                f"package.json: Pinning version '{version}' to '{available_deps[pname]}'"
                f" for dependency '{pname}'",
                file=sys.stderr,
            )
            # write the pinned version back so npm doesn't try to re-fetch it
            if isinstance(dependencies, dict):
                dependencies[pname] = available_deps[pname]

# write changes back to package.json
if changed:
    with open("package.json", "w") as f:
        json.dump(package_json, f, indent=2)

View File: install-deps.py

@@ -0,0 +1,219 @@
import json
import os
import pathlib
import shutil
import subprocess as sp
import sys

pname = os.environ.get("packageName")
version = os.environ.get("version")
bin_dir = f"{os.path.abspath('..')}/.bin"
root = f"{os.path.abspath('.')}/node_modules"
package_json_cache = {}

with open(os.environ.get("nodeDepsPath")) as f:
    nodeDeps = f.read().split()


# read and cache package.json files
def get_package_json(path):
    if path not in package_json_cache:
        if not os.path.isfile(f"{path}/package.json"):
            return None
        with open(f"{path}/package.json", encoding="utf-8-sig") as f:
            package_json_cache[path] = json.load(f)
    return package_json_cache[path]


# symlink the direct dependencies into ./node_modules
def install_direct_dependencies():
    if not os.path.isdir(root):
        os.mkdir(root)
    with open(os.environ.get("nodeDepsPath")) as f:
        deps = f.read().split()
    for dep in deps:
        if os.path.isdir(f"{dep}/lib/node_modules"):
            for module in os.listdir(f"{dep}/lib/node_modules"):
                # ignore hidden directories
                if module[0] == ".":
                    continue
                if module[0] == "@":
                    for submodule in os.listdir(f"{dep}/lib/node_modules/{module}"):
                        pathlib.Path(f"{root}/{module}").mkdir(exist_ok=True)
                        print(f"installing: {module}/{submodule}")
                        origin = os.path.realpath(
                            f"{dep}/lib/node_modules/{module}/{submodule}"
                        )
                        if not os.path.exists(f"{root}/{module}/{submodule}"):
                            os.symlink(origin, f"{root}/{module}/{submodule}")
                else:
                    print(f"installing: {module}")
                    origin = os.path.realpath(f"{dep}/lib/node_modules/{module}")
                    if not os.path.isdir(f"{root}/{module}"):
                        os.symlink(origin, f"{root}/{module}")
                    else:
                        print(f"already exists: {root}/{module}")


# recursively collect dependency directories up to the given depth
def collect_dependencies(root, depth):
    if not os.path.isdir(root):
        return []
    dirs = os.listdir(root)
    currentDeps = []
    for d in dirs:
        if d.rpartition("/")[-1].startswith("@"):
            subdirs = os.listdir(f"{root}/{d}")
            for sd in subdirs:
                cur_dir = f"{root}/{d}/{sd}"
                currentDeps.append(f"{cur_dir}")
        else:
            cur_dir = f"{root}/{d}"
            currentDeps.append(cur_dir)
    if depth == 0:
        return currentDeps
    else:
        depsOfDeps = map(
            lambda dep: collect_dependencies(f"{dep}/node_modules", depth - 1),
            currentDeps,
        )
        result = []
        for deps in depsOfDeps:
            result += deps
        return result


# symlink all non-colliding transitive dependencies into ./node_modules
def symlink_sub_dependencies():
    for dep in collect_dependencies(root, 1):
        # compute module path
        d1, d2 = dep.split("/")[-2:]
        if d1.startswith("@"):
            path = f"{root}/{d1}/{d2}"
        else:
            path = f"{root}/{d2}"
        # check for collision
        if os.path.isdir(path):
            continue
        # create parent dir
        pathlib.Path(os.path.dirname(path)).mkdir(parents=True, exist_ok=True)
        # symlink dependency
        os.symlink(os.path.realpath(dep), path)


# create symlinks for executables (bin entries from package.json)
def symlink_bin(bin_dir, package_location, package_json, force=False):
    if package_json and "bin" in package_json and package_json["bin"]:
        bin = package_json["bin"]

        def link(name, relpath):
            source = f"{bin_dir}/{name}"
            sourceDir = os.path.dirname(source)
            # create parent dir
            pathlib.Path(sourceDir).mkdir(parents=True, exist_ok=True)
            dest = os.path.relpath(f"{package_location}/{relpath}", sourceDir)
            print(f"symlinking executable. dest: {dest}; source: {source}")
            if force and os.path.lexists(source):
                os.remove(source)
            if not os.path.lexists(source):
                os.symlink(dest, source)

        if isinstance(bin, str):
            name = package_json["name"].split("/")[-1]
            link(name, bin)
        else:
            for name, relpath in bin.items():
                link(name, relpath)


# checks if the dependency is already installed in the current or any parent dir
def dependency_satisfied(root, pname, version):
    if root == "/":
        return False
    parent = os.path.dirname(root)
    if os.path.isdir(f"{root}/{pname}"):
        package_json_file = f"{root}/{pname}/package.json"
        if os.path.isfile(package_json_file):
            if version == get_package_json(f"{root}/{pname}").get("version"):
                return True
    return dependency_satisfied(parent, pname, version)


# transforms symlinked dependencies into real copies
def symlinks_to_copies(node_modules):
    sp.run(f"chmod +wx {node_modules}".split())
    for dep in collect_dependencies(node_modules, 0):
        # only handle symlinks to directories
        if not os.path.islink(dep) or os.path.isfile(dep):
            continue
        d1, d2 = dep.split("/")[-2:]
        if d1[0] == "@":
            pname = f"{d1}/{d2}"
            sp.run(f"chmod +wx {node_modules}/{d1}".split())
        else:
            pname = d2
        package_json = get_package_json(dep)
        if package_json is not None:
            version = package_json["version"]
            if dependency_satisfied(os.path.dirname(node_modules), pname, version):
                os.remove(dep)
                continue
        print(f"copying {dep}")
        os.rename(dep, f"{dep}.bac")
        os.mkdir(dep)
        contents = os.listdir(f"{dep}.bac")
        if contents != []:
            for node in contents:
                if os.path.isdir(f"{dep}.bac/{node}"):
                    shutil.copytree(f"{dep}.bac/{node}", f"{dep}/{node}", symlinks=True)
                    if os.path.isdir(f"{dep}/node_modules"):
                        symlinks_to_copies(f"{dep}/node_modules")
                else:
                    shutil.copy(f"{dep}.bac/{node}", f"{dep}/{node}")
        os.remove(f"{dep}.bac")
        symlink_bin(f"{bin_dir}", dep, package_json)


# symlink the bin entries of all direct (dev)dependencies into ./.bin
def symlink_direct_bins():
    deps = []
    package_json_file = get_package_json(f"{os.path.abspath('.')}")
    if package_json_file:
        if (
            "devDependencies" in package_json_file
            and package_json_file["devDependencies"]
        ):
            for dep, _ in package_json_file["devDependencies"].items():
                deps.append(dep)
        if "dependencies" in package_json_file and package_json_file["dependencies"]:
            for dep, _ in package_json_file["dependencies"].items():
                deps.append(dep)
    for name in deps:
        package_location = f"{root}/{name}"
        package_json = get_package_json(package_location)
        symlink_bin(f"{bin_dir}", package_location, package_json, force=True)


# install direct deps
install_direct_dependencies()

# symlink non-colliding transitive deps
symlink_sub_dependencies()

# turn symlinks into copies if requested
if os.environ.get("installMethod") == "copy":
    symlinks_to_copies(root)

# symlink bins of direct deps
symlink_direct_bins()

View File: installPhase.nix

@@ -0,0 +1,30 @@
{
  stdenv,
  # this function needs the following arguments via env
  # packageName,
  # nodeModules,
}: ''
  echo "executing installPhaseNodejs"

  mkdir -p $out/lib
  cp -r $nodeModules $out/lib/node_modules
  nodeModules=$out/lib/node_modules

  cd "$nodeModules/$packageName"

  echo "Symlinking bin entries from package.json"
  python $linkBins

  echo "Symlinking manual pages"
  if [ -d "$nodeModules/$packageName/man" ]
  then
    mkdir -p $out/share
    for dir in "$nodeModules/$packageName/man/"*
    do
      mkdir -p $out/share/man/$(basename "$dir")
      for page in "$dir"/*
      do
        ln -s $page $out/share/man/$(basename "$dir")
      done
    done
  fi
''

View File: interface.nix

@@ -0,0 +1,47 @@
{
  config,
  lib,
  dream2nix,
  packageSets,
  ...
}: let
  l = lib // builtins;
  t = l.types;
in {
  options.nodejs-granular = l.mapAttrs (_: l.mkOption) {
    buildScript = {
      type = t.nullOr (t.oneOf [t.str t.path t.package]);
      description = ''
        A command or script to execute instead of the default build phase
        (`npm run build` or the package's install scripts).
      '';
    };
    installMethod = {
      type = t.enum [
        "symlink"
        "copy"
      ];
      description = ''
        Strategy to use for populating ./node_modules.
        Symlinking is quicker, but often introduces compatibility issues
        with bundlers like webpack and other build tools.
        Copying is slower, but more reliable.
      '';
    };
    runBuild = {
      type = t.bool;
      description = ''
        Whether to run the package's build script (i.e. `npm run build`).
      '';
    };
    deps = {
      type = t.attrsOf (t.attrsOf (t.submodule {
        imports = [
          dream2nix.modules.dream2nix.core
          dream2nix.modules.dream2nix.mkDerivation
          ./interface.nix
        ];
        _module.args = {inherit dream2nix packageSets;};
      }));
    };
  };
}
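A minimal sketch of how a package module might set these options — the custom build command is hypothetical, and the surrounding dream2nix boilerplate (module imports, lock file setup) is assumed to be in place:

{
  nodejs-granular = {
    # copy dependencies into ./node_modules instead of symlinking them
    installMethod = "copy";
    # run the package's build script (`npm run build`)
    runBuild = true;
    # alternatively, replace the default build phase with a custom command
    # buildScript = "npm run build:prod";
  };
}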

View File: link-bins.py

@@ -0,0 +1,43 @@
import json
import os
import pathlib

with open("package.json", encoding="utf-8-sig") as f:
    package_json = json.load(f)

out = os.environ.get("out")


# create symlinks for executables (bin entries from package.json)
def symlink_bin(bin_dir, package_json):
    if "bin" in package_json and package_json["bin"]:
        bin = package_json["bin"]

        def link(name, relpath):
            source = f"{bin_dir}/{name}"
            sourceDir = os.path.dirname(source)
            # make the target executable
            os.chmod(relpath, 0o777)
            # create parent dir
            pathlib.Path(sourceDir).mkdir(parents=True, exist_ok=True)
            dest = os.path.relpath(relpath, sourceDir)
            print(f"symlinking executable. dest: {dest}; source: {source}")
            # if a bin with this name already exists, overwrite it
            if os.path.lexists(source):
                os.remove(source)
            os.symlink(dest, source)

        if isinstance(bin, str):
            name = package_json["name"].split("/")[-1]
            link(name, bin)
        else:
            for name, relpath in bin.items():
                link(name, relpath)


# symlink the current package's executables to $nodeModules/.bin
symlink_bin(f"{out}/lib/node_modules/.bin/", package_json)
# symlink the current package's executables to $out/bin
symlink_bin(f"{out}/bin/", package_json)

View File: unpackPhase.nix

@@ -0,0 +1,46 @@
{}: ''
  runHook preUnpack

  nodeModules=$(realpath ./package)
  export sourceRoot="$nodeModules/$packageName"

  # sometimes tarballs do not end with .tar.??
  unpackFallback(){
    local fn="$1"
    tar xf "$fn"
  }
  unpackCmdHooks+=(unpackFallback)

  unpackFile $src

  # Make the base dir in which the target dependency resides
  mkdir -p "$(dirname "$sourceRoot")"

  # install source
  if [ -f "$src" ]
  then
    # Figure out what directory has been unpacked
    packageDir="$(find . -maxdepth 1 -type d | tail -1)"
    # Restore write permissions
    find "$packageDir" -type d -exec chmod u+x {} \;
    chmod -R u+w -- "$packageDir"
    # Move the extracted tarball into the output folder
    mv -- "$packageDir" "$sourceRoot"
  elif [ -d "$src" ]
  then
    strippedName="$(stripHash $src)"
    # Restore write permissions
    chmod -R u+w -- "$strippedName"
    # Move the extracted directory into the output folder
    mv -- "$strippedName" "$sourceRoot"
  fi

  runHook postUnpack
''