Merge pull request #513 from nix-community/port-nodejs

Port nodejs to drv-parts (v1)
commit 7d21a047a6
DavHau 2023-05-29 19:50:30 +02:00, committed by GitHub
43 changed files with 2796 additions and 23 deletions

src/fetchers/readme.md
View File

@ -0,0 +1,2 @@
These fetchers have been duplicated to /v1/nix/lib/internal/fetchers and are now maintained there.
This copy is kept because some legacy code still depends on it.

View File

@ -10,7 +10,7 @@
fetcherModules =
l.genAttrs
fetcherNames
-    (name: import "${fetchersDir}/${name}" config);
+    (name: import "${fetchersDir}/${name}" (config.pkgs // config));
in {
config = {
fetchers = fetcherModules;

View File

@ -0,0 +1,37 @@
# This is currently only used for legacy modules ported to v1.
# The dream-lock concept might be deprecated together with this module at some
# point.
{lib, ...}: let
l = builtins // lib;
mkDiscovereredProject = {
name,
relPath,
subsystem,
subsystemInfo,
translators,
}: {
inherit
name
relPath
subsystem
subsystemInfo
translators
;
};
mkPathSource = {
path,
rootName,
rootVersion,
} @ args:
args
// {
type = "path";
};
in {
inherit
mkDiscovereredProject
mkPathSource
;
}
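For illustration, `mkPathSource` simply tags its arguments with the source type (values below are hypothetical):

```
mkPathSource {
  path = "node_modules/@scope/pkg"; # hypothetical relative path
  rootName = "my-root";
  rootVersion = "1.0.0";
}
# -> { type = "path"; path = "node_modules/@scope/pkg"; rootName = "my-root"; rootVersion = "1.0.0"; }
```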

View File

@ -0,0 +1,74 @@
# This is currently only used for legacy modules ported to v1.
# The dream-lock concept might be deprecated together with this module at some
# point.
{lib, ...}: let
l = builtins // lib;
fetchSource = {
source,
extract ? false,
fetchers,
}: let
fetcher = fetchers."${source.type}";
fetcherArgs = l.removeAttrs source ["dir" "hash" "type"];
fetcherOutputs = fetcher.outputs fetcherArgs;
maybeArchive = fetcherOutputs.fetched (source.hash or null);
in
if source ? dir
then "${maybeArchive}/${source.dir}"
else maybeArchive;
fetchDreamLockSources = {
# sources attrset from dream lock
defaultPackageName,
defaultPackageVersion,
sources,
fetchers,
sourceRoot ? null,
...
}: let
l = lib // builtins;
fetchedSources =
l.mapAttrs
(name: versions:
l.mapAttrs
(version: source:
if source.type == "unknown"
then "unknown"
else if source.type == "path"
then let
path =
if l.isStorePath (l.concatStringsSep "/" (l.take 4 (l.splitString "/" source.path)))
then source.path
else if name == source.rootName && version == source.rootVersion
then throw "source for ${name}@${version} is referencing itself"
else if source.rootName != null && source.rootVersion != null
then "${fetchedSources."${source.rootName}"."${source.rootVersion}"}/${source.path}"
else if sourceRoot != null
then "${sourceRoot}/${source.path}"
else throw "${name}-${version}: cannot determine path source";
in
l.path {
inherit path;
name = l.strings.sanitizeDerivationName "${name}-${version}-source";
}
else if fetchers ? "${source.type}"
then
fetchSource {
inherit fetchers;
source =
source
// {
pname = source.pname or name;
version = source.version or version;
};
}
else throw "unsupported source type '${source.type}'")
versions)
sources;
# attrset: pname -> path of downloaded source
in
fetchedSources;
in
fetchDreamLockSources

View File

@ -0,0 +1,38 @@
{
pkgs,
lib,
utils,
...
}: {
inputs = [
"url"
];
outputs = {url, ...}: let
b = builtins;
in {
calcHash = algo:
utils.hashFile algo (b.fetchTarball {
inherit url;
});
fetched = hash: let
drv =
if hash != null && lib.stringLength hash == 40
then
pkgs.fetchzip {
inherit url;
sha1 = hash;
}
else
pkgs.fetchzip {
inherit url hash;
};
drvSanitized = drv.overrideAttrs (old: {
name = lib.strings.sanitizeDerivationName old.name;
});
in
drvSanitized;
};
}
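A sketch of how the hash dispatch above behaves, with hypothetical values: a 40-character string is treated as a legacy sha1, anything else is passed through as an SRI-style `hash`:

```
# 40 hex chars -> dispatched to the sha1 branch of fetchzip
fetched "0123456789abcdef0123456789abcdef01234567"

# anything else (e.g. an SRI hash) -> dispatched to the `hash` branch
fetched "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="
```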

View File

@ -0,0 +1,38 @@
{
pkgs,
utils,
...
}: {
inputs = ["pname" "version"];
versionField = "version";
outputs = {
pname,
version,
...
}: let
b = builtins;
# See https://github.com/rust-lang/crates.io-index/blob/master/config.json#L2
url = "https://crates.io/api/v1/crates/${pname}/${version}/download";
in {
calcHash = algo:
utils.hashFile algo (b.fetchurl {
inherit url;
});
fetched = hash: let
fetched = pkgs.fetchurl {
inherit url;
sha256 = hash;
name = "download-${pname}-${version}";
};
in
pkgs.runCommandLocal "unpack-${pname}-${version}" {}
''
mkdir -p $out
tar --strip-components 1 -xzf ${fetched} -C $out
echo '{"package":"${hash}","files":{}}' > $out/.cargo-checksum.json
'';
};
}

View File

@ -0,0 +1,49 @@
{
lib,
mkDerivation,
...
}: let
l = builtins // lib;
# TODO is this really needed? Seems to make builds slower, why not unpack + build?
extractSource = {
source,
dir ? "",
name ? null,
} @ args:
mkDerivation {
name = "${(args.name or source.name or "")}-extracted";
src = source;
inherit dir;
phases = ["unpackPhase"];
dontInstall = true;
dontFixup = true;
# Allow access to the original output of the FOD.
# Some builders like python require the original archive.
passthru.original = source;
unpackCmd =
if l.hasSuffix ".tgz" (source.name or "${source}")
then ''
tar --delay-directory-restore -xf $src
# set executable flag only on directories
chmod -R +X .
''
else null;
# sometimes tarballs do not end with .tar.??
preUnpack = ''
unpackFallback(){
local fn="$1"
tar xf "$fn"
}
unpackCmdHooks+=(unpackFallback)
'';
postUnpack = ''
echo postUnpack
mv "$sourceRoot/$dir" $out
exit
'';
};
in
extractSource
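A usage sketch (the fetchurl call and placeholder hash are illustrative): extracting a fetched npm tarball yields a derivation with the unpacked contents, while `passthru.original` keeps the archive itself:

```
extractSource {
  source = pkgs.fetchurl {
    url = "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz";
    hash = lib.fakeHash; # placeholder, illustration only
  };
}
# -> derivation "lodash-4.17.21.tgz-extracted" containing the unpacked sources
```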

View File

@ -0,0 +1,90 @@
{
utils ? null,
hashPath ? utils.hashPath,
fetchgit,
...
}: let
b = builtins;
# check if a string is a git ref
isGitRef = b.match "refs/(heads|tags)/.*";
# check if a string is a git rev
isGitRev = b.match "[a-f0-9]*";
in {
inputs = [
"url"
"rev"
];
versionField = "rev";
outputs = {
url,
rev,
submodules ? true,
...
} @ inp: let
isRevGitRef = isGitRef rev;
hasGitRef = inp.ref or null != null;
in
if isRevGitRef == null && isGitRev rev == null
then
throw ''
invalid git rev: ${rev}
rev must either be a sha1 revision or "refs/heads/branch-name" or "refs/tags/tag-name"
''
else if hasGitRef && isGitRef inp.ref == null
then
throw ''
invalid git ref: ${inp.ref or null}
ref must be in either "refs/heads/branch-name" or "refs/tags/tag-name" format
''
else let
b = builtins;
refAndRev =
# if the source specifies a ref, then we add both the ref and rev
if hasGitRef
then {inherit (inp) rev ref;}
# otherwise check if the rev is a ref, if it is add to ref
else if isRevGitRef != null
then {ref = inp.rev;}
# if the rev isn't a ref, then it is a rev, so add it there
else {rev = inp.rev;};
in {
calcHash = algo:
hashPath algo
(b.fetchGit
(refAndRev
// {
inherit url;
# disable fetching all refs if the source specifies a ref
allRefs = ! hasGitRef;
inherit submodules;
}));
# git can either be verified via revision or hash.
# In case revision is used for verification, `hash` will be null.
fetched = hash:
if hash == null
then
if ! refAndRev ? rev
then throw "Cannot fetch git repo without integrity. Specify at least 'rev' or 'sha256'"
else
b.fetchGit
(refAndRev
// {
inherit url;
allRefs = true;
inherit submodules;
})
else
fetchgit
(refAndRev
// {
inherit url;
fetchSubmodules = submodules;
sha256 = hash;
});
};
}
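To make the ref/rev handling concrete, here is how `refAndRev` resolves for a few hypothetical inputs:

```
# rev looks like a ref -> fetch by ref
{ url = "https://example.com/repo.git"; rev = "refs/heads/main"; }
# -> refAndRev = { ref = "refs/heads/main"; }

# rev is a sha1 -> fetch by rev
{ url = "https://example.com/repo.git"; rev = "0123456789abcdef0123456789abcdef01234567"; }
# -> refAndRev = { rev = "0123456789abcdef0123456789abcdef01234567"; }

# explicit ref plus sha1 rev -> both are passed to the fetcher
{ url = "https://example.com/repo.git"; rev = "0123456789abcdef0123456789abcdef01234567"; ref = "refs/tags/v1.0.0"; }
# -> refAndRev = { rev = "0123456789abcdef0123456789abcdef01234567"; ref = "refs/tags/v1.0.0"; }
```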

View File

@ -0,0 +1,35 @@
{
pkgs,
lib,
utils,
...
}: {
inputs = [
"owner"
"repo"
"rev"
];
versionField = "rev";
defaultUpdater = "githubNewestReleaseTag";
outputs = {
owner,
repo,
rev,
...
}: let
b = builtins;
in {
calcHash = algo:
utils.hashPath algo (b.fetchTarball {
url = "https://github.com/${owner}/${repo}/tarball/${rev}";
});
fetched = hash:
pkgs.fetchFromGitHub {
inherit owner repo rev hash;
};
};
}

View File

@ -0,0 +1,32 @@
{
pkgs,
utils,
...
}: {
inputs = [
"owner"
"repo"
"rev"
];
versionField = "rev";
outputs = {
owner,
repo,
rev,
...
}: let
b = builtins;
in {
calcHash = algo:
utils.hashPath algo (b.fetchTarball {
url = "https://gitlab.com/${owner}/${repo}/-/archive/${rev}/${repo}-${rev}.tar.gz";
});
fetched = hash:
pkgs.fetchFromGitLab {
inherit owner repo rev hash;
};
};
}

View File

@ -0,0 +1,48 @@
{
utils ? null,
fetchurl,
lib,
hashFile ? utils.hashFile,
mkDerivation,
extractSource ?
import ../extractSource.nix {
inherit lib mkDerivation;
},
...
}: {
inputs = [
"url"
];
outputs = {url, ...}: let
b = builtins;
in {
calcHash = algo:
hashFile algo (b.fetchurl {
inherit url;
});
fetched = hash: let
drv =
if hash != null && lib.stringLength hash == 40
then
fetchurl {
inherit url;
sha1 = hash;
}
else
fetchurl {
inherit url hash;
};
drvSanitized = drv.overrideAttrs (old: {
name = lib.strings.sanitizeDerivationName old.name;
});
extracted = extractSource {
source = drvSanitized;
};
in
extracted;
};
}

View File

@ -0,0 +1,76 @@
{
pkgs,
lib,
utils,
mkDerivation,
extractSource ?
import ../extractSource.nix {
inherit lib mkDerivation;
},
...
}: let
b = builtins;
in rec {
inputs = ["pname" "version"];
versionField = "version";
defaultUpdater = "npmNewestReleaseVersion";
  # because some node packages contain submodules like `@hhhtj/draw.io`,
  # the number of arguments can vary and a custom parser is needed
parseParams = params:
if b.length params == b.length inputs
then
lib.listToAttrs
(lib.forEach
(lib.range 0 ((lib.length inputs) - 1))
(
idx:
lib.nameValuePair
(lib.elemAt inputs idx)
(lib.elemAt params idx)
))
else if b.length params == (b.length inputs) + 1
then
parseParams [
"${b.elemAt params 0}/${b.elemAt params 1}"
(b.elemAt params 2)
]
else
throw ''
Wrong number of arguments provided in shortcut for fetcher 'npm'
Should be npm:${lib.concatStringsSep "/" inputs}
'';
# defaultUpdater = "";
outputs = {
pname,
version,
}: let
b = builtins;
submodule = lib.last (lib.splitString "/" pname);
url = "https://registry.npmjs.org/${pname}/-/${submodule}-${version}.tgz";
in {
calcHash = algo:
utils.hashPath algo (
b.fetchurl {inherit url;}
);
fetched = hash: let
source =
(pkgs.fetchurl {
inherit url;
sha256 = hash;
})
.overrideAttrs (old: {
outputHashMode = "recursive";
});
in
extractSource {
inherit source;
};
};
}
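To illustrate the custom parsing (hypothetical shortcuts): a scoped package produces one extra shortcut segment, which `parseParams` folds back into the pname:

```
parseParams ["lodash" "4.17.21"]
# -> { pname = "lodash"; version = "4.17.21"; }

# the shortcut npm:@hhhtj/draw.io/1.2.3 yields three segments
parseParams ["@hhhtj" "draw.io" "1.2.3"]
# -> { pname = "@hhhtj/draw.io"; version = "1.2.3"; }
```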

View File

@ -0,0 +1,11 @@
{utils, ...}: {
inputs = [
"path"
];
outputs = {path, ...}: {
    calcHash = algo: utils.hashPath algo "${path}";
fetched = hash: "${path}";
};
}

View File

@ -0,0 +1,39 @@
{
lib,
pkgs,
mkDerivation,
extractSource ?
import ../extractSource.nix {
inherit lib mkDerivation;
},
...
}: {
inputs = ["pname" "version"];
versionField = "version";
defaultUpdater = "pypiNewestReleaseVersion";
outputs = {
pname,
version,
extension ? "tar.gz",
}: let
b = builtins;
firstChar = builtins.substring 0 1 pname;
url =
"https://files.pythonhosted.org/packages/source/"
+ "${firstChar}/${pname}/${pname}-${version}.${extension}";
in {
fetched = hash: let
source = pkgs.fetchurl {
inherit url;
sha256 = hash;
};
in
extractSource {
inherit source;
};
};
}

View File

@ -0,0 +1,37 @@
{
pkgs,
utils,
lib,
...
}: {
inputs = ["filename"];
versionField = "version";
defaultUpdater = "pypiNewestReleaseVersion";
outputs = {
filename,
pname,
version,
}: {
fetched = hash:
pkgs.runCommand
filename
{
buildInputs = [
pkgs.curl
pkgs.cacert
pkgs.jq
];
outputHash = hash;
outputHashAlgo = "sha256";
outputHashMode = "flat";
inherit filename pname version;
}
''
url=$(curl "https://pypi.org/pypi/$pname/json" | jq -r ".releases.\"$version\"[] | select(.filename == \"$filename\") | .url")
curl $url --output $out
'';
};
}

View File

@ -0,0 +1,28 @@
# This is currently only used for legacy modules ported to v1.
# The dream-lock concept might be deprecated together with this module at some
# point.
{lib, ...}: let
l = builtins // lib;
getDreamLockSource = fetchedSources: pname: version:
if
fetchedSources
? "${pname}"."${version}"
&& fetchedSources."${pname}"."${version}" != "unknown"
then fetchedSources."${pname}"."${version}"
else
throw ''
The source for ${pname}#${version} is not defined.
This can be fixed via an override. Example:
```
dream2nix.make[Flake]Outputs {
...
sourceOverrides = oldSources: {
"${pname}"."${version}" = builtins.fetchurl { ... };
};
...
}
```
'';
in
getDreamLockSource

View File

@ -0,0 +1,17 @@
{
lib,
runCommandLocal,
nix,
...
}: let
l = builtins // lib;
# hash a file via `nix hash file`
hashFile = algo: path: let
hashFile = runCommandLocal "hash-${algo}" {} ''
${nix}/bin/nix --option experimental-features nix-command hash file ${path} | tr --delete '\n' > $out
'';
in
l.readFile hashFile;
in
hashFile

View File

@ -0,0 +1,17 @@
{
lib,
runCommandLocal,
nix,
...
}: let
l = builtins // lib;
# hash the contents of a path via `nix hash path`
hashPath = algo: path: let
hashPath = runCommandLocal "hash-${algo}" {} ''
${nix}/bin/nix --option experimental-features nix-command hash path ${path} | tr --delete '\n' > $out
'';
in
l.readFile hashPath;
in
hashPath
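A minimal usage sketch (the file name is assumed): because the result is read back from a derivation at evaluation time, using these helpers implies import-from-derivation:

```
let
  hashPath = import ./hashPath.nix {
    # any nixpkgs instance provides these
    inherit lib runCommandLocal nix;
  };
in
  # -> the SRI string printed by `nix hash path ./src`
  hashPath "sha256" ./src
```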

View File

@ -0,0 +1,81 @@
{
lib,
parseSpdxId,
}: let
l = lib // builtins;
in rec {
getMetaFromPackageJson = packageJson:
{license = parseSpdxId (packageJson.license or "");}
// (
l.filterAttrs
(n: v: l.any (on: n == on) ["description" "homepage"])
packageJson
);
getPackageJsonDeps = packageJson: noDev:
(packageJson.dependencies or {})
// (lib.optionalAttrs (! noDev) (packageJson.devDependencies or {}));
getWorkspaceLockFile = tree: workspaceParent: fname: let
# returns the parsed package-lock.json for a given project
dirRelPath = workspaceParent;
packageJson =
(tree.getNodeFromPath "${dirRelPath}/package.json").jsonContent;
hasNoDependencies =
((packageJson.dependencies or {}) == {})
&& ((packageJson.devDependencies or {}) == {})
&& (! packageJson ? workspaces);
in
if hasNoDependencies
then null
else tree.getNodeFromPath "${dirRelPath}/${fname}";
getWorkspacePackageJson = tree: workspaces:
l.genAttrs
workspaces
(wsRelPath:
(tree.getNodeFromPath "${wsRelPath}/package.json").jsonContent);
getWorkspacePackages = tree: workspaces:
lib.mapAttrs'
(wsRelPath: json:
l.nameValuePair
json.name
json.version)
(getWorkspacePackageJson tree workspaces);
identifyGitUrl = url:
l.hasPrefix "git+" url
|| l.match ''^github:.*/.*#.*'' url != null;
parseGitUrl = url: let
githubMatch = l.match ''^github:(.*)/(.*)#(.*)$'' url;
in
if githubMatch != null
then let
owner = l.elemAt githubMatch 0;
repo = l.elemAt githubMatch 1;
rev = l.elemAt githubMatch 2;
in {
url = "https://github.com/${owner}/${repo}";
inherit rev;
}
else let
splitUrlRev = l.splitString "#" url;
rev = l.last splitUrlRev;
urlOnly = l.head splitUrlRev;
in
if l.hasPrefix "git+ssh://" urlOnly
then {
inherit rev;
url = "https://${(l.last (l.splitString "@" url))}";
}
else if l.hasPrefix "git+https://" urlOnly
then {
inherit rev;
url = l.removePrefix "git+" urlOnly;
}
else throw "Cannot parse git url: ${url}";
}
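Concrete examples of the URL forms `parseGitUrl` handles (illustrative values):

```
parseGitUrl "github:prettier/prettier#abc1234"
# -> { url = "https://github.com/prettier/prettier"; rev = "abc1234"; }

parseGitUrl "git+https://example.com/owner/repo.git#abc1234"
# -> { url = "https://example.com/owner/repo.git"; rev = "abc1234"; }
```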

View File

@ -0,0 +1,21 @@
{lib, ...}: let
l = builtins // lib;
idToLicenseKey =
l.mapAttrs'
(n: v: l.nameValuePair (l.toLower (v.spdxId or v.fullName or n)) n)
l.licenses;
# Parses a string like "Unlicense OR MIT" to `["unlicense" "mit"]`
# TODO: this does not parse `AND` or `WITH` or parentheses, so it is
# pretty hacky in how it works. But for most cases this should be okay.
parseSpdxId = _id: let
# some spdx ids might have parentheses around them
id = l.removePrefix "(" (l.removeSuffix ")" _id);
licenseStrings = l.map l.toLower (l.splitString " OR " id);
_licenses = l.map (string: idToLicenseKey.${string} or null) licenseStrings;
licenses = l.filter (license: license != null) _licenses;
in
licenses;
in
parseSpdxId
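For example, against the license set from nixpkgs:

```
parseSpdxId "(Unlicense OR MIT)"
# -> ["unlicense" "mit"]

parseSpdxId "SomeUnknownLicense"
# -> [] (unknown ids are filtered out)
```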

View File

@ -0,0 +1,126 @@
{lib, ...}: let
l = builtins // lib;
/*
Transforms a source tree into a nix attrset for simplicity and performance.
It's simpler to traverse an attrset than having to readDir manually.
It's more performant because it allows parsing a JSON or TOML file by
accessing attributes like `.jsonContent` or `.tomlContent`, which ensures that the file
is only parsed once, even if the parsed content is used in multiple places.
produces this structure:
{
files = {
"package.json" = {
relPath = "package.json"
fullPath = "${source}/package.json"
content = ;
jsonContent = ;
tomlContent = ;
}
};
directories = {
"packages" = {
relPath = "packages";
fullPath = "${source}/packages";
files = {
};
directories = {
};
};
};
}
*/
prepareSourceTree = {
source,
depth ? 10,
}:
prepareSourceTreeInternal source "" "" depth;
readTextFile = file: l.replaceStrings ["\r\n"] ["\n"] (l.readFile file);
prepareSourceTreeInternal = sourceRoot: relPath: name: depth: let
relPath' = relPath;
fullPath' = "${toString sourceRoot}/${relPath}";
current = l.readDir fullPath';
fileNames =
l.filterAttrs (n: v: v == "regular") current;
directoryNames =
l.filterAttrs (n: v: v == "directory") current;
makeNewPath = prefix: name:
if prefix == ""
then name
else "${prefix}/${name}";
directories =
l.mapAttrs
(dname: _:
prepareSourceTreeInternal
sourceRoot
(makeNewPath relPath dname)
dname
(depth - 1))
directoryNames;
files =
l.mapAttrs
(fname: _: rec {
name = fname;
fullPath = "${fullPath'}/${fname}";
relPath = makeNewPath relPath' fname;
content = readTextFile fullPath;
jsonContent = l.fromJSON content;
tomlContent = l.fromTOML content;
})
fileNames;
# returns the tree object of the given sub-path
getNodeFromPath = path: let
cleanPath = l.removePrefix "/" path;
pathSplit = l.splitString "/" cleanPath;
dirSplit = l.init pathSplit;
leaf = l.last pathSplit;
error = throw ''
Failed while trying to navigate to ${path} from ${fullPath'}
'';
dirAttrPath =
l.init
(l.concatMap
(x: [x] ++ ["directories"])
dirSplit);
dir =
if (l.length dirSplit == 0) || dirAttrPath == [""]
then self
else if ! l.hasAttrByPath dirAttrPath directories
then error
else l.getAttrFromPath dirAttrPath directories;
in
if path == ""
then self
else if dir ? directories."${leaf}"
then dir.directories."${leaf}"
else if dir ? files."${leaf}"
then dir.files."${leaf}"
else error;
self =
{
inherit files getNodeFromPath name relPath;
fullPath = fullPath';
}
# stop recursion if depth is reached
// (l.optionalAttrs (depth > 0) {
inherit directories;
});
in
self;
in
prepareSourceTree
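A usage sketch (project path hypothetical): reading a workspace's package.json through the tree instead of calling readFile and fromJSON manually:

```
let
  tree = prepareSourceTree { source = ./my-project; };
  packageJson = (tree.getNodeFromPath "packages/foo/package.json").jsonContent;
in
  packageJson.version
```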

View File

@ -0,0 +1,193 @@
# This is currently only used for legacy modules ported to v1.
# The dream-lock concept might be deprecated together with this module at some
# point.
{lib, ...}: let
l = builtins // lib;
listDirs = path: l.attrNames (l.filterAttrs (n: v: v == "directory") (builtins.readDir path));
subDreamLockNames = dreamLockFile: let
dir = l.dirOf dreamLockFile;
directories = listDirs dir;
dreamLockDirs =
l.filter
(d: l.pathExists "${dir}/${d}/dream-lock.json")
directories;
in
dreamLockDirs;
/*
Ensures that there is an entry in dependencies for each source.
This allows translators to omit the dependency graph entirely
when creating dream-locks.
*/
extendWithEmptyGraph = dreamLockDecomp: let
emptyDependencyGraph =
l.mapAttrs
(name: versions:
l.mapAttrs
(version: source: [])
versions)
dreamLockDecomp.sources;
dependencyGraph =
l.recursiveUpdate
emptyDependencyGraph
dreamLockDecomp.dependencies;
lock =
dreamLockDecomp
// {
dependencies = dependencyGraph;
};
in
lock;
decompressDependencyGraph = compGraph:
l.mapAttrs
(name: versions:
l.mapAttrs
(version: deps:
map
(dep: {
name = l.elemAt dep 0;
version = l.elemAt dep 1;
})
deps)
versions)
compGraph;
decompressDreamLock = comp: let
dependencyGraphDecomp =
decompressDependencyGraph (comp.dependencies or {});
cyclicDependencies =
decompressDependencyGraph (comp.cyclicDependencies or {});
in
comp
// {
decompressed = true;
cyclicDependencies = cyclicDependencies;
dependencies = dependencyGraphDecomp;
};
readDreamLock = {dreamLock} @ args: let
isFile =
l.isPath dreamLock
|| l.isString dreamLock
|| l.isDerivation dreamLock;
lockMaybeCompressed =
if isFile
then l.fromJSON (l.readFile dreamLock)
else dreamLock;
lockRaw =
if lockMaybeCompressed.decompressed or false
then lockMaybeCompressed
else decompressDreamLock lockMaybeCompressed;
lock = extendWithEmptyGraph lockRaw;
subDreamLocks =
if ! isFile
then {}
else let
dir = l.dirOf dreamLock;
in
l.genAttrs
(subDreamLockNames dreamLock)
(d:
readDreamLock
{dreamLock = "${dir}/${d}/dream-lock.json";});
packages = lock._generic.packages;
defaultPackageName = lock._generic.defaultPackage;
defaultPackageVersion = packages."${defaultPackageName}";
subsystemAttrs = lock._subsystem;
sources = lock.sources;
dependencyGraph = lock.dependencies;
allDependencies = let
candidatesList =
l.unique
(l.flatten
(l.mapAttrsToList
(name: versions:
l.flatten (l.attrValues versions))
dependencyGraph));
in
l.foldl'
(all: new:
all
// {
"${new.name}" = all.${new.name} or [] ++ [new.version];
})
{}
candidatesList;
allDependants =
l.mapAttrs
(name: versions: l.attrNames versions)
dependencyGraph;
packageVersions =
l.zipAttrsWith
(name: versions: l.unique (l.flatten versions))
[
allDependants
allDependencies
];
cyclicDependencies = lock.cyclicDependencies;
getSourceSpec = pname: version:
sources."${pname}"."${version}"
or (
throw "The source spec for ${pname}#${version} is not defined in lockfile."
);
getDependencies = pname: version:
l.filter
(dep: ! l.elem dep cyclicDependencies."${pname}"."${version}" or [])
dependencyGraph."${pname}"."${version}" or [];
getCyclicDependencies = pname: version:
cyclicDependencies."${pname}"."${version}" or [];
getRoot = pname: version: let
spec = getSourceSpec pname version;
in
if
(pname == defaultPackageName && version == defaultPackageVersion)
|| spec.type != "path"
then {inherit pname version;}
else {
pname = spec.rootName;
version = spec.rootVersion;
};
in {
inherit lock;
interface = {
inherit
defaultPackageName
defaultPackageVersion
subsystemAttrs
getCyclicDependencies
getDependencies
getSourceSpec
getRoot
packages
packageVersions
subDreamLocks
;
};
};
in
readDreamLock
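To make the compressed format concrete (hypothetical lock content): `decompressDependencyGraph` expands the `[name version]` pairs stored in a compressed dream-lock into attribute sets:

```
decompressDependencyGraph {
  accepts."1.3.8" = [
    ["mime-types" "2.1.35"]
    ["negotiator" "0.6.3"]
  ];
}
# -> {
#   accepts."1.3.8" = [
#     { name = "mime-types"; version = "2.1.35"; }
#     { name = "negotiator"; version = "0.6.3"; }
#   ];
# }
```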

View File

@ -0,0 +1,266 @@
# This is currently only used for legacy modules ported to v1.
# The dream-lock concept might be deprecated together with this module at some
# point.
{lib, ...}: let
l = builtins // lib;
nameVersionPair = name: version: {inherit name version;};
overrideWarning = fields: args:
l.filterAttrs (
name: _:
if l.any (field: name == field) fields
then
l.warn ''
you are trying to pass a "${name}" key from your source
constructor, this will be overridden with a value passed
by dream2nix.
''
false
else true
)
args;
simpleTranslate = func: let
final =
func
{
inherit getDepByNameVer;
inherit dependenciesByOriginalID;
};
getDepByNameVer = name: version:
final.allDependencies."${name}"."${version}" or null;
dependenciesByOriginalID =
l.foldl'
(result: pkgData:
l.recursiveUpdate result {
"${final.getOriginalID pkgData}" = pkgData;
})
{}
serializedPackagesList;
serializedPackagesList = final.serializePackages final.inputData;
dreamLockData = magic final;
magic = {
# values
defaultPackage,
inputData,
location ? "",
mainPackageDependencies,
packages,
subsystemName,
subsystemAttrs,
translatorName,
# functions
serializePackages,
getName,
getVersion,
getSourceType,
sourceConstructors,
createMissingSource ? (name: version: throw "Cannot find source for ${name}:${version}"),
getDependencies ? null,
getOriginalID ? null,
mainPackageSource ? {type = "unknown";},
}: let
allDependencies =
l.foldl'
(result: pkgData:
l.recursiveUpdate result {
"${getName pkgData}" = {
"${getVersion pkgData}" = pkgData;
};
})
{}
serializedPackagesList;
sources =
l.foldl'
(result: pkgData: let
pkgName = getName pkgData;
pkgVersion = getVersion pkgData;
type = getSourceType pkgData;
constructedArgs = sourceConstructors."${type}" pkgData;
constructedArgsKeep =
overrideWarning ["pname" "version"] constructedArgs;
constructedSource =
constructedArgsKeep
// {
inherit type;
pname = pkgName;
version = pkgVersion;
};
skip =
(type == "path")
&& l.isStorePath (l.removeSuffix "/" constructedArgs.path);
in
if skip
then result
else
l.recursiveUpdate result {
"${pkgName}" = {
"${pkgVersion}" =
l.removeAttrs constructedSource ["pname" "version"];
};
})
{}
serializedPackagesList;
dependencyGraph = let
depGraph =
l.mapAttrs
(name: versions:
l.mapAttrs
(version: pkgData: getDependencies pkgData)
versions)
allDependencies;
in
depGraph
// {
"${defaultPackage}" =
depGraph."${defaultPackage}"
or {}
// {
"${packages."${defaultPackage}"}" = mainPackageDependencies;
};
};
allDependencyKeys = let
depsWithDuplicates =
l.flatten
(l.flatten
(l.mapAttrsToList
(name: versions: l.attrValues versions)
dependencyGraph));
in
l.unique depsWithDuplicates;
missingDependencies =
l.flatten
(l.forEach allDependencyKeys
(dep:
if sources ? "${dep.name}"."${dep.version}"
then []
else dep));
generatedSources =
if missingDependencies == []
then {}
else
l.listToAttrs
(l.map
(dep:
l.nameValuePair
"${dep.name}"
{
"${dep.version}" =
createMissingSource dep.name dep.version;
})
missingDependencies);
allSources =
l.recursiveUpdate sources generatedSources;
cyclicDependencies =
# TODO: inefficient! Implement some kind of early cutoff
let
findCycles = node: prevNodes: cycles: let
children = dependencyGraph."${node.name}"."${node.version}";
cyclicChildren =
l.filter
(child: prevNodes ? "${child.name}#${child.version}")
children;
nonCyclicChildren =
l.filter
(child: ! prevNodes ? "${child.name}#${child.version}")
children;
cycles' =
cycles
++ (l.map (child: {
from = node;
to = child;
})
cyclicChildren);
# use set for efficient lookups
prevNodes' =
prevNodes
// {"${node.name}#${node.version}" = null;};
in
if nonCyclicChildren == []
then cycles'
else
l.flatten
(l.map
(child: findCycles child prevNodes' cycles')
nonCyclicChildren);
cyclesList =
findCycles
(nameVersionPair defaultPackage packages."${defaultPackage}")
{}
[];
in
l.foldl'
(cycles: cycle: (
let
existing =
cycles."${cycle.from.name}"."${cycle.from.version}"
or [];
reverse =
cycles."${cycle.to.name}"."${cycle.to.version}"
or [];
in
# if edge or reverse edge already in cycles, do nothing
if
l.elem cycle.from reverse
|| l.elem cycle.to existing
then cycles
else
l.recursiveUpdate
cycles
{
"${cycle.from.name}"."${cycle.from.version}" =
existing ++ [cycle.to];
}
))
{}
cyclesList;
in
{
decompressed = true;
_generic = {
inherit
defaultPackage
location
packages
;
subsystem = subsystemName;
sourcesAggregatedHash = null;
};
# build system specific attributes
_subsystem = subsystemAttrs;
inherit cyclicDependencies;
sources = allSources;
}
// (l.optionalAttrs
(getDependencies != null)
{dependencies = dependencyGraph;});
in
dreamLockData;
in
simpleTranslate

View File

@ -6,6 +6,7 @@
l = lib // builtins;
t = l.types;
in {
-  options = {
+  options = l.mapAttrs (_: l.mkOption) {
# put options here
};
}

View File

@ -24,6 +24,7 @@
.stdout.strip())
lock_path_rel = Path('${cfg.lockFileRel}') # noqa: E501
lock_path = repo_path / lock_path_rel.relative_to(lock_path_rel.anchor)
+lock_path.parent.mkdir(parents=True, exist_ok=True)
def run_refresh_script(script):
@ -31,13 +32,15 @@
subprocess.run(
[script],
check=True, shell=True, env={"out": out_file.name})
-    return json.load(out_file)
+    # open the file again via its name (it might have been replaced)
+    with open(out_file.name) as out:
+        return json.load(out)
def run_refresh_scripts(refresh_scripts):
"""
recursively iterate over a nested dict and replace all values,
executable scripts, with the content of their $out files.
"""
for name, value in refresh_scripts.items():
refresh_scripts[name] = run_refresh_script(value["script"])
@ -48,6 +51,7 @@
with open(lock_path, 'w') as out_file:
json.dump(lock_data, out_file, indent=2)
print(f"lock file written to {out_file.name}")
print("Add this file to git if flakes is used.")
'';
computeFODHash = fod: let
@ -102,7 +106,7 @@
The lock file ${cfg.repoRoot}${cfg.lockFileRel} for drv-parts module '${config.name}' is missing.
To update it using flakes:
nix run -L .#${config.name}.config.lock.refresh
-To update is without flakes:
+To update it without flakes:
bash -c $(nix-build ${config.lock.refresh.drvPath} --no-link)/bin/refresh
'';
@ -110,7 +114,7 @@
The lock file ${cfg.repoRoot}${cfg.lockFileRel} for drv-parts module '${config.name}' does not contain field `${field}`.
To update it using flakes:
nix run -L .#${config.name}.config.lock.refresh
-To update is without flakes:
+To update it without flakes:
bash -c $(nix-build ${config.lock.refresh.drvPath} --no-link)/bin/refresh
'';

View File

@ -0,0 +1,32 @@
{
jq,
moreutils,
}: ''
runHook preBuild
# execute install command
if [ -n "$buildScript" ]; then
if [ -f "$buildScript" ]; then
$buildScript
else
eval "$buildScript"
fi
# by default, only for top level packages, `npm run build` is executed
elif [ -n "$runBuild" ] && [ "$(jq '.scripts.build' ./package.json)" != "null" ]; then
npm run build
else
if [ "$(jq '.scripts.preinstall' ./package.json)" != "null" ]; then
npm --production --offline --nodedir=$nodeSources run preinstall
fi
if [ "$(jq '.scripts.install' ./package.json)" != "null" ]; then
npm --production --offline --nodedir=$nodeSources run install
fi
if [ "$(jq '.scripts.postinstall' ./package.json)" != "null" ]; then
npm --production --offline --nodedir=$nodeSources run postinstall
fi
fi
runHook postBuild
''

View File

@ -0,0 +1,39 @@
{
lib,
nodeDeps,
}: ''
runHook preConfigure
# symlink sub-dependencies as well, as this imitates npm better
python $installDeps
echo "Symlinking transitive executables to $nodeModules/.bin"
for dep in ${toString nodeDeps}; do
binDir=$dep/lib/node_modules/.bin
if [ -e $binDir ]; then
for bin in $(ls $binDir/); do
if [ ! -e $nodeModules/.bin ]; then
mkdir -p $nodeModules/.bin
fi
# symlink might have been already created by install-deps.py
# if installMethod=copy was selected
if [ ! -L $nodeModules/.bin/$bin ]; then
ln -s $binDir/$bin $nodeModules/.bin/$bin
else
echo "won't overwrite existing symlink $nodeModules/.bin/$bin. current target: $(readlink $nodeModules/.bin/$bin)"
fi
done
fi
done
# add bin path entries collected by python script
export PATH="$PATH:$nodeModules/.bin"
# add dependencies to NODE_PATH
export NODE_PATH="$NODE_PATH:$nodeModules/$packageName/node_modules"
export HOME=$TMPDIR
runHook postConfigure
''

View File

@ -0,0 +1,273 @@
{
config,
lib,
dream2nix,
...
}: let
l = lib // builtins;
cfg = config.nodejs-granular;
fetchDreamLockSources =
import ../../../lib/internal/fetchDreamLockSources.nix
{inherit lib;};
getDreamLockSource = import ../../../lib/internal/getDreamLockSource.nix {inherit lib;};
readDreamLock = import ../../../lib/internal/readDreamLock.nix {inherit lib;};
hashPath = import ../../../lib/internal/hashPath.nix {
inherit lib;
inherit (config.deps) runCommandLocal nix;
};
hashFile = import ../../../lib/internal/hashFile.nix {
inherit lib;
inherit (config.deps) runCommandLocal nix;
};
# fetchers
fetchers = {
git = import ../../../lib/internal/fetchers/git {
inherit hashPath;
inherit (config.deps) fetchgit;
};
http = import ../../../lib/internal/fetchers/http {
inherit hashFile lib;
inherit (config.deps.stdenv) mkDerivation;
inherit (config.deps) fetchurl;
};
};
dreamLockLoaded =
readDreamLock {inherit (config.nodejs-package-lock) dreamLock;};
dreamLockInterface = dreamLockLoaded.interface;
fetchedSources = fetchDreamLockSources {
inherit (dreamLockInterface) defaultPackageName defaultPackageVersion;
inherit (dreamLockLoaded.lock) sources;
inherit fetchers;
};
# name: version: -> store-path
getSource = getDreamLockSource fetchedSources;
inherit
(dreamLockInterface)
getDependencies # name: version: -> [ {name=; version=; } ]
# Attributes
subsystemAttrs # attrset
packageVersions
;
isMainPackage = name: version:
(dreamLockInterface.packages."${name}" or null) == version;
nodejs = config.deps.nodejs;
nodeSources = config.deps.runCommandLocal "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
nodejsDeps =
lib.mapAttrs
(name: versions:
lib.genAttrs
versions
(version:
makePackage name version))
packageVersions;
# Generates a derivation for a specific package name + version
makePackage = name: version: {config, ...}: {
imports = [
(commonModule name)
];
name = lib.replaceStrings ["@" "/"] ["__at__" "__slash__"] name;
inherit version;
env = {
packageName = name;
};
mkDerivation = {
src = getSource name version;
};
};
commonModule = name: {config, ...}: let
deps = getDependencies name config.version;
nodeDeps =
lib.forEach
deps
(dep: cfg.nodejsDeps."${dep.name}"."${dep.version}".public);
passthruDeps =
l.listToAttrs
(l.forEach deps
(dep:
l.nameValuePair
dep.name
cfg.nodejsDeps."${dep.name}"."${dep.version}".public));
dependenciesJson =
l.toJSON
(lib.listToAttrs
(l.map
(dep: lib.nameValuePair dep.name dep.version)
deps));
in {
deps = {nixpkgs, ...}:
l.mapAttrs (_: l.mkDefault) {
inherit
(nixpkgs)
jq
makeWrapper
moreutils
nodejs
python3
stdenv
;
};
mkDerivation = {
meta =
subsystemAttrs.meta
// {
license =
l.map (name: l.licenses.${name}) subsystemAttrs.meta.license;
};
passthru.dependencies = passthruDeps;
# prevents running into ulimits
passAsFile = ["dependenciesJson" "nodeDeps"];
nativeBuildInputs = [config.deps.makeWrapper];
buildInputs = with config.deps; [jq nodejs python3];
preConfigurePhases = ["d2nPatchPhase"];
dontStrip = true;
# TODO: upstream fix to nixpkgs
# example which requires this:
# https://registry.npmjs.org/react-window-infinite-loader/-/react-window-infinite-loader-1.0.7.tgz
unpackCmd =
if lib.hasSuffix ".tgz" config.mkDerivation.src
then "tar --delay-directory-restore -xf $src"
else null;
unpackPhase = import ./unpackPhase.nix {};
# - installs dependencies into the node_modules directory
# - adds executables of direct node module dependencies to PATH
# - adds the current node module to NODE_PATH
# - sets HOME=$TMPDIR, as this is required by some npm scripts
# TODO: don't install dev dependencies. Load into NODE_PATH instead
configurePhase = import ./configurePhase.nix {
inherit lib nodeDeps;
};
# Runs the install command which defaults to 'npm run postinstall'.
# Allows using custom install command by overriding 'buildScript'.
buildPhase = import ./buildPhase.nix {
inherit (config.deps) jq moreutils;
};
# Symlinks executables and manual pages to correct directories
installPhase = import ./installPhase.nix {
inherit (config.deps) stdenv;
};
};
env = {
inherit
dependenciesJson
nodeDeps
nodeSources
;
# The python script which is executed in this phase:
# - ensures that the package is compatible to the current system
# - ensures the main version in package.json matches the expected
# - pins dependency versions in package.json
# (some npm commands might otherwise trigger networking)
# - creates symlinks for executables declared in package.json
# Apart from that:
# - Any usage of 'link:' in package.json is replaced with 'file:'
# - If package-lock.json exists, it is deleted, as it might conflict
#   with the parent package-lock.json; keeping it costs performance
#   and doesn't seem beneficial in most scenarios
d2nPatchPhase = ''
# delete package-lock.json as it can lead to conflicts
rm -f package-lock.json
# repair 'link:' -> 'file:'
mv $nodeModules/$packageName/package.json $nodeModules/$packageName/package.json.old
cat $nodeModules/$packageName/package.json.old | sed 's!link:!file\:!g' > $nodeModules/$packageName/package.json
rm $nodeModules/$packageName/package.json.old
# run python script (see comment above):
cp package.json package.json.bak
python $fixPackage \
|| \
# exit code 3 -> the package is incompatible to the current platform
# -> Let the build succeed, but don't create lib/node_modules
if [ "$?" == "3" ]; then
mkdir -p $out
echo "Not compatible with system $system" > $out/error
exit 0
else
exit 1
fi
'';
# python script to modify some metadata to support installation
# (see comments below on d2nPatchPhase)
fixPackage = "${./fix-package.py}";
# script to install (symlink or copy) dependencies.
installDeps = "${./install-deps.py}";
# python script to link bin entries from package.json
linkBins = "${./link-bins.py}";
};
nodejs-granular = {
/*
For top-level packages install dependencies as full copies, as this
reduces errors with build tooling that doesn't cope well with
symlinking.
*/
installMethod =
if isMainPackage name config.version
then "copy"
else "symlink";
# only run build on the main package
runBuild = isMainPackage name config.version;
# can be overridden to define alternative install command
# (defaults to 'npm run postinstall')
buildScript = null;
};
};
in {
imports = [
./interface.nix
dream2nix.modules.drv-parts.mkDerivation
(commonModule config.name)
];
deps = {nixpkgs, ...}: {
inherit (nixpkgs) mkShell;
};
env = {
packageName = config.name;
};
mkDerivation = {
passthru.devShell = import ./devShell.nix {
inherit (config.deps) nodejs mkShell;
inherit (config.env) packageName;
pkg = config.public;
};
};
nodejs-granular = {
inherit nodejsDeps;
};
}

View File

@ -0,0 +1,69 @@
/*
devShell allowing for good interop with npm
The shellHook always overwrites existing ./node_modules with a full
flat copy of all transitive dependencies produced by dream2nix from
the lock file.
This allows good interop with npm. npm is still needed to update or
add dependencies. npm can write to the ./node_modules without
any issues and add or replace dependencies.
If npm modifies ./node_modules, then its contents will be a mix of
dream2nix installed packages and npm installed packages until the
devShell is re-entered and dream2nix overwrites the ./node_modules
with a fully reproducible copy again.
*/
{
mkShell,
nodejs,
packageName,
pkg,
}:
mkShell {
buildInputs = [
nodejs
];
shellHook = let
/*
This uses the existing package derivation and modifies it to
disable all phases but the one which creates the ./node_modules.
The result is a derivation only generating the node_modules and
.bin directories.
TODO: This is a bit hacky and could be abstracted better
TODO: Don't always delete all of ./node_modules. Only overwrite
missing or changed modules.
*/
nodeModulesDrv = pkg.overrideAttrs (old: {
installMethod = "copy";
dontPatch = true;
dontBuild = true;
dontInstall = true;
dontFixup = true;
# the configurePhase fails if these variables are not set
d2nPatchPhase = ''
nodeModules=$out/lib/node_modules
mkdir -p $nodeModules/$packageName
cd $nodeModules/$packageName
'';
});
nodeModulesDir = "${nodeModulesDrv}/lib/node_modules/${packageName}/node_modules";
binDir = "${nodeModulesDrv}/lib/node_modules/.bin";
in ''
# re-create the ./node_modules directory
rm -rf ./node_modules
mkdir -p ./node_modules/.bin
cp -r ${nodeModulesDir}/* ./node_modules/
for executablePath in ${binDir}/*; do
binaryName=$(basename $executablePath)
target=$(realpath $executablePath)
echo linking binary $binaryName to nix store: $target
ln -s $target ./node_modules/.bin/$binaryName
done
chmod -R +w ./node_modules
export PATH="$PATH:$(realpath ./node_modules)/.bin"
'';
}

View File

@ -0,0 +1,74 @@
import json
import os
import pathlib
import sys
with open(os.environ.get("dependenciesJsonPath")) as f:
available_deps = json.load(f)
with open("package.json", encoding="utf-8-sig") as f:
package_json = json.load(f)
changed = False
# fail if platform incompatible
if "os" in package_json:
platform = sys.platform
if platform not in package_json["os"] or f"!{platform}" in package_json["os"]:
print(
f"Package is not compatible with current platform '{platform}'",
file=sys.stderr,
)
exit(3)
# replace version
# If it is a github dependency referred to by revision,
# we cannot rely on the version inside the package.json.
# In case of an 'unknown' version coming from the dream lock,
# do not override the version from package.json
version = os.environ.get("version")
if version not in ["unknown", package_json.get("version")]:
print(
"WARNING: The version of this package defined by its package.json "
"doesn't match the version expected by dream2nix."
"\n -> Replacing version in package.json: "
f"{package_json.get('version')} -> {version}",
file=sys.stderr,
)
changed = True
package_json["version"] = version
# pinpoint exact versions
# This is mostly needed to replace git references with exact versions,
# as NPM install will otherwise re-fetch these
if "dependencies" in package_json:
dependencies = package_json["dependencies"]
# dependencies can be a list or dict
for pname in dependencies:
if (
"bundledDependencies" in package_json
and pname in package_json["bundledDependencies"]
):
continue
if pname not in available_deps:
print(
f"WARNING: Dependency {pname} wanted but not available. Ignoring.",
file=sys.stderr,
)
continue
version = "unknown" if isinstance(dependencies, list) else dependencies[pname]
if available_deps[pname] != version:
version = available_deps[pname]
changed = True
print(
f"package.json: Pinning version '{version}' to '{available_deps[pname]}'"
f" for dependency '{pname}'",
file=sys.stderr,
)
# write changes to package.json
if changed:
with open("package.json", "w") as f:
json.dump(package_json, f, indent=2)

View File

@ -0,0 +1,220 @@
import json
import os
import pathlib
import shutil
import subprocess as sp
import sys
pname = os.environ.get("packageName")
version = os.environ.get("version")
bin_dir = f"{os.path.abspath('..')}/.bin"
root = f"{os.path.abspath('.')}/node_modules"
package_json_cache = {}
with open(os.environ.get("nodeDepsPath")) as f:
nodeDeps = f.read().split()
def get_package_json(path):
if path not in package_json_cache:
if not os.path.isfile(f"{path}/package.json"):
return None
with open(f"{path}/package.json", encoding="utf-8-sig") as f:
package_json_cache[path] = json.load(f)
return package_json_cache[path]
def install_direct_dependencies():
if not os.path.isdir(root):
os.mkdir(root)
with open(os.environ.get("nodeDepsPath")) as f:
deps = f.read().split()
for dep in deps:
if os.path.isdir(f"{dep}/lib/node_modules"):
for module in os.listdir(f"{dep}/lib/node_modules"):
# ignore hidden directories
if module[0] == ".":
continue
if module[0] == "@":
for submodule in os.listdir(f"{dep}/lib/node_modules/{module}"):
pathlib.Path(f"{root}/{module}").mkdir(exist_ok=True)
print(f"installing: {module}/{submodule}")
origin = os.path.realpath(
f"{dep}/lib/node_modules/{module}/{submodule}"
)
if not os.path.exists(f"{root}/{module}/{submodule}"):
os.symlink(origin, f"{root}/{module}/{submodule}")
else:
print(f"installing: {module}")
origin = os.path.realpath(f"{dep}/lib/node_modules/{module}")
if not os.path.isdir(f"{root}/{module}"):
os.symlink(origin, f"{root}/{module}")
else:
print(f"already exists: {root}/{module}")
def collect_dependencies(root, depth):
if not os.path.isdir(root):
return []
dirs = os.listdir(root)
currentDeps = []
for d in dirs:
if d.rpartition("/")[-1].startswith("@"):
subdirs = os.listdir(f"{root}/{d}")
for sd in subdirs:
cur_dir = f"{root}/{d}/{sd}"
currentDeps.append(f"{cur_dir}")
else:
cur_dir = f"{root}/{d}"
currentDeps.append(cur_dir)
if depth == 0:
return currentDeps
else:
depsOfDeps = map(
lambda dep: collect_dependencies(f"{dep}/node_modules", depth - 1),
currentDeps,
)
result = []
for deps in depsOfDeps:
result += deps
return result
def symlink_sub_dependencies():
for dep in collect_dependencies(root, 1):
# compute module path
d1, d2 = dep.split("/")[-2:]
if d1.startswith("@"):
path = f"{root}/{d1}/{d2}"
else:
path = f"{root}/{d2}"
# check for collision
if os.path.isdir(path):
continue
# create parent dir
pathlib.Path(os.path.dirname(path)).mkdir(parents=True, exist_ok=True)
# symlink dependency
os.symlink(os.path.realpath(dep), path)
# create symlinks for executables (bin entries from package.json)
def symlink_bin(bin_dir, package_location, package_json, force=False):
if package_json and "bin" in package_json and package_json["bin"]:
bin = package_json["bin"]
def link(name, relpath):
source = f"{bin_dir}/{name}"
sourceDir = os.path.dirname(source)
# create parent dir
pathlib.Path(sourceDir).mkdir(parents=True, exist_ok=True)
dest = os.path.relpath(f"{package_location}/{relpath}", sourceDir)
print(f"symlinking executable. dest: {dest}; source: {source}")
if force and os.path.lexists(source):
os.remove(source)
if not os.path.lexists(source):
os.symlink(dest, source)
if isinstance(bin, str):
name = package_json["name"].split("/")[-1]
link(name, bin)
else:
for name, relpath in bin.items():
link(name, relpath)
# checks if dependency is already installed in the current or parent dir.
def dependency_satisfied(root, pname, version):
if root == "/":
return False
parent = os.path.dirname(root)
if os.path.isdir(f"{root}/{pname}"):
package_json_file = f"{root}/{pname}/package.json"
if os.path.isfile(package_json_file):
if version == get_package_json(f"{root}/{pname}").get("version"):
return True
return dependency_satisfied(parent, pname, version)
# transforms symlinked dependencies into real copies
def symlinks_to_copies(node_modules):
sp.run(f"chmod +wx {node_modules}".split())
for dep in collect_dependencies(node_modules, 0):
# only handle symlinks to directories
if not os.path.islink(dep) or os.path.isfile(dep):
continue
d1, d2 = dep.split("/")[-2:]
if d1[0] == "@":
pname = f"{d1}/{d2}"
sp.run(f"chmod +wx {node_modules}/{d1}".split())
else:
pname = d2
package_json = get_package_json(dep)
if package_json is not None:
version = package_json["version"]
if dependency_satisfied(os.path.dirname(node_modules), pname, version):
os.remove(dep)
continue
print(f"copying {dep}")
os.rename(dep, f"{dep}.bac")
os.mkdir(dep)
contents = os.listdir(f"{dep}.bac")
if contents != []:
for node in contents:
if os.path.isdir(f"{dep}.bac/{node}"):
shutil.copytree(f"{dep}.bac/{node}", f"{dep}/{node}", symlinks=True)
if os.path.isdir(f"{dep}/node_modules"):
symlinks_to_copies(f"{dep}/node_modules")
else:
shutil.copy(f"{dep}.bac/{node}", f"{dep}/{node}")
os.remove(f"{dep}.bac")
symlink_bin(f"{bin_dir}", dep, package_json)
def symlink_direct_bins():
deps = []
package_json_file = get_package_json(f"{os.path.abspath('.')}")
if package_json_file:
if (
"devDependencies" in package_json_file
and package_json_file["devDependencies"]
):
for dep, _ in package_json_file["devDependencies"].items():
deps.append(dep)
if "dependencies" in package_json_file and package_json_file["dependencies"]:
for dep, _ in package_json_file["dependencies"].items():
deps.append(dep)
for name in deps:
package_location = f"{root}/{name}"
package_json = get_package_json(package_location)
symlink_bin(f"{bin_dir}", package_location, package_json, force=True)
# install direct deps
install_direct_dependencies()
# symlink non-colliding deps
symlink_sub_dependencies()
# symlinks to copies
if os.environ.get("installMethod") == "copy":
symlinks_to_copies(root)
# symlink direct deps bins
symlink_direct_bins()

View File

@ -0,0 +1,31 @@
{
stdenv,
# this function needs the following arguments via env
# packageName,
# nodeModules,
}: ''
runHook preInstall
mkdir -p $out/lib
cp -r $nodeModules $out/lib/node_modules
nodeModules=$out/lib/node_modules
cd "$nodeModules/$packageName"
echo "Symlinking bin entries from package.json"
python $linkBins
echo "Symlinking manual pages"
if [ -d "$nodeModules/$packageName/man" ]
then
mkdir -p $out/share
for dir in "$nodeModules/$packageName/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
runHook postInstall
''

View File

@ -0,0 +1,47 @@
{
config,
lib,
dream2nix,
packageSets,
...
}: let
l = lib // builtins;
t = l.types;
in {
options.nodejs-granular = l.mapAttrs (_: l.mkOption) {
buildScript = {
type = t.nullOr (t.oneOf [t.str t.path t.package]);
description = ''
A command or script to execute instead of `npm run build`.
Is only executed if `runBuild = true`.
'';
};
installMethod = {
type = t.enum [
"symlink"
"copy"
];
description = ''
Strategy to use for populating ./node_modules.
Symlinking is quicker, but often introduces compatibility issues with bundlers like webpack and other build tools.
Copying is slow, but more reliable.
'';
};
runBuild = {
type = t.bool;
description = ''
Whether to run a package's build script (aka. `npm run build`)
'';
};
nodejsDeps = {
type = t.attrsOf (t.attrsOf (t.submodule {
imports = [
dream2nix.modules.drv-parts.core
dream2nix.modules.drv-parts.mkDerivation
./interface.nix
];
_module.args = {inherit dream2nix packageSets;};
}));
};
};
}
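A sketch of how a consuming package module might set these options (values hypothetical):

```
{
  nodejs-granular = {
    # full copies are safer with bundlers like webpack
    installMethod = "copy";
    # run the package's build script
    runBuild = true;
    # hypothetical custom build command instead of `npm run build`
    buildScript = "npm run build:prod";
  };
}
```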

View File

@ -0,0 +1,42 @@
import json
import os
import pathlib
with open("package.json", encoding="utf-8-sig") as f:
package_json = json.load(f)
out = os.environ.get("out")
# create symlinks for executables (bin entries from package.json)
def symlink_bin(bin_dir, package_json):
if "bin" in package_json and package_json["bin"]:
bin = package_json["bin"]
def link(name, relpath):
source = f"{bin_dir}/{name}"
sourceDir = os.path.dirname(source)
# make target executable
os.chmod(relpath, 0o777)
# create parent dir
pathlib.Path(sourceDir).mkdir(parents=True, exist_ok=True)
dest = os.path.relpath(relpath, sourceDir)
print(f"symlinking executable. dest: {dest}; source: {source}")
# if a bin with this name exists, overwrite
if os.path.lexists(source):
os.remove(source)
os.symlink(dest, source)
if isinstance(bin, str):
name = package_json["name"].split("/")[-1]
link(name, bin)
else:
for name, relpath in bin.items():
link(name, relpath)
# symlink current packages executables to $nodeModules/.bin
symlink_bin(f"{out}/lib/node_modules/.bin/", package_json)
# symlink current packages executables to $out/bin
symlink_bin(f"{out}/bin/", package_json)

View File

@ -0,0 +1,46 @@
{}: let
in ''
runHook preUnpack
nodeModules=$(realpath ./package)
export sourceRoot="$nodeModules/$packageName"
# sometimes tarballs do not end with .tar.??
unpackFallback(){
local fn="$1"
tar xf "$fn"
}
unpackCmdHooks+=(unpackFallback)
unpackFile $src
# First create the base dir in which the target dependency resides
mkdir -p "$(dirname "$sourceRoot")"
# install source
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w -- "$packageDir"
# Move the extracted tarball into the output folder
mv -- "$packageDir" "$sourceRoot"
elif [ -d "$src" ]
then
strippedName="$(stripHash $src)"
# Restore write permissions
chmod -R u+w -- "$strippedName"
# Move the extracted directory into the output folder
mv -- "$strippedName" "$sourceRoot"
fi
runHook postUnpack
''

View File

@ -0,0 +1,70 @@
{
config,
lib,
...
}: let
l = lib // builtins;
cfg = config.nodejs-package-json;
writers = import ../../../pkgs/writers {
inherit lib;
inherit
(config.deps)
bash
coreutils
gawk
path
writeScript
writeScriptBin
;
};
npmArgs = l.concatStringsSep " " (map (arg: "'${arg}'") cfg.npmArgs);
in {
imports = [
./interface.nix
../nodejs-package-lock
../lock
];
config = {
deps = {nixpkgs, ...}:
l.mapAttrs (_: l.mkDefault) {
inherit
(nixpkgs)
bash
coreutils
gawk
path
writeScript
writeScriptBin
;
inherit (nixpkgs.nodePackages) npm;
};
lock.fields.package-lock.script =
writers.writePureShellScript
(with config.deps; [
coreutils
npm
])
''
source=${cfg.source}
pushd $TMPDIR
cp -r $source/* ./
chmod -R +w ./
rm -f package-lock.json
npm install --package-lock-only ${npmArgs}
mv package-lock.json $out
popd
'';
nodejs-package-lock = {
packageLockFile = null;
packageLock = l.mkForce config.lock.content.package-lock;
};
};
}

View File

@ -0,0 +1,24 @@
{
config,
lib,
...
}: let
l = lib // builtins;
t = l.types;
in {
imports = [
../nodejs-package-lock/interface.nix
];
options.nodejs-package-json = l.mapAttrs (_: l.mkOption) {
source = {
type = t.either t.path t.package;
description = "Source of the package";
default = config.mkDerivation.src;
};
npmArgs = {
type = t.listOf t.str;
description = "extra arguments to pass to 'npm install'";
default = [];
};
};
}

View File

@ -0,0 +1,53 @@
{
config,
lib,
...
}: let
l = lib // builtins;
cfg = config.nodejs-package-lock;
dreamLockUtils = import ../../../lib/internal/dreamLockUtils.nix {inherit lib;};
nodejsUtils = import ../../../lib/internal/nodejsUtils.nix {inherit lib parseSpdxId;};
parseSpdxId = import ../../../lib/internal/parseSpdxId.nix {inherit lib;};
prepareSourceTree = import ../../../lib/internal/prepareSourceTree.nix {inherit lib;};
simpleTranslate = import ../../../lib/internal/simpleTranslate.nix {inherit lib;};
translate = import ./translate.nix {
inherit lib dreamLockUtils nodejsUtils parseSpdxId simpleTranslate;
};
dreamLock = translate {
projectName = config.name;
projectRelPath = "";
workspaces = [];
workspaceParent = "";
    source = cfg.source;
tree = prepareSourceTree {source = cfg.source;};
noDev = ! cfg.withDevDependencies;
nodejs = "unknown";
inherit (cfg) packageJson packageLock;
};
in {
imports = [
./interface.nix
];
# declare external dependencies
deps = {nixpkgs, ...}: {
inherit
(nixpkgs)
fetchgit
fetchurl
nix
runCommandLocal
;
};
nodejs-package-lock = {
inherit dreamLock;
packageJson = l.fromJSON (l.readFile cfg.packageJsonFile);
packageLock =
if cfg.packageLockFile != null
then l.fromJSON (l.readFile cfg.packageLockFile)
else lib.mkDefault {};
};
}

View File

@ -0,0 +1,60 @@
{
config,
options,
lib,
...
}: let
l = lib // builtins;
t = l.types;
cfg = config.nodejs-package-lock;
in {
options.nodejs-package-lock = l.mapAttrs (_: l.mkOption) {
dreamLock = {
type = t.attrs;
internal = true;
description = "The content of the dream2nix generated lock file";
};
packageJsonFile = {
type = t.path;
description = ''
The package.json file to use.
'';
default = cfg.source + /package.json;
};
packageJson = {
type = t.attrs;
description = "The content of the package.json";
};
packageLockFile = {
type = t.nullOr t.path;
description = ''
The package-lock.json file to use.
'';
default = cfg.source + /package-lock.json;
};
packageLock = {
type = t.attrs;
description = "The content of the package-lock.json";
};
source = {
type = t.either t.path t.package;
description = "Source of the package";
default = config.mkDerivation.src;
};
withDevDependencies = {
type = t.bool;
default = true;
description = ''
Whether to include development dependencies.
Usually it's a bad idea to disable this, as development dependencies can contain important build time dependencies.
'';
};
workspaces = {
type = t.listOf t.str;
description = ''
Workspaces to include.
Defaults to the ones defined in package.json.
'';
};
};
}

View File

@ -0,0 +1,269 @@
{
lib,
nodejsUtils,
dreamLockUtils,
simpleTranslate,
...
}: let
l = lib // builtins;
getPackageLockPath = tree: workspaceParent: let
parent = workspaceParent;
node = tree.getNodeFromPath parent;
in
if node.files ? "npm-shrinkwrap.json"
then "npm-shrinkwrap.json"
else "package-lock.json";
translate = {
projectName,
projectRelPath,
workspaces ? [],
workspaceParent ? projectRelPath,
source,
tree,
# translator args
noDev,
nodejs,
packageLock,
packageJson,
...
} @ args: let
b = builtins;
noDev = args.noDev;
name = projectName;
tree = args.tree.getNodeFromPath projectRelPath;
relPath = projectRelPath;
source = "${args.source}/${relPath}";
packageVersion = packageJson.version or "unknown";
packageLockDeps =
if packageLock.lockfileVersion < 3
then packageLock.dependencies or {}
else
throw ''
package-lock.json files with version greater than 2 are not supported.
'';
rootDependencies = packageLockDeps;
parsedDependencies = packageLockDeps;
identifyGitSource = dependencyObject:
# TODO: when integrity is there, and git url is github then use tarball instead
# ! (dependencyObject ? integrity) &&
nodejsUtils.identifyGitUrl dependencyObject.version;
getVersion = dependencyObject: let
# example: "version": "npm:@tailwindcss/postcss7-compat@2.2.4",
npmMatch = b.match ''^npm:.*@(.*)$'' dependencyObject.version;
in
if npmMatch != null
then b.elemAt npmMatch 0
else if identifyGitSource dependencyObject
then "0.0.0-rc.${b.substring 0 8 (nodejsUtils.parseGitUrl dependencyObject.version).rev}"
else if lib.hasPrefix "file:" dependencyObject.version
then let
path = getPath dependencyObject;
in
if ! (l.pathExists "${source}/${path}/package.json")
then
throw ''
The lock file references a sub-package residing at '${source}/${path}',
but that directory doesn't exist or doesn't contain a package.json
The reason might be that devDependencies are not included in this package release.
Possible solutions:
- get full package source via git and translate from there
- disable devDependencies by passing `noDev` to the translator
''
else
(
b.fromJSON
(b.readFile "${source}/${path}/package.json")
)
.version
else if lib.hasPrefix "https://" dependencyObject.version
then "unknown"
else dependencyObject.version;
getPath = dependencyObject:
lib.removePrefix "file:" dependencyObject.version;
pinVersions = dependencies: parentScopeDeps:
lib.mapAttrs
(
pname: pdata: let
selfScopeDeps = parentScopeDeps // dependencies;
requires = pdata.requires or {};
dependencies = pdata.dependencies or {};
# this was required in order to fix .#resolveImpure for this project:
# https://gitlab.com/Shinobi-Systems/Shinobi/-/commit/a2faa40ab0e9952ff6a7fcf682534171614180c1
filteredRequires =
l.filterAttrs
(name: spec:
if selfScopeDeps ? ${name}
then true
else
l.trace
''
WARNING: could not find dependency ${name} in ${getPackageLockPath args.tree workspaceParent}
This might be expected for bundled dependencies of sub-dependencies.
''
false)
requires;
in
pdata
// {
depsExact =
lib.forEach
(lib.attrNames filteredRequires)
(reqName: {
name = reqName;
version = getVersion selfScopeDeps."${reqName}";
});
dependencies = pinVersions dependencies selfScopeDeps;
}
)
dependencies;
pinnedRootDeps =
pinVersions rootDependencies rootDependencies;
createMissingSource = name: version: {
type = "http";
url = "https://registry.npmjs.org/${name}/-/${name}-${version}.tgz";
};
in
simpleTranslate
({
getDepByNameVer,
dependenciesByOriginalID,
...
}: rec {
translatorName = name;
location = relPath;
# values
inputData = pinnedRootDeps;
defaultPackage = projectName;
packages =
{"${defaultPackage}" = packageVersion;}
// (nodejsUtils.getWorkspacePackages tree workspaces);
mainPackageDependencies =
lib.mapAttrsToList
(pname: pdata: {
name = pname;
version = getVersion pdata;
})
(lib.filterAttrs
(pname: pdata: ! (pdata.dev or false) || ! noDev)
parsedDependencies);
subsystemName = "nodejs";
subsystemAttrs = {
nodejsVersion = b.toString args.nodejs;
meta = nodejsUtils.getMetaFromPackageJson packageJson;
};
# functions
serializePackages = inputData: let
serialize = inputData:
lib.mapAttrsToList # returns list of lists
(pname: pdata:
[
(pdata
// {
inherit pname;
depsExact =
lib.filter
(req: (! (pdata.dependencies."${req.name}".bundled or false)))
pdata.depsExact or [];
})
]
++ (lib.optionals (pdata ? dependencies)
(lib.flatten
(serialize
(lib.filterAttrs
(pname: data: ! data.bundled or false)
pdata.dependencies)))))
inputData;
in
lib.filter
(pdata:
! noDev || ! (pdata.dev or false))
(lib.flatten (serialize inputData));
getName = dependencyObject: dependencyObject.pname;
inherit getVersion;
getSourceType = dependencyObject:
if identifyGitSource dependencyObject
then "git"
else if
(lib.hasPrefix "file:" dependencyObject.version)
|| (
(! lib.hasPrefix "https://" dependencyObject.version)
&& (! dependencyObject ? resolved)
)
then "path"
else "http";
sourceConstructors = {
git = dependencyObject:
nodejsUtils.parseGitUrl dependencyObject.version;
http = dependencyObject:
if lib.hasPrefix "https://" dependencyObject.version
then rec {
version = getVersion dependencyObject;
url = dependencyObject.version;
hash = dependencyObject.integrity;
}
else if dependencyObject.resolved == false
then
(createMissingSource
(getName dependencyObject)
(getVersion dependencyObject))
// {
hash = dependencyObject.integrity;
}
else rec {
url = dependencyObject.resolved;
hash = dependencyObject.integrity;
};
path = dependencyObject:
# in case of an entry with missing resolved field
if ! lib.hasPrefix "file:" dependencyObject.version
then
dreamLockUtils.mkPathSource {
path = let
module = l.elemAt (l.splitString "/" dependencyObject.pname) 0;
in "node_modules/${module}";
rootName = projectName;
rootVersion = packageVersion;
}
# in case of a "file:" entry
else
dreamLockUtils.mkPathSource {
path = getPath dependencyObject;
rootName = projectName;
rootVersion = packageVersion;
};
};
getDependencies = dependencyObject:
dependencyObject.depsExact;
});
in
translate
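Illustrating `getVersion` on the version-string forms it distinguishes (hypothetical lock entries):

```
getVersion { version = "npm:@tailwindcss/postcss7-compat@2.2.4"; }
# -> "2.2.4" (npm alias)

getVersion { version = "github:owner/repo#0123456789abcdef0123456789abcdef01234567"; }
# -> "0.0.0-rc.01234567" (synthetic version derived from the git rev)
```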

View File

@ -0,0 +1,37 @@
{
lib,
config,
drv-parts,
...
}: let
l = lib // builtins;
in {
imports = [
drv-parts.modules.drv-parts.mkDerivation
../../drv-parts/dream2nix-legacy
];
dream2nix-legacy = {
subsystem = "nodejs";
translator = "yarn-lock";
builder = "granular-nodejs";
subsystemInfo = {
nodejs = "16";
noDev = false;
};
source = config.deps.fetchFromGitHub {
owner = "prettier";
repo = "prettier";
rev = config.version;
sha256 = "sha256-gHFzUjTHsEcxTJtFflqSOCthKW4Wa+ypuTeGxodmh0o=";
};
};
deps = {nixpkgs, ...}: {
inherit (nixpkgs) fetchFromGitHub;
inherit (nixpkgs) stdenv;
};
name = l.mkForce "prettier";
version = l.mkForce "2.8.7";
}

View File

@ -0,0 +1,32 @@
{
lib,
config,
...
}: let
l = lib // builtins;
in {
imports = [
../../drv-parts/nodejs-package-json
../../drv-parts/nodejs-granular
];
mkDerivation = {
src = config.deps.fetchFromGitHub {
owner = "prettier";
repo = "prettier";
rev = config.version;
sha256 = "sha256-gHFzUjTHsEcxTJtFflqSOCthKW4Wa+ypuTeGxodmh0o=";
};
};
deps = {nixpkgs, ...}: {
inherit
(nixpkgs)
fetchFromGitHub
stdenv
;
};
name = l.mkForce "prettier";
version = l.mkForce "2.8.7";
}

View File

@ -1,35 +1,30 @@
{
  lib,
  config,
-  drv-parts,
  ...
}: let
  l = lib // builtins;
in {
  imports = [
-    drv-parts.modules.drv-parts.mkDerivation
-    ../../drv-parts/dream2nix-legacy
+    ../../drv-parts/nodejs-package-lock
+    ../../drv-parts/nodejs-granular
  ];
-  dream2nix-legacy = {
-    subsystem = "nodejs";
-    translator = "yarn-lock";
-    builder = "granular-nodejs";
-    subsystemInfo = {
-      nodejs = "16";
-      noDev = false;
-    };
-    source = config.deps.fetchFromGitHub {
-      owner = "prettier";
+  mkDerivation = {
+    src = config.deps.fetchFromGitHub {
+      owner = "davhau";
      repo = "prettier";
-      rev = config.version;
-      sha256 = "sha256-gHFzUjTHsEcxTJtFflqSOCthKW4Wa+ypuTeGxodmh0o=";
+      rev = "2.8.7-package-lock";
+      sha256 = "sha256-zo+WRV3VHja8/noC+iPydtbte93s5GGc3cYaQgNhlEY=";
    };
  };
  deps = {nixpkgs, ...}: {
-    inherit (nixpkgs) fetchFromGitHub;
-    inherit (nixpkgs) stdenv;
+    inherit
+      (nixpkgs)
+      fetchFromGitHub
+      stdenv
+      ;
  };
  name = l.mkForce "prettier";