nodejs: init new builder

This commit is contained in:
hsjobeki 2022-12-28 13:47:35 +01:00
parent 40eb491b8a
commit ef809889ef
16 changed files with 971 additions and 6 deletions

1
.gitignore vendored
View File

@ -4,3 +4,4 @@
/.pre-commit-config.yaml
result
interpreter
__pycache__

View File

@ -0,0 +1,341 @@
{
pkgs,
lib,
...
}: {
type = "pure";
build = {
### FUNCTIONS
# AttrSet -> Bool -> AttrSet -> [x]
getCyclicDependencies, # name: version: -> [ {name=; version=; } ]
getDependencies, # name: version: -> [ {name=; version=; } ]
# function that returns a nix-store-path, where a single dependency from the lockfile has been fetched to.
getSource, # name: version: -> store-path
# to get information about the original source spec
getSourceSpec, # name: version: -> {type="git"; url=""; hash="";}
### ATTRIBUTES
subsystemAttrs, # attrset
defaultPackageName, # string
defaultPackageVersion, # string
# all exported (top-level) package names and versions
# attrset of pname -> version,
packages,
# all existing package names and versions
# attrset of pname -> versions,
# where versions is a list of version strings
packageVersions,
# function which applies overrides to a package
# It must be applied by the builder to each individual derivation
# Example:
# produceDerivation name (mkDerivation {...})
produceDerivation,
...
}: let
# merged attrset of nixpkgs lib and builtins, used as the main helper set
l = lib // builtins;
# short alias for builtins
b = builtins;
inherit (pkgs) stdenv python3 python310Packages makeWrapper jq;
# nodejs version requested by the lockfile subsystem (may be unset)
nodejsVersion = subsystemAttrs.nodejsVersion;
# fallback nodejs major version when the subsystem does not request one
defaultNodejsVersion = "14";
# a package is a "main" (top-level) package iff its version equals the
# one exported for that name in `packages`
isMainPackage = name: version:
  (packages."${name}" or null) == version;
# resolve the nodejs interpreter from nixpkgs (attribute `nodejs-<v>_x`);
# throws when the requested version does not exist in pkgs
nodejs =
  if !(l.isString nodejsVersion)
  then pkgs."nodejs-${defaultNodejsVersion}_x"
  else
    pkgs."nodejs-${nodejsVersion}_x"
    or (throw "Could not find nodejs version '${nodejsVersion}' in pkgs");
# e.g.
# {
#   "@babel/core": ["1.0.0","2.0.0"]
#   ...
# }
# is mapped to
# allPackages = {
#   "@babel/core": {"1.0.0": pkg-derivation, "2.0.0": pkg-derivation }
#   ...
# }
allPackages =
  lib.mapAttrs
  (
    name: versions:
    # genAttrs takes ["1.0.0, 2.0.0"] returns -> {"1.0.0": makePackage name version}
    # makePackage: produceDerivation: name name (stdenv.mkDerivation {...})
    # returns {"1.0.0": pkg-derivation, "2.0.0": pkg-derivation }
      lib.genAttrs
      versions
      (version: (mkNodeModule name version))
  )
  packageVersions;
# our builder, written in python. We have huge complexity with how npm builds node_modules
nodejsBuilder = python310Packages.buildPythonApplication {
  pname = "builder";
  version = "0.1.0";
  src = ./nodejs_builder;
  format = "pyproject";
  nativeBuildInputs = with python310Packages; [poetry mypy flake8 black semantic-version];
  propagatedBuildInputs = with python310Packages; [node-semver];
  # linting/type-checking is done outside the build; skip checks here
  doCheck = false;
  meta = {
    description = "Custom builder";
  };
};
mkNodeModule = name: version: let
pname = lib.replaceStrings ["@" "/"] ["__at__" "__slash__"] (name + "@" + version);
deps = getDependencies name version;
# Decide which of a package's direct dependencies must be installed
# locally (nested inside the parent's node_modules) instead of using the
# version hoisted to the root — i.e. npm's node_modules layout algorithm.
resolveChildren = {
  name, #a
  version, #1.1.2
  rootVersions,
  # versions already available at the node_modules root:
  # {
  #   "packageNameA": "1.0.0",
  #   "packageNameB": "2.0.0"
  # }
}: let
  # direct dependencies of the package currently being resolved
  directDeps = getDependencies name version;
  # a dep needs a local install when the root either lacks the name or
  # holds a different version of it
  installLocally = name: version: !(rootVersions ? ${name}) || (rootVersions.${name} != version);
  locallyRequiredDeps = b.filter (d: installLocally d.name d.version) directDeps;
  # name -> version of all deps requiring a nested install
  localDepsAttrs = b.listToAttrs (l.map (dep: l.nameValuePair dep.name dep.version) locallyRequiredDeps);
  # locally installed deps shadow the root versions for the subtree below
  newRootVersions = rootVersions // localDepsAttrs;
  # recursively resolve the children of every locally installed dep
  localDeps =
    l.mapAttrs
    (
      name: version: {
        inherit version;
        dependencies = resolveChildren {
          inherit name version;
          rootVersions = newRootVersions;
        };
      }
    )
    localDepsAttrs;
in
  localDeps;
# pick the version of `name` hoisted to the node_modules root: prefer the
# version this package depends on directly, otherwise the highest version
pickVersion = name: versions: directDepsAttrs.${name} or (l.head (l.sort (a: b: l.compareVersions a b == 1) versions));
# de-duplicated list of known versions per package name
packageVersions' = l.mapAttrs (n: v: l.unique v) packageVersions;
# name -> version chosen for the node_modules root level
rootPackages = l.mapAttrs (name: versions: pickVersion name versions) packageVersions';
directDeps = getDependencies name version;
# name -> version of this package's direct dependencies
directDepsAttrs = l.listToAttrs (b.map (dep: l.nameValuePair dep.name dep.version) directDeps);
# full node_modules layout (root level plus nested subtrees); the package
# itself is excluded from its own node_modules
nodeModulesTree =
  l.mapAttrs (
    name: version: let
      dependencies = resolveChildren {
        inherit name version;
        rootVersions = rootPackages;
      };
    in {
      inherit version dependencies;
    }
  )
  (l.filterAttrs (n: v: n != name) rootPackages);
# serialized layout consumed by the python builder via $nmTreeJSONPath
nmTreeJSON = b.toJSON nodeModulesTree;
# nested attrset of all transitive dependencies:
# name -> version -> { deps = <recursion>; derivation = <store path>; }
depsTree = let
  getDeps = deps: (b.foldl'
    (
      deps: dep:
        deps
        // {
          ${dep.name} =
            (deps.${dep.name} or {})
            // {
              ${dep.version} =
                (deps.${dep.name}.${dep.version} or {})
                // {
                  deps = getDeps (getDependencies dep.name dep.version);
                  # the package content is taken from the 'lib' output
                  derivation = allPackages.${dep.name}.${dep.version}.lib;
                };
            };
        }
    )
    {}
    deps);
in (getDeps deps);
# serialized tree consumed by the python builder via $depsTreeJSONPath
depsTreeJSON = b.toJSON depsTree;
src = getSource name version;
# The derivation building one package@version.
# outputs:
#   out  -> composed result (bin/, lib/pkg-content -> $lib, lib/node_modules -> $deps)
#   lib  -> the raw package content (extracted tarball, scripts executed)
#   deps -> the node_modules folder built for this package
pkg = produceDerivation name (
  stdenv.mkDerivation
  {
    inherit nmTreeJSON depsTreeJSON;
    # pass the (potentially huge) JSON blobs as files instead of env vars
    passAsFile = ["nmTreeJSON" "depsTreeJSON"];
    inherit pname version src;
    nativeBuildInputs = [makeWrapper];
    buildInputs = [jq nodejs python3];
    outputs = ["out" "lib" "deps"];
    inherit (pkgs) system;
    packageName = pname;
    name = pname;
    # main packages are copied into place, dependencies only symlinked
    installMethod =
      if isMainPackage name version
      then "copy"
      else "symlink";
    unpackCmd =
      if lib.hasSuffix ".tgz" src
      then "tar --delay-directory-restore -xf $src"
      else null;
    preConfigurePhases = ["d2nPatchPhase" "d2nCheckPhase"];
    unpackPhase = import ./unpackPhase.nix {};
    # nodejs expects HOME to be set
    d2nPatchPhase = ''
      export HOME=$TMPDIR
    '';
    # pre-checks:
    # - platform compatibility (os + arch must match)
    d2nCheckPhase = ''
      # exit code 3 -> the package is incompatible to the current platform
      # -> Let the build succeed, but don't create node_modules
      ${nodejsBuilder}/bin/d2nCheck \
      || \
      if [ "$?" == "3" ]; then
        mkdir -p $out
        mkdir -p $lib
        mkdir -p $deps
        echo "Not compatible with system $system" > $lib/error
        exit 0
      else
        exit 1
      fi
    '';
    # create the node_modules folder
    # - uses symlinks as default
    # - symlink the .bin
    # - add PATH to .bin
    configurePhase = ''
      runHook preConfigure
      ${nodejsBuilder}/bin/d2nNodeModules
      export PATH="$PATH:node_modules/.bin"
      runHook postConfigure
    '';
    # only build the main package
    # deps only get unpacked, installed, patched, etc
    dontBuild = ! (isMainPackage name version);
    isMain = isMainPackage name version;
    # Build:
    # npm run build
    # custom build commands for:
    # - electron apps
    # fallback to npm lifecycle hooks, if no build script is present
    buildPhase = ''
      runHook preBuild
      if [ "$(jq '.scripts.build' ./package.json)" != "null" ];
      then
        echo "running npm run build...."
        npm run build
      fi
      runHook postBuild
    '';
    # copy node_modules
    # - symlink .bin
    # - symlink manual pages
    # - dream2nix copies node_modules folder if it is the top-level package
    installPhase = ''
      runHook preInstall
      # remove the symlink (node_modules -> /build/node_modules)
      rm node_modules || true
      if [ -n "$isMain" ];
      then
        echo ----------------------------- copying node_modules into root package---------------------
        # mkdir -p $out/node_modules
        # cp -r /build/node_modules $out
        # cp ./package-lock.json $out/node_modules/.package-lock.json || true
      else
        if [ "$(jq '.scripts.preinstall' ./package.json)" != "null" ]; then
          npm --production --offline --nodedir=$nodeSources run preinstall
        fi
        if [ "$(jq '.scripts.install' ./package.json)" != "null" ]; then
          npm --production --offline --nodedir=$nodeSources run install
        fi
        if [ "$(jq '.scripts.postinstall' ./package.json)" != "null" ]; then
          npm --production --offline --nodedir=$nodeSources run postinstall
        fi
      fi
      ### TODO:
      # $out
      # - $out/lib/pkg-content -> $lib ...(extracted tgz)
      # - $out/lib/node_modules -> $deps
      # - $out/bin
      # $deps
      # - $deps/node_modules
      # $lib
      # - pkg-content (extracted + install scripts runned)
      # copy everything to $out
      cp -r . $lib
      mkdir -p $deps/node_modules
      mkdir -p $out/bin
      mkdir -p $out/lib
      ln -s $lib $out/lib/pkg-content
      ln -s $deps/node_modules $out/lib/node_modules
      runHook postInstall
    '';
  }
);
in
  pkg;
# Expose exactly the exported version of every top-level package,
# keeping the name -> version -> derivation shape expected by dream2nix.
# `packages` maps each name to a single version, so a plain mapAttrs
# yields the same attrset the previous fold-and-merge produced.
mainPackages =
  l.mapAttrs
  (name: version: {"${version}" = allPackages."${name}"."${version}";})
  packages;
in {
packages = mainPackages;
};
}

View File

@ -0,0 +1,2 @@
[flake8]
max-line-length = 88

View File

@ -0,0 +1,5 @@
[mypy]
python_version = 3.10
check_untyped_defs = True
ignore_missing_imports = True
strict_optional = True

View File

@ -0,0 +1,89 @@
import platform as p
import sys
from typing import Any, Literal, Union
from .logger import logger
from .package import get_package_json
# The cpu architecture names node.js reports via `process.arch`;
# package.json "cpu" entries are matched against these values.
NodeArch = Literal[
    "x32",
    "x64",
    "arm",
    "arm64",
    "s390",
    "s390x",
    "mipsel",
    "ia32",
    "mips",
    "ppc",
    "ppc64",
]
# a map containing some 'uname' mappings into the node 'process.arch' values.
arch_map: dict[str, NodeArch] = {
    "i386": "x32",
    "i686": "x32",
    "x86_64": "x64",
    "aarch64_be": "arm64",
    "aarch64": "arm64",
    "armv8b": "arm64",
    "armv8l": "arm64",
    # NOTE(review): the two entries below look lossy ('mips64' mapped to
    # 32-bit 'mips', 'ppcle' to 'ppc64') — confirm against node's
    # process.arch values
    "mips64": "mips",
    "ppcle": "ppc64",
}
def check_platform() -> bool:
    """Return True when this package may be built on the current platform.

    Compatibility is declared by the 'os' and 'cpu' fields of the
    package.json; a missing package.json counts as compatible.
    """
    platform: str = sys.platform  # 'linux','darwin',...
    machine: str = p.machine()
    # translate the uname value into a node cpu name where known;
    # fall back to the raw string since arch_map is not complete.
    node_arch: Union[NodeArch, str] = arch_map.get(machine, machine)
    package_json = get_package_json()
    if not package_json:
        return True
    os_ok = _is_os_supported(package_json, platform)
    arch_ok = _is_arch_supported(package_json, node_arch)
    if os_ok and arch_ok:
        return True
    logger.info(
        f"\
Package is not compatible with current platform '{platform}' or cpu '{node_arch}'"
    )
    return False
def _is_arch_supported(package_json: dict[str, Any], arch: str) -> bool:
"""
Checks whether the current cpu architecture is supported
Reads the package.json, npm states:
architecture can be declared cpu=["x64"] as supported
Or be excluded with '!' -> cpu=["!arm"]
"""
if "cpu" in package_json:
supports = package_json["cpu"]
if arch not in supports or f"!{arch}" in supports:
return False
# return true by default
# because not every project defines 'cpu' in package.json
return True
def _is_os_supported(package_json: dict[str, Any], platform: str) -> bool:
"""
Checks whether the current system is supported
Reads the package.json, npm states:
Systems can be declared os=["linux"] as supported
Or be excluded with '!' -> os=["!linux"]
"""
if "os" in package_json:
supports = package_json["os"]
if platform not in supports or f"!{platform}" in supports:
return False
# return true by default
# because not every project defines 'os' in package.json
return True

View File

@ -0,0 +1,5 @@
from pathlib import Path
# All building happens below /build inside the nix sandbox.
root = Path("/build")
# The node_modules folder the builder assembles for the current package.
node_modules = root / "node_modules"
# Where package binaries get symlinked (added to PATH by the builder).
bin_dir = node_modules / ".bin"

View File

@ -0,0 +1,135 @@
from dataclasses import dataclass
from typing import Any, Callable, Literal, Optional, TypedDict, Union
from .logger import logger
@dataclass
class Dependency:
    """One resolved node package inside the dependency tree."""

    # package name, e.g. "@babel/core"
    name: str
    # exact resolved version, e.g. "1.1.2"
    version: str
    # store path holding the package content
    derivation: str
    # the dependency that pulled this one in (None for roots)
    parent: Union["Dependency", None] = None
    # nested subtree of this package's own dependencies, if any
    dependencies: Union[dict[str, Any], None] = None

    def repr(self: "Dependency") -> str:
        """Canonical 'name@version' identifier used as a tree key."""
        return "@".join((self.name, self.version))
def get_all_deps(all_deps: dict[str, Any], name: str, version: str) -> list[str]:
    """
    Returns all dependencies. as flattened list

    Locates the subtree rooted at `name@version` within `all_deps`,
    then collects every node of that subtree as 'name@version' ids.
    Returns an empty list when the package is not found.
    """

    def is_found(acc: Any, dep: Dependency, dep_tree: Optional[DepsTree]) -> bool:
        # keep recursing only while nothing has been found yet
        return not bool(acc)

    def find_exact_dependency(
        acc: Any, dep: Dependency, dep_tree: Optional[DepsTree]
    ) -> Any:
        # once found, pass the subtree through unchanged
        if acc:
            return acc
        if dep.repr() == f"{name}@{version}":
            return dep_tree
        return None

    subtree = recurse_deps_tree(
        all_deps, find_exact_dependency, acc=None, pred=is_found, order="top-down"
    )

    def flatten(acc: Any, dep: Dependency, dep_tree: Optional[DepsTree]) -> Any:
        # accumulate every visited node's identifier
        acc.append(dep.repr())
        return acc

    flattened: list[str] = []
    if subtree:
        flattened = recurse_deps_tree(subtree, flatten, [])
    return flattened
class Meta(TypedDict):
    # store path of the package content (the 'lib' output of its derivation)
    derivation: str
    # nested tree of this package's own dependencies (None at the leaves)
    deps: Optional[dict[str, dict[str, Any]]]


# dependency tree as emitted by the nix builder: name -> version -> Meta
DepsTree = dict[str, dict[str, Meta]]
def recurse_deps_tree(
    deps: DepsTree,
    cb: Callable[[Any, Dependency, Optional[DepsTree]], Any],
    acc: Any,
    parent: Union[Dependency, None] = None,
    order: Literal["bottom-up", "top-down"] = "bottom-up",
    pred: Optional[Callable[[Any, Dependency, Optional[DepsTree]], bool]] = None,
) -> Any:
    """
    Generic function that traverses the dependency tree and calls
    'cb' on every node in the tree

    Parameters
    ----------
    deps : DepsTree
        The nested tree of dependencies, that will be iterated through.
    cb : Callable[[Any, Dependency, Optional[DepsTree]], Any]
        takes an accumulator (like 'fold' )
    acc : Any
        The initial value for the accumulator passed to 'cb'
    parent : Dependency
        The parent dependency, defaults to None, is set automatically during recursion
    order : Literal["bottom-up", "top-down"]
        The order in which the callback gets called: "bottom-up" or "top-down"
    pred : Callable[[Any, Dependency, Optional[DepsTree]], bool]
        Like 'cb' but returns a bool that will stop recursion if False

    Returns
    -------
    acc
        the last return value from 'cb: Callable'
    """
    # materialize the current tree level as Dependency objects first
    dependencies: list[Dependency] = []
    for name, versions in deps.items():
        for version, meta in versions.items():
            nested_deps = meta["deps"]
            derivation = meta["derivation"]
            direct_dependency = Dependency(
                name=name,
                version=version,
                derivation=derivation,
                parent=parent,
                dependencies=nested_deps,
            )
            dependencies.append(direct_dependency)
    for dependency in dependencies:
        # top-down: visit the node before its children
        if order == "top-down":
            acc = cb(acc, dependency, dependency.dependencies)
        if dependency.dependencies:
            stop = False
            if pred is not None:
                stop = not pred(acc, dependency, dependency.dependencies)
            if not stop:
                acc = recurse_deps_tree(
                    dependency.dependencies,
                    cb,
                    acc=acc,
                    parent=dependency,
                    order=order,
                )
            else:
                logger.debug(
                    f"stopped recursing the dependency tree at {dependency.repr()}\
-> because the predicate function returned 'False'"
                )
                # NOTE(review): reconstructed from whitespace-mangled source —
                # this early return aborts the whole traversal once 'pred'
                # vetoes recursion (used by get_all_deps to stop after a
                # match); confirm against the original nesting
                return acc
        # bottom-up (default): visit the node after its children
        if order == "bottom-up":
            acc = cb(acc, dependency, dependency.dependencies)
    return acc

View File

@ -0,0 +1,46 @@
import logging
class Formatter(logging.Formatter):
    """
    Logging colored formatter,
    adapted from https://stackoverflow.com/a/56944256/3638629
    """

    grey = "\x1b[38;21m"
    blue = "\x1b[38;5;39m"
    yellow = "\x1b[38;5;226m"
    red = "\x1b[38;5;196m"
    bold_red = "\x1b[31;1m"
    reset = "\x1b[0m"

    def __init__(self, fmt):
        super().__init__()
        self.fmt = fmt
        # one ANSI color per level; the record format itself is shared
        level_colors = {
            logging.DEBUG: self.grey,
            logging.INFO: self.blue,
            logging.WARNING: self.yellow,
            logging.ERROR: self.red,
            logging.CRITICAL: self.bold_red,
        }
        self.FORMATS = {
            level: color + self.fmt + self.reset
            for level, color in level_colors.items()
        }

    def format(self, record):
        """Format the record using the color matching its level."""
        colored_fmt = self.FORMATS.get(record.levelno)
        return logging.Formatter(colored_fmt).format(record)
# Create custom logger logging all five levels
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Define format for logs
fmt = "%(levelname)8s | %(message)s"
# Create stdout handler for logging to the console (logs all five levels)
# NOTE(review): StreamHandler() without arguments actually writes to
# stderr, not stdout — fine for nix build logs, but the comment above
# is misleading
stdout_handler = logging.StreamHandler()
stdout_handler.setLevel(logging.DEBUG)
stdout_handler.setFormatter(Formatter(fmt))
logger.addHandler(stdout_handler)

View File

@ -0,0 +1,48 @@
import os
from enum import Enum
from typing import Any, Optional, TypedDict
from .logger import logger
# snapshot of the process environment, taken once at import time
env: dict[str, str] = os.environ.copy()


def is_main_package() -> bool:
    """True when the derivation marks this as the top-level package ($isMain)."""
    flag = get_env().get("isMain")
    return bool(flag)


def get_env() -> dict[str, Any]:
    """Return the environment snapshot taken at import time."""
    return env


class SelfInfo(TypedDict):
    # package name ($pname) and version ($version) of the build target
    name: str
    version: str


def get_self() -> SelfInfo:
    """Name and version of the package currently being built."""
    environment = get_env()
    return {
        "name": environment.get("pname", "unknown"),
        "version": environment.get("version", "unknown"),
    }


class InstallMethod(Enum):
    # main packages are copied, dependencies are symlinked
    copy = "copy"
    symlink = "symlink"


def get_install_method() -> InstallMethod:
    """Parse $installMethod into an InstallMethod; exits(1) on unknown values."""
    install_method: Optional[str] = get_env().get("installMethod")
    try:
        return InstallMethod(install_method)
    except ValueError:
        logger.error(
            f"\
Unknown install method: '{install_method}'. Choose: \
{', '.join([ e.value for e in InstallMethod])}"
        )
        exit(1)

View File

@ -0,0 +1,119 @@
import os
import shutil
from pathlib import Path
from typing import Any, Optional, TypedDict
from .config import root
from .dependencies import Dependency, DepsTree, get_all_deps, recurse_deps_tree
from .logger import logger
from .module import InstallMethod, get_install_method, get_self
from .package import (NodeModulesPackage, NodeModulesTree, create_binary,
get_all_deps_tree, get_bins, get_node_modules_tree,
get_package_json)
def _create_package_from_derivation(
    dep: Dependency,
    path: Path,
):
    """Install one dependency below `root / path`.

    Depending on $installMethod the store content is either copied
    (main package) or symlinked (regular dependency); afterwards the
    package's declared binaries are linked into the local .bin folder.
    """
    node_modules = root / path
    target = node_modules / Path(dep.name)
    bin_dir = node_modules / Path(".bin")
    target.parent.mkdir(parents=True, exist_ok=True)
    install_method = get_install_method()
    # derivations without a package.json are not installable npm packages
    if not get_package_json(Path(dep.derivation)):
        logger.debug(f"{dep.repr()} is not a package. Skipping installation")
        return
    # check if there is already the right package installed
    if not get_package_json(target):
        if install_method == InstallMethod.copy:
            # shutil.copytree(dep.derivation, target)
            target.mkdir(parents=True, exist_ok=True)
            # copy entry by entry; 'str / Path' works via Path.__rtruediv__
            for entry in os.listdir(dep.derivation):
                if (dep.derivation / Path(entry)).is_dir():
                    shutil.copytree(dep.derivation / Path(entry), target / Path(entry))
                else:
                    shutil.copy(dep.derivation / Path(entry), target / Path(entry))
            # store contents are read-only; restore write permissions
            os.system(f"chmod -R +w {target}")
        elif install_method == InstallMethod.symlink:
            target.mkdir(parents=True, exist_ok=True)
            for entry in os.listdir(dep.derivation):
                (target / Path(entry)).symlink_to(dep.derivation / Path(entry))
    # expose the package's binaries in the shared .bin folder
    # NOTE(review): reconstructed from whitespace-mangled source — linking
    # bins unconditionally is idempotent since create_binary skips existing
    # targets; confirm the original nesting
    binaries = get_bins(dep)
    for name, rel_path in binaries.items():
        create_binary(bin_dir / Path(name), Path(dep.name) / rel_path)
class Passthrough(TypedDict):
    # every known dependency, keyed by 'name@version'
    all_deps: dict[str, Dependency]
    # flat list of 'name@version' ids required by the root package
    flat_deps: list[str]
def _make_folders_rec(
    node_modules_tree: NodeModulesPackage,
    passthrough: Passthrough,
    path: Path = Path("node_modules"),
):
    """Recursively materialize the node_modules layout described by
    `node_modules_tree`, installing each package found in
    `passthrough["all_deps"]` at its computed position.

    Children are created before their parent is installed, so nested
    node_modules folders already exist at install time.
    """
    name: str
    meta: NodeModulesTree
    for name, meta in node_modules_tree.items():
        version = meta["version"]
        dependencies: Optional[NodeModulesPackage] = meta.get("dependencies", None)
        found_dependency = passthrough["all_deps"].get(f"{name}@{version}")
        if found_dependency:
            source = found_dependency.derivation
            dep = Dependency(
                name,
                version,
                source,
                dependencies=dependencies,
            )
            # recurse first: nested deps live under <name>/node_modules
            if dependencies:
                _make_folders_rec(
                    node_modules_tree=dependencies,
                    passthrough=passthrough,
                    path=path / Path(f"{name}/node_modules"),
                )
            _create_package_from_derivation(
                dep,
                path,
            )
        else:
            # missing is only fatal when the root package actually needs it
            if f"{name}@{version}" in passthrough["flat_deps"]:
                logger.critical(f"{name}@{version} required but not found")
def create_node_modules():
    """Assemble the node_modules folder for the current package.

    Combines the layout tree ($nmTreeJSONPath) with the dependency
    derivations ($depsTreeJSONPath), then symlinks the assembled tree
    into the current working directory.
    """

    def collect_dependency(
        acc: Any, dep: Dependency, dep_tree: Optional[DepsTree]
    ) -> Any:
        # index every dependency exactly once by its 'name@version' id
        identifier = dep.repr()
        if identifier not in acc.keys():
            acc[identifier] = dep
        return acc

    nm_tree = get_node_modules_tree()
    all_deps = get_all_deps_tree()
    collected: dict[str, Dependency] = recurse_deps_tree(
        all_deps, collect_dependency, acc={}
    )
    root_pkg = get_self()
    # flat list of everything the root package (transitively) requires
    flat_deps: list[str] = get_all_deps(all_deps, root_pkg["name"], root_pkg["version"])
    _make_folders_rec(
        nm_tree,
        passthrough={"all_deps": collected, "flat_deps": flat_deps},
    )
    # expose the assembled tree in the current working directory
    os.symlink(root / Path("node_modules"), "node_modules")

View File

@ -0,0 +1,85 @@
import json
import os
from pathlib import Path
from typing import Any, Optional, TypedDict, Union
from .dependencies import Dependency, DepsTree
from .module import get_env
# cache of parsed package.json files, keyed by directory path
package_json_cache = {}


def get_package_json(path: Path = Path("")) -> Union[dict[str, Any], None]:
    """Read and cache `<path>/package.json`; None when the file is absent."""
    if path in package_json_cache:
        return package_json_cache[path]
    manifest = path / "package.json"
    # there is no package.json in the folder
    if not manifest.is_file():
        return None
    # utf-8-sig tolerates a BOM, which some published packages ship
    with manifest.open(encoding="utf-8-sig") as handle:
        package_json_cache[path] = json.load(handle)
    return package_json_cache[path]
def has_scripts(
    package_json: dict[str, Any],
    lifecycle_scripts: list[str] = [
        "preinstall",
        "install",
        "postinstall",
    ],
):
    """Truthy (the set of matching script names) when the package declares
    any npm lifecycle script; falsy otherwise.

    Note: intentionally returns the short-circuited value, not a bool,
    matching how callers use it in boolean context.
    """
    if not package_json:
        return package_json
    scripts = package_json.get("scripts")
    if not scripts:
        return scripts
    return set(scripts.keys()) & set(lifecycle_scripts)
def get_bins(dep: Dependency) -> dict[str, Path]:
    """Map binary names to the relative paths declared in the package's
    'bin' field; empty when no binaries are declared."""
    manifest = get_package_json(Path(dep.derivation))
    bins: dict[str, Path] = {}
    if manifest and manifest.get("bin"):
        declared = manifest["bin"]
        if isinstance(declared, str):
            # a bare string exposes a single binary named after the
            # package (scope prefix stripped)
            short_name = manifest["name"].split("/")[-1]
            bins[short_name] = Path(declared)
        else:
            bins.update({bin_name: Path(rel) for bin_name, rel in declared.items()})
    return bins
def create_binary(target: Path, source: Path):
    """Symlink `target` -> `../source`, skipping targets that already exist
    (including dangling symlinks)."""
    target.parent.mkdir(parents=True, exist_ok=True)
    # lexists also detects broken symlinks, unlike Path.exists
    if os.path.lexists(target):
        return
    target.symlink_to(Path("..") / source)
def get_all_deps_tree() -> DepsTree:
    """Load the dependency tree JSON referenced by $depsTreeJSONPath
    (written by the nix derivation); {} when the variable is unset."""
    tree_path = get_env().get("depsTreeJSONPath")
    if not tree_path:
        return {}
    with open(tree_path) as handle:
        return json.load(handle)
class NodeModulesTree(TypedDict):
    # exact version installed at this position of the layout
    version: str
    # mypy does not allow recursive types yet.
    # The real type is:
    # Optional[dict[str, NodeModulesPackage]]
    dependencies: Optional[dict[str, Any]]


# one level of the node_modules layout: package name -> subtree
NodeModulesPackage = dict[str, NodeModulesTree]
def get_node_modules_tree() -> dict[str, Any]:
    """Load the node_modules layout JSON referenced by $nmTreeJSONPath
    (written by the nix derivation); {} when the variable is unset."""
    layout_path = get_env().get("nmTreeJSONPath")
    if not layout_path:
        return {}
    with open(layout_path) as handle:
        return json.load(handle)

View File

@ -0,0 +1,24 @@
from .lib.checks import check_platform
from .lib.module import is_main_package
from .lib.node_modules import create_node_modules
from .lib.package import get_package_json, has_scripts
def check():
    """
    Checks if the package can be installed.
    - platform must have compatible: os + cpu
    """
    compatible = check_platform()
    if compatible:
        return
    # exit code 3 tells the derivation to skip node_modules creation
    # (handled in d2nCheckPhase of the nix builder)
    exit(3)
def d2nNodeModules():
    """
    Generate the node_modules folder.
    - for main packages
    - for packages with lifecycle scripts that could need them
    """
    package_json = get_package_json()
    needs_node_modules = is_main_package() or has_scripts(package_json)
    if needs_node_modules:
        create_node_modules()

View File

@ -0,0 +1,22 @@
[tool.poetry]
name = "nodejs_builder"
version = "0.1.0"
description = ""
authors = ["hsjobeki <hsjobeki@gmail.com>"]
[tool.poetry.scripts]
d2nNodeModules = 'nodejs_builder.main:d2nNodeModules'
d2nCheck = 'nodejs_builder.main:check'
[tool.poetry.dependencies]
python = "^3.10"
semantic-version = "^2.9.0"
node-semver = "^0.8.1"
[tool.poetry.dev-dependencies]
mypy = "^0.991"
black = "*"
flake8 = "*"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

View File

@ -0,0 +1,48 @@
{}:
# TODO: upstream fix to nixpkgs
# example which requires this:
# https://registry.npmjs.org/react-window-infinite-loader/-/react-window-infinite-loader-1.0.7.tgz
#
# Custom unpackPhase for npm sources: registers a fallback unpacker for
# tarballs that do not end in .tar.?? and normalizes the unpacked folder
# name to $packageName (handles both file and directory sources).
''
  runHook preUnpack
  export sourceRoot="$packageName"
  # sometimes tarballs do not end with .tar.??
  unpackFallback(){
    local fn="$1"
    tar xf "$fn"
  }
  unpackCmdHooks+=(unpackFallback)
  unpackFile $src
  # Make the base dir in which the target dependency resides in first
  mkdir -p "$(dirname "$sourceRoot")"
  # install source
  if [ -f "$src" ]
  then
    # Figure out what directory has been unpacked
    packageDir="$(find . -maxdepth 1 -type d | tail -1)"
    # Restore write permissions
    find "$packageDir" -type d -exec chmod u+x {} \;
    chmod -R u+w -- "$packageDir"
    # Move the extracted tarball into the output folder
    mv -- "$packageDir" "$sourceRoot"
  elif [ -d "$src" ]
  then
    strippedName="$(stripHash $src)"
    # Restore write permissions
    chmod -R u+w -- "$strippedName"
    # Move the extracted directory into the output folder
    mv -- "$strippedName" "$sourceRoot"
  fi
  runHook postUnpack
''

View File

@ -51,12 +51,7 @@
rootDependencies = packageLockDeps;
packageJsonDeps = nodejsUtils.getPackageJsonDeps packageJson noDev;
parsedDependencies =
l.filterAttrs
(name: dep: packageJsonDeps ? "${name}")
packageLockDeps;
parsedDependencies = packageLockDeps;
identifyGitSource = dependencyObject:
# TODO: when integrity is there, and git url is github then use tarball instead