Merge pull request #6 from DavHau/dev

nodejs support + framework improvements
This commit is contained in:
DavHau 2021-09-22 15:20:37 +01:00 committed by GitHub
commit 1bb950f967
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
22 changed files with 775 additions and 150 deletions

View File

@ -163,9 +163,6 @@ Poetry uses `pyproject.toml` and `poetry.lock` to lock dependencies
// this indicates which builder must be used
"buildSystem": "python",
// versioning the format to ensure builder compatibility
"buildSystemFormatVersion": 1,
// translator which generated this file
// (not relevant for building)
"producedBy": "translator-poetry-1",

View File

@ -15,6 +15,22 @@
"type": "indirect"
}
},
"node2nix": {
"flake": false,
"locked": {
"lastModified": 1613594272,
"narHash": "sha256-fcnPNexhowSkatLSl+0dat5oDaWKH53Pg+VKrE8+x+Q=",
"owner": "svanderburg",
"repo": "node2nix",
"rev": "0c94281ea98f1b17532176106f90f909aa133704",
"type": "github"
},
"original": {
"owner": "svanderburg",
"repo": "node2nix",
"type": "github"
}
},
"npmlock2nix": {
"flake": false,
"locked": {
@ -34,6 +50,7 @@
"root": {
"inputs": {
"nixpkgs": "nixpkgs",
"node2nix": "node2nix",
"npmlock2nix": "npmlock2nix"
}
}

View File

@ -3,10 +3,11 @@
inputs = {
nixpkgs.url = "nixpkgs/nixos-unstable";
node2nix = { url = "github:svanderburg/node2nix"; flake = false; };
npmlock2nix = { url = "github:nix-community/npmlock2nix"; flake = false; };
};
outputs = { self, nixpkgs, npmlock2nix }:
outputs = { self, nixpkgs, node2nix, npmlock2nix }:
let
lib = nixpkgs.lib;
@ -20,12 +21,16 @@
overlays = [ self.overlay ];
});
externalSourcesFor = forAllSystems (system: nixpkgsFor."${system}".runCommand "dream2nix-vendored" {} ''
mkdir -p $out/{npmlock2nix,node2nix}
cp ${npmlock2nix}/{internal.nix,LICENSE} $out/npmlock2nix/
cp ${node2nix}/{nix/node-env.nix,LICENSE} $out/node2nix/
'');
dream2nixFor = forAllSystems (system: import ./src rec {
pkgs = nixpkgsFor."${system}";
externalSources = pkgs.runCommand "dream2nix-imported" {} ''
mkdir -p $out/npmlock2nix
cp ${npmlock2nix}/{internal.nix,LICENSE} $out/npmlock2nix/
'';
externalSources = externalSourcesFor."${system}";
inherit lib;
});
in
@ -48,8 +53,13 @@
});
devShell = forAllSystems (system: nixpkgsFor."${system}".mkShell {
buildInputs = with nixpkgsFor."${system}"; [
cntr
nixUnstable
];
shellHook = ''
export NIX_PATH=nixpkgs=${nixpkgs}
export d2nExternalSources=${externalSourcesFor."${system}"}
'';
});
};

notes/node2nix.md Normal file (11 added lines)
View File

@ -0,0 +1,11 @@
## Problems with the original node2nix implementation
### Bad caching
- packages are all unpacked at once inside a build instead of in individual derivations
### Bad build performance
- unpacking is done sequentially
- pinpointing deps is done sequentially
### Build-time dependencies unavailable
- packages are not available during build (could be fixed by setting NODE_PATH and installing in correct order)
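
To make the "installing in correct order" remark concrete, here is a small hedged sketch (mine, not part of this commit): given an acyclic graph in the shape of dream.lock's `dependencyGraph`, a post-order walk yields an order in which every dependency lands before its dependents, so it can already be on `NODE_PATH` when they build. The `install_order` helper and the sample graph are hypothetical.

```python
# Hypothetical sketch: derive an install order from a dependencyGraph-shaped
# dict so that build-time dependencies exist before their dependents.
def install_order(dep_graph):
    order, visited = [], set()

    def visit(pkg):
        if pkg in visited:
            return
        visited.add(pkg)
        for dep in dep_graph.get(pkg, []):
            visit(dep)
        order.append(pkg)  # dependencies are appended before their dependents

    for pkg in dep_graph:
        visit(pkg)
    return order

print(install_order({"a": ["b", "c"], "b": ["c"], "c": []}))
# ['c', 'b', 'a'] -> 'c' is installed first and is visible to 'b' and 'a'
```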

View File

@ -3,20 +3,23 @@
"requests": {
"url": "https://download.pypi.org/requests/2.28.0",
"hash": "000000000000000000000000000000000000000",
"version": "1.2.3",
"type": "fetchurl"
},
"certifi": {
"url": "https://download.pypi.org/certifi/2.0",
"hash": "000000000000000000000000000000000000000",
"version": "2.3.4",
"type": "fetchurl"
}
},
"generic": {
"buildSystem": "python",
"buildSystemFormatVersion": 1,
"producedBy": "translator-poetry-1",
"rootPackage": "requests",
"translatedBy": "python.pure.poetry",
"translatorParams": "--flag_application",
"mainPackage": "requests",
"dependencyCyclesRemoved": true,
"dependencyGraph": {
"requests": [
"certifi"
@ -32,4 +35,4 @@
"certifi": "wheel"
}
}
}
}
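
To make the fields above concrete, here is a minimal consumer sketch (mine, not from the PR; it assumes the example is saved as `dream.lock` next to the script) that resolves the direct dependencies of the main package:

```python
# Hypothetical reader of the dream.lock example above.
import json

with open("dream.lock") as f:
    lock = json.load(f)

main = lock["generic"]["mainPackage"]                 # "requests"
for dep in lock["generic"]["dependencyGraph"][main]:  # ["certifi"]
    source = lock["sources"][dep]
    print(dep, source["version"], source["type"], source["url"])
```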

View File

@ -27,6 +27,12 @@
}
},
"allOf": [
{
"if": {
"properties": { "type": { "const": "unknown" } }
},
"then": { "properties": {} }
},
{
"if": {
"properties": { "type": { "const": "fetchurl" } }
@ -68,7 +74,6 @@
"type": "object",
"properties": {
"buildSystem": { "type": "string" },
"buildSystemFormatVersion": { "type": "integer" },
"producedBy": { "type": "string" },
"dependencyGraph": {
"type": "object",

View File

@ -0,0 +1,5 @@
{
"buildSystem": {
"nodejsVersion": 14
}
}

View File

@ -1,6 +1,10 @@
{
"inputPaths": [
"./some_project_src/requirements.txt",
"./some_project_src/requirements-dev.txt"
"inputDirectories": [
"./some_project_src"
],
"inputFiles": [
"./a/b/c/requirements.txt",
"./a/b/c/requirements-dev.txt"
]
}

View File

@ -6,6 +6,8 @@ import subprocess as sp
import sys
import tempfile
import networkx as nx
with open (os.environ.get("translatorsJsonFile")) as f:
translators = json.load(f)
@ -13,18 +15,46 @@ with open (os.environ.get("translatorsJsonFile")) as f:
def strip_hashes_from_lock(lock):
for source in lock['sources'].values():
del source['hash']
if 'hash' in source:
del source['hash']
def list(args):
def order_dict(d):
return {k: order_dict(v) if isinstance(v, dict) else v
for k, v in sorted(d.items())}
def list_translators(args):
out = "Available translators per build system"
for subsystem, trans_types in translators.items():
displayed = []
for trans_type, translators_ in trans_types.items():
for translator in translators_:
displayed.append(f"{trans_type}.{translator}")
for trans_name, translator in translators_.items():
lines = (
f"{trans_type}.{trans_name}",
)
if translator:
lines += (
f"\n special args:",
)
for argName, argData in translator.items():
if argData['type'] == 'argument':
lines += (
f"\n --arg_{argName} {{value}}",
f"\n description: {argData['description']}",
f"\n default: {argData['default']}",
f"\n examples: {', '.join(argData['examples'])}",
)
elif argData['type'] == 'flag':
lines += (
f"\n --flag_{argName}",
f"\n description: {argData['description']}",
)
else:
raise Exception(f"Unknown type '{argData['type']}' of argument '{argName}'")
displayed.append(''.join(lines))
nl = '\n'
out += f"\n - {subsystem}.{f'{nl} - {subsystem}.'.join(displayed)}"
out += f"\n\n - {subsystem}.{f'{nl} - {subsystem}.'.join(displayed)}"
print(out)
@ -32,7 +62,25 @@ def translate(args):
dream2nix_src = os.environ.get("dream2nixSrc")
files = args.input
inputPaths = args.input
# collect special args
specialArgs = {}
for argName, argVal in vars(args).items():
if argName.startswith("arg_"):
specialArgs[argName[4:]] = argVal
elif argName.startswith("flag_"):
specialArgs[argName[5:]] = True
# check if all inputs exist
for path in inputPaths:
if not os.path.exists(path):
raise Exception(f"Input path '{path}' does not exist")
inputFiles = list(filter(lambda p: os.path.isfile(p), inputPaths))
inputFiles = list(map(lambda p:os.path.realpath(p), inputFiles))
inputDirectories = list(filter(lambda p: os.path.isdir(p), inputPaths))
inputDirectories = list(map(lambda p:os.path.realpath(p), inputDirectories))
# determine output directory
if os.path.isdir(args.output):
@ -43,10 +91,12 @@ def translate(args):
# translator arguments
translatorInput = dict(
inputPaths=files,
inputFiles=inputFiles,
inputDirectories=inputDirectories,
outputFile=output,
selector=args.translator or "",
)
translatorInput.update(specialArgs)
# remove output file if exists
if os.path.exists(output):
@ -60,19 +110,45 @@ def translate(args):
env.update(dict(
FUNC_ARGS=inputJsonFile.name
))
procBuild = sp.run(
procEval = sp.run(
[
"nix", "build", "--impure", "--expr",
f"(import {dream2nix_src} {{}}).translators.selectTranslatorBin {{}}", "-o", "translator"
"nix", "eval", "--impure", "--raw", "--expr",
f"((import {dream2nix_src} {{}}).translators.selectTranslatorJSON {{}})",
],
capture_output=True,
env=env
)
if procEval.returncode:
print("Selecting translator failed", file=sys.stderr)
print(procEval.stderr.decode(), file=sys.stderr)
exit(1)
# parse data for auto selected translator
resultEval = json.loads(procEval.stdout)
subsystem = resultEval['subsystem']
trans_type = resultEval['type']
trans_name = resultEval['name']
# include default values into input data
translatorInputWithDefaults = resultEval['SpecialArgsDefaults']
translatorInputWithDefaults.update(translatorInput)
json.dump(translatorInputWithDefaults, inputJsonFile, indent=2)
inputJsonFile.seek(0)
# build the translator bin
procBuild = sp.run(
[
"nix", "build", "--impure", "-o", "translator", "--expr",
f"(import {dream2nix_src} {{}}).translators.translators.{subsystem}.{trans_type}.{trans_name}.translateBin",
],
capture_output=True,
)
if procBuild.returncode:
print("Building translator failed", file=sys.stderr)
print(procBuild.stderr.decode(), file=sys.stderr)
exit(1)
# execute translator
translatorPath = os.path.realpath("translator")
os.remove("translator")
sp.run(
@ -86,11 +162,50 @@ def translate(args):
# read produced lock file
with open(output) as f:
lock = json.load(f)
# write translator information to lock file
lock['generic']['translatedBy'] = f"{subsystem}.{trans_type}.{trans_name}"
lock['generic']['translatorParams'] = " ".join(sys.argv[2:])
# calculate combined hash
# clean up dependency graph
# remove empty entries
depGraph = lock['generic']['dependencyGraph']
if 'dependencyGraph' in lock['generic']:
for pname, deps in depGraph.copy().items():
if not deps:
del depGraph[pname]
# remove cyclic dependencies
edges = set()
for pname, deps in depGraph.items():
for dep in deps:
edges.add((pname, dep))
G = nx.DiGraph(sorted(list(edges)))
cycle_count = 0
removed_edges = []
for pname in list(depGraph.keys()):
try:
while True:
cycle = nx.find_cycle(G, pname)
cycle_count += 1
# remove_dependecy(indexed_pkgs, G, cycle[-1][0], cycle[-1][1])
node_from, node_to = cycle[-1][0], cycle[-1][1]
G.remove_edge(node_from, node_to)
depGraph[node_from].remove(node_to)
removed_edges.append((node_from, node_to))
except nx.NetworkXNoCycle:
continue
if removed_edges:
removed_cycles_text = 'Removed cyclic dependencies:'
for node, removed_node in removed_edges:
removed_cycles_text += f"\n {node} -> {removed_node}"
print(removed_cycles_text)
lock['generic']['dependencyCyclesRemoved'] = True
# calculate combined hash if --combined was specified
if args.combined:
print("Start building FOD for combined sources to get output hash")
print("Building FOD of combined sources to retrieve output hash")
# remove hashes from lock file and init sourcesCombinedHash with empty string
strip_hashes_from_lock(lock)
@ -102,7 +217,7 @@ def translate(args):
proc = sp.run(
[
"nix", "build", "--impure", "-L", "--expr",
f"(import {dream2nix_src} {{}}).fetchSources {{ genericLock = {output}; }}"
f"(import {dream2nix_src} {{}}).fetchSources {{ dreamLock = {output}; }}"
],
capture_output=True,
)
@ -118,9 +233,10 @@ def translate(args):
# store the hash in the lock
lock['generic']['sourcesCombinedHash'] = hash
with open(output, 'w') as f:
json.dump(lock, f, indent=2)
# re-write dream.lock
with open(output, 'w') as f:
json.dump(order_dict(lock), f, indent=2)
print(f"Created {output}")
@ -143,7 +259,10 @@ def parse_args():
description="list available translators"
)
list_parser.set_defaults(func=list)
list_parser.set_defaults(func=list_translators)
# PARSER FOR TRANSLATOR
translate_parser = sub.add_parser(
"translate",
@ -173,10 +292,19 @@ def parse_args():
translate_parser.add_argument(
"input",
help="input files containing relevant metadata",
help="input files or directories containing sources and metadata",
nargs="+"
)
# parse special args
# (custom parameters required by individual translators)
parsed, unknown = translate_parser.parse_known_args()
for arg in unknown:
if arg.startswith("--arg_"):
translate_parser.add_argument(arg.split('=')[0])
if arg.startswith("--flag_"):
translate_parser.add_argument(arg.split('=')[0], action='store_true')
args = parser.parse_args()
if not hasattr(args, "func"):

View File

@ -8,17 +8,19 @@
}:
let
callPackage = pkgs.callPackage;
cliPython = pkgs.python3.withPackages (ps: [ ps.networkx ]);
in
{
# the unified translator cli
cli = callPackage ({ python3, writeScript, ... }:
cli = callPackage ({ writeScript, ... }:
writeScript "cli" ''
export d2nExternalSources=${externalSources}
translatorsJsonFile=${translators.translatorsJsonFile} \
dream2nixSrc=${../.} \
${python3}/bin/python ${./cli.py} "$@"
${cliPython}/bin/python ${./cli.py} "$@"
''
) {};
@ -43,6 +45,9 @@ in
mkdir $target/external
cp -r ${externalSources}/* $target/external/
chmod -R +w $target
echo "Installed dream2nix successfully to '$target'."
echo "Please check/modify settings in '$target/config.json'"
''
) {};
}

View File

@ -1,5 +1,6 @@
{
callPackage,
...
}:
{
python = rec {
@ -8,5 +9,12 @@
simpleBuilder = callPackage ./python/simple-builder {};
};
nodejs = rec {
default = node2nix;
node2nix = callPackage ./nodejs/node2nix {};
};
}

View File

@ -0,0 +1,75 @@
# builder imported from node2nix
{
externals,
node2nix ? externals.node2nix,
lib,
pkgs,
...
}:
{
fetchedSources,
dreamLock,
}:
let
mainPackageName = dreamLock.generic.mainPackage;
nodejsVersion = dreamLock.buildSystem.nodejsVersion;
nodejs =
pkgs."nodejs-${builtins.toString nodejsVersion}_x"
or (throw "Could not find nodejs version '${builtins.toString nodejsVersion}' in pkgs");
node2nixEnv = node2nix nodejs;
node2nixDependencies =
let
makeSource = name: {
name = lib.head (lib.splitString "#" name);
packageName = lib.head (lib.splitString "#" name);
version = dreamLock.sources."${name}".version;
src = fetchedSources."${name}";
dependencies =
lib.forEach
(lib.filter
(depName: ! builtins.elem depName dreamLock.generic.dependencyGraph."${mainPackageName}")
(dreamLock.generic.dependencyGraph."${name}" or []))
(dependency:
makeSource dependency
);
};
in
lib.forEach
dreamLock.generic.dependencyGraph."${mainPackageName}"
(dependency: makeSource dependency);
callNode2Nix = funcName: args:
node2nixEnv."${funcName}" rec {
name = mainPackageName;
packageName = name;
version = dreamLock.sources."${mainPackageName}".version;
dependencies = node2nixDependencies;
# buildInputs ? []
# npmFlags ? ""
# dontNpmInstall ? false
# preRebuild ? ""
# dontStrip ? true
# unpackPhase ? "true"
# buildPhase ? "true"
# meta ? {}
production = true;
bypassCache = true;
reconstructLock = true;
src = fetchedSources."${dreamLock.generic.mainPackage}";
}
// args;
in
{
package = callNode2Nix "buildNodePackage" {};
shell = callNode2Nix "buildNodeShell" {};
}

View File

@ -1,26 +1,46 @@
# A very simple single derivation python builder
{
lib,
pkgs,
...
}:
{
fetchedSources,
genericLock,
dreamLock,
}:
let
python = pkgs."${genericLock.buildSystem.pythonAttr}";
python = pkgs."${dreamLock.buildSystem.pythonAttr}";
buildFunc =
if dreamLock.buildSystem.application then
python.pkgs.buildPythonApplication
else
python.pkgs.buildPythonPackage;
mainPackageName = dreamLock.generic.mainPackage;
packageName =
if mainPackageName == null then
if dreamLock.buildSystem.application then
"application"
else
"environment"
else
mainPackageName;
in
python.pkgs.buildPythonPackage {
name = "python-environment";
buildFunc {
name = packageName;
format = "";
src = lib.attrValues fetchedSources;
src = fetchedSources."${toString (mainPackageName)}" or null;
buildInputs = pkgs.pythonManylinuxPackages.manylinux1;
nativeBuildInputs = [ pkgs.autoPatchelfHook python.pkgs.wheelUnpackHook ];
unpackPhase = ''
mkdir dist
for file in $src; do
for file in ${builtins.toString (lib.attrValues fetchedSources)}; do
# pick right most element of path
fname=''${file##*/}
fname=$(stripHash $fname)
@ -31,7 +51,7 @@ python.pkgs.buildPythonPackage {
runHook preInstall
mkdir -p "$out/${python.sitePackages}"
export PYTHONPATH="$out/${python.sitePackages}:$PYTHONPATH"
${python}/bin/python -m pip install ./dist/*.{whl,tar.gz,zip} \
${python}/bin/python -m pip install ./dist/*.{whl,tar.gz,zip} $src \
--no-index \
--no-warn-script-location \
--prefix="$out" \

src/config.json Normal file (3 added lines)
View File

@ -0,0 +1,3 @@
{
"allowBuiltinFetchers": true
}

View File

@ -1,8 +1,11 @@
{
pkgs ? import <nixpkgs> {},
lib ? pkgs.lib,
externalSources ?
if builtins.getEnv "d2nExternalSources" != "" then
# if called via CLI, load externals via env
if builtins ? getEnv && builtins.getEnv "d2nExternalSources" != "" then
builtins.getEnv "d2nExternalSources"
# load from default directory
else
./external,
}:
@ -13,23 +16,35 @@ let
callPackage = f: args: pkgs.callPackage f (args // {
inherit callPackage;
inherit externals;
inherit externalSources;
inherit utils;
});
externals = {
npmlock2nix = pkgs.callPackage "${externalSources}/npmlock2nix/internal.nix" {};
node2nix = nodejs: pkgs.callPackage "${externalSources}/node2nix/node-env.nix" { inherit nodejs; };
};
config = builtins.fromJSON (builtins.readFile ./config.json);
in
rec {
apps = callPackage ./apps { inherit externalSources location translators; };
# apps for CLI and installation
apps = callPackage ./apps { inherit location translators; };
# builder implementations for all subsystems
builders = callPackage ./builders {};
fetchers = callPackage ./fetchers {};
# fetcher implementations
fetchers = callPackage ./fetchers {
inherit (config) allowBuiltinFetchers;
};
translators = callPackage ./translators { inherit externalSources externals location; };
# the translator modules and utils for all subsystems
translators = callPackage ./translators { inherit location; };
# the location of the dream2nix framework for self references (update scripts, etc.)
@ -37,58 +52,87 @@ rec {
# automatically find a suitable builder for a given generic lock
findBuilder = genericLock:
findBuilder = dreamLock:
let
buildSystem = genericLock.generic.buildSystem;
buildSystem = dreamLock.generic.buildSystem;
in
builders."${buildSystem}".default;
if ! builders ? "${buildSystem}" then
throw "Could not find any builder for subsystem '${buildSystem}'"
else
builders."${buildSystem}".default;
# detect if granular or combined fetching must be used
findFetcher = genericLock:
if null != genericLock.generic.sourcesCombinedHash then
findFetcher = dreamLock:
if null != dreamLock.generic.sourcesCombinedHash then
fetchers.combinedFetcher
else
fetchers.defaultFetcher;
# automatically parse dream.lock if passed as file
parseLock = lock:
if builtins.isPath lock || builtins.isString lock then
builtins.fromJSON (builtins.readFile lock)
else
lock;
# fetch only sources and do not build
fetchSources =
{
genericLock,
builder ? findBuilder (parseLock genericLock),
fetcher ? findFetcher (parseLock genericLock)
dreamLock,
builder ? findBuilder (parseLock dreamLock),
fetcher ? findFetcher (parseLock dreamLock),
sourceOverrides ? oldSources: {},
allowBuiltinFetchers ? true,
}:
let
# is generic lock is a file, read and parse it
genericLock' = (parseLock genericLock);
# if generic lock is a file, read and parse it
dreamLock' = (parseLock dreamLock);
fetched = fetcher {
sources = genericLock'.sources;
sourcesCombinedHash = genericLock'.generic.sourcesCombinedHash;
inherit allowBuiltinFetchers;
sources = dreamLock'.sources;
sourcesCombinedHash = dreamLock'.generic.sourcesCombinedHash;
};
sourcesToReplace = sourceOverrides fetched.fetchedSources;
sourcesOverridden = lib.mapAttrs (pname: source:
sourcesToReplace."${pname}" or source
) fetched.fetchedSources;
sourcesEnsuredOverridden = lib.mapAttrs (pname: source:
if source == "unknown" then throw ''
Source '${pname}' is unknown. Please override using:
dream2nix.buildPackage {
...
sourceOverrides = oldSources: {
"${pname}" = ...;
};
...
};
''
else source
) sourcesOverridden;
in
fetched;
fetched // {
fetchedSources = sourcesEnsuredOverridden;
};
# automatically build package defined by generic lock
# build package defined by dream.lock
# TODO: rename to riseAndShine
buildPackage =
{
genericLock,
builder ? findBuilder (parseLock genericLock),
fetcher ? findFetcher (parseLock genericLock)
dreamLock,
builder ? findBuilder (parseLock dreamLock),
fetcher ? findFetcher (parseLock dreamLock),
sourceOverrides ? oldSources: {},
allowBuiltinFetchers ? true,
}@args:
let
# is generic lock is a file, read and parse it
genericLock' = (parseLock genericLock);
# if generic lock is a file, read and parse it
dreamLock' = (parseLock dreamLock);
in
builder {
genericLock = genericLock';
dreamLock = dreamLock';
fetchedSources = (fetchSources args).fetchedSources;
};

View File

@ -1,3 +1,6 @@
# this fetcher takes an attrset of sources and combines all contained FODs
# into one large FOD. Non-FOD sources like derivations and store paths are
# left untouched
{
defaultFetcher,
@ -20,11 +23,32 @@ let
defaultFetched = (defaultFetcher { inherit sources; }).fetchedSources;
# extract the arguments from the individual fetcher calls
fetcherArgsAll = lib.mapAttrs (pname: fetched:
(fetched.overrideAttrs (args: {
passthru.originalArgs = args;
})).originalArgs
) defaultFetched;
FODArgsAll =
let
FODArgsAll' =
lib.mapAttrs
(pname: fetched:
# handle FOD sources
if lib.all (attr: fetched ? "${attr}") [ "outputHash" "outputHashAlgo" "outputHashMode" ] then
(fetched.overrideAttrs (args: {
passthru.originalArgs = args;
})).originalArgs
# handle unknown sources
else if fetched == "unknown" then
"unknown"
# error out on unknown source types
else
throw ''
Error while generating FOD fetcher for combined sources.
Cannot classify source of '${pname}'.
''
)
defaultFetched;
in
lib.filterAttrs (pname: fetcherArgs: fetcherArgs != "unknown") FODArgsAll';
# convert arbitrary types to string, like nix does with derivation arguments
toString = x:
@ -61,16 +85,17 @@ let
mkdir $out
${lib.concatStringsSep "\n" (lib.mapAttrsToList (pname: fetcherArgs: ''
OUT_ORIG=$out
export out=$OUT_ORIG/${fetcherArgs.name}
mkdir workdir
pushd workdir
${fetchItem pname fetcherArgs}
popd
rm -r workdir
export out=$OUT_ORIG
'') fetcherArgsAll )}
${lib.concatStringsSep "\n" (lib.mapAttrsToList (pname: fetcherArgs:
''
OUT_ORIG=$out
export out=$OUT_ORIG/${fetcherArgs.name}
mkdir workdir
pushd workdir
${fetchItem pname fetcherArgs}
popd
rm -r workdir
export out=$OUT_ORIG
'') FODArgsAll )}
echo "FOD_PATH=$(${nix}/bin/nix hash-path $out)"
'';
@ -80,7 +105,7 @@ let
nativeBuildInputs' = lib.foldl (a: b: a ++ b) [] (
lib.mapAttrsToList
(pname: fetcherArgs: (fetcherArgs.nativeBuildInputs or []))
fetcherArgsAll
FODArgsAll
);
in
stdenv.mkDerivation rec {
@ -101,6 +126,9 @@ in
fetchedSources =
# attrset: pname -> path of downloaded source
lib.genAttrs (lib.attrNames sources) (pname:
"${FODAllSources}/${fetcherArgsAll."${pname}".name}"
if FODArgsAll ? "${pname}" then
"${FODAllSources}/${FODArgsAll."${pname}".name}"
else
defaultFetched."${pname}"
);
}

View File

@ -11,31 +11,77 @@
{
# sources attrset from generic lock
sources,
allowBuiltinFetchers,
...
}:
let
githubMissingHashErrorText = pname: ''
Error: Cannot verify the integrity of the source of '${pname}'
It is a GitHub reference with no hash provided.
Solve this problem in any of the following ways:
- (alternative 1): allow the use of builtin fetchers (which can verify using git rev).
```
dream2nix.buildPackage {
...
allowBuiltinFetchers = true;
...
}
```
- (alternative 2): add a hash to the source via override
```
dream2nix.buildPackage {
...
sourceOverrides = oldSources: {
"${pname}" = oldSources."${pname}".overrideAttrs (_:{
hash = "";
})
}
...
}
```
'';
in
{
# attrset: pname -> path of downloaded source
fetchedSources = lib.mapAttrs (pname: source:
if source.type == "github" then
fetchFromGitHub {
inherit (source) url owner repo rev;
sha256 = source.hash or null;
}
# handle when no hash is provided
if ! source ? hash then
if allowBuiltinFetchers then
builtins.fetchGit {
inherit (source) rev;
allRefs = true;
url = "https://github.com/${source.owner}/${source.repo}";
}
else
throw githubMissingHashErrorText pname
else
fetchFromGitHub {
inherit (source) url owner repo rev;
hash = source.hash or null;
}
else if source.type == "gitlab" then
fetchFromGitLab {
inherit (source) url owner repo rev;
sha256 = source.hash or null;
hash = source.hash or null;
}
else if source.type == "git" then
fetchgit {
inherit (source) url rev;
sha256 = source.hash or null;
hash = source.hash or null;
}
else if source.type == "fetchurl" then
fetchurl {
inherit (source) url;
sha256 = source.hash or null;
hash = source.hash or null;
}
else if source.type == "unknown" then
"unknown"
else throw "unsupported source type '${source.type}'"
) sources;
}

View File

@ -1,5 +1,9 @@
{
callPackage,
# config
allowBuiltinFetchers,
...
}:
rec {
defaultFetcher = callPackage ./default-fetcher.nix {};

View File

@ -11,19 +11,28 @@
let
lib = pkgs.lib;
callTranslator = subsystem: type: name: file: args:
let
translator = callPackage file (args // {
inherit externals;
translatorName = name;
});
translatorWithBin =
# if the translator is a pure nix translator,
# generate a translatorBin for CLI compatibility
if translator ? translateBin then translator
else translator // {
translateBin = wrapPureTranslator [ subsystem type name ];
};
in
# if the translator is a pure nix translator,
# generate a translatorBin for CLI compatibility
if translator ? translateBin then translator
else translator // {
translateBin = wrapPureTranslator [ subsystem type name ];
translatorWithBin // {
inherit subsystem type name;
translate = args:
translator.translate
((getSpecialArgsDefaults translator.specialArgs or {}) // args);
};
buildSystems = dirNames ./.;
@ -41,7 +50,7 @@ let
bin = pkgs.writeScriptBin "translate" ''
#!${pkgs.bash}/bin/bash
jsonInputFile=$1
jsonInputFile=$(realpath $1)
outputFile=$(${pkgs.jq}/bin/jq '.outputFile' -c -r $jsonInputFile)
export d2nExternalSources=${externalSources}
@ -74,27 +83,37 @@ let
)
);
# json file exposing all existing translators to CLI
# json file exposing all existing translators to CLI including their special args
translatorsJsonFile =
pkgs.writeText
"translators.json"
(builtins.toJSON
(mkTranslatorsSet (subsystem: type:
dirNames (./. + "/${subsystem}/${type}")
let
data = lib.mapAttrsRecursiveCond
(as: !(as ? "translateBin"))
(k: v:
v.specialArgs or {}
)
));
translators;
in
pkgs.writeText "translators.json" (builtins.toJSON data);
# filter translators by compatibility for the given input paths
compatibleTranslators = paths: translators_:
compatibleTranslators =
{
inputDirectories,
inputFiles,
translators,
}:
let
compatible =
lib.mapAttrs (subsystem: types:
lib.mapAttrs (type: translators:
lib.mapAttrs (type: translators_:
lib.filterAttrs (name: translator:
translator ? compatiblePaths && translator.compatiblePaths paths == paths
) translators
translator ? compatiblePaths
&&
translator.compatiblePaths { inherit inputDirectories inputFiles; }
== { inherit inputDirectories inputFiles; }
) translators_
) types
) translators_;
) translators;
in
# purge empty attrsets
lib.filterAttrsRecursive (k: v: v != {}) (lib.filterAttrsRecursive (k: v: v != {}) compatible);
@ -126,25 +145,52 @@ let
# return the correct translator bin for the given input paths
selectTranslatorBin = utils.makeCallableViaEnv (
selectTranslator = utils.makeCallableViaEnv (
{
selector, # like 'python.impure' or 'python.impure.pip'
inputPaths, # input paths to translate
inputDirectories, # input paths to translate
inputFiles, # input paths to translate
...
}:
let
selectedTranslators = reduceTranslatorsBySelector selector translators;
compatTranslators = compatibleTranslators inputPaths selectedTranslators;
compatTranslators = compatibleTranslators {
inherit inputDirectories inputFiles;
translators = selectedTranslators;
};
in
if selectedTranslators == {} then
throw "The selector '${selector}' does not select any known translators"
else if compatTranslators == {} then
throw "Could not find any translator which is compatible to the given inputs: ${builtins.toString inputPaths}"
throw ''
Could not find any translator which is compatible to the given inputs:
- ${builtins.concatStringsSep "\n - " (inputDirectories ++ inputFiles)}
''
else
(lib.head (lib.attrValues (lib.head (lib.attrValues (lib.head (lib.attrValues compatTranslators)))))).translateBin
lib.head (lib.attrValues (lib.head (lib.attrValues (lib.head (lib.attrValues compatTranslators)))))
);
getSpecialArgsDefaults = specialArgsDef:
lib.mapAttrs
(name: def:
if def.type == "flag" then
false
else
def.default
)
specialArgsDef;
selectTranslatorJSON = args:
let
translator = (selectTranslator args);
data = {
SpecialArgsDefaults = getSpecialArgsDefaults (translator.specialArgs or {});
inherit (translator) subsystem type name;
};
in
builtins.toJSON data;
in
{
inherit translators translatorsJsonFile selectTranslatorBin;
inherit translators translatorsJsonFile selectTranslatorJSON;
}

View File

@ -1,31 +1,136 @@
{
lib,
externals,
translatorName,
utils,
...
}:
let
{
translate =
{
inputPaths,
inputDirectories,
inputFiles,
dev,
...
}:
let
parsed = externals.npmlock2nix.readLockfile (builtins.elemAt inputPaths 0);
parsed = externals.npmlock2nix.readLockfile (builtins.elemAt inputFiles 0);
parseGithubDependency = dependency:
externals.npmlock2nix.parseGitHubRef dependency.version;
getVersion = dependency:
if dependency ? from && dependency ? version then
builtins.substring 0 8 (parseGithubDependency dependency).rev
else
dependency.version;
pinVersions = dependencies: parentScopeDeps:
lib.mapAttrs
(pname: pdata:
let
selfScopeDeps = parentScopeDeps // (pdata.dependencies or {});
in
pdata // {
depsExact =
if ! pdata ? requires then
[]
else
lib.forEach (lib.attrNames pdata.requires) (reqName:
"${reqName}#${getVersion selfScopeDeps."${reqName}"}"
);
dependencies = pinVersions (pdata.dependencies or {}) selfScopeDeps;
}
)
dependencies;
packageLockWithPinnedVersions = pinVersions parsed.dependencies parsed.dependencies;
# recursively collect dependencies
parseDependencies = dependencies:
lib.mapAttrsToList # returns list of lists
(pname: pdata:
if ! dev && pdata.dev or false then
[]
else
# handle github dependency
if pdata ? from && pdata ? version then
let
githubData = parseGithubDependency pdata;
in
[ rec {
name = "${pname}#${version}";
version = builtins.substring 0 8 githubData.rev;
owner = githubData.org;
repo = githubData.repo;
rev = githubData.rev;
type = "github";
depsExact = pdata.depsExact;
}]
# handle http(s) dependency
else
[rec {
name = "${pname}#${version}";
version = pdata.version;
url = pdata.resolved;
type = "fetchurl";
hash = pdata.integrity;
depsExact = pdata.depsExact;
}]
++
(lib.optionals (pdata ? dependencies)
(lib.flatten (parseDependencies pdata.dependencies))
)
)
dependencies;
in
{
sources = builtins.mapAttrs (pname: pdata:{
url = pdata.resolved;
type = "fetchurl";
hash = pdata.integrity;
}) parsed.dependencies;
rec {
sources =
let
lockedSources = lib.listToAttrs (
map
(dep: lib.nameValuePair
dep.name
(
if dep.type == "github" then
{ inherit (dep) type version owner repo rev; }
else
{ inherit (dep) type version url hash; }
)
)
(lib.flatten (parseDependencies packageLockWithPinnedVersions))
);
in
# if only a package-lock.json is given, the main source is missing
lockedSources // {
"${parsed.name}" = {
type = "unknown";
version = parsed.version;
};
};
generic = {
buildSystem = "nodejs";
buildSystemFormatVersion = 1;
producedBy = translatorName;
dependencyGraph = null;
mainPackage = parsed.name;
dependencyGraph =
{
"${parsed.name}" =
lib.mapAttrsToList
(pname: pdata: "${pname}#${getVersion pdata}")
(lib.filterAttrs
(pname: pdata: ! (pdata.dev or false) || dev)
parsed.dependencies);
}
//
lib.listToAttrs
(map
(dep: lib.nameValuePair dep.name dep.depsExact)
(lib.flatten (parseDependencies packageLockWithPinnedVersions))
);
sourcesCombinedHash = null;
};
@ -34,10 +139,23 @@ let
};
};
compatiblePaths = paths: utils.compatibleTopLevelPaths ".*(package-lock\\.json)" paths;
compatiblePaths =
{
inputDirectories,
inputFiles,
}@args:
{
inputDirectories = [];
inputFiles =
lib.filter (f: builtins.match ".*(package-lock\\.json)" f != null) args.inputFiles;
};
in
specialArgs = {
{
inherit translate compatiblePaths;
dev = {
description = "include dependencies for development";
type = "flag";
};
};
}
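
Because the Nix above is dense, a rough Python rendering of the `pinVersions` idea may help (my sketch, not part of the commit; it skips the GitHub special case that `getVersion` handles): npm's nested package-lock scoping is resolved by layering a package's own bundled dependencies over the parent scope, and every entry in `requires` is rewritten to an exact `name#version` reference.

```python
# Hypothetical sketch mirroring pinVersions from the Nix translator above.
def pin_versions(dependencies, parent_scope):
    pinned = {}
    for name, data in dependencies.items():
        # a package's requires resolve against its own bundled dependencies
        # first, then against the enclosing scope
        scope = {**parent_scope, **data.get("dependencies", {})}
        pinned[name] = {
            **data,
            "depsExact": [
                f"{req}#{scope[req]['version']}"
                for req in data.get("requires", {})
            ],
            "dependencies": pin_versions(data.get("dependencies", {}), scope),
        }
    return pinned
```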

View File

@ -13,6 +13,7 @@
{
# the input format is specified in /specifications/translator-call-example.json
# this script receives a json file including the input paths and specialArgs
translateBin = writeScriptBin "translate" ''
#!${bash}/bin/bash
@ -21,32 +22,75 @@
# according to the spec, the translator reads the input from a json file
jsonInput=$1
# extract the 'inputPaths' field from the json
inputPaths=$(${jq}/bin/jq '.inputPaths | .[]' -c -r $jsonInput)
# read the json input
outputFile=$(${jq}/bin/jq '.outputFile' -c -r $jsonInput)
pythonAttr=$(${jq}/bin/jq '.pythonAttr' -c -r $jsonInput)
inputDirectories=$(${jq}/bin/jq '.inputDirectories | .[]' -c -r $jsonInput)
inputFiles=$(${jq}/bin/jq '.inputFiles | .[]' -c -r $jsonInput)
# pip executable
pip=${python3.pkgs.pip}/bin/pip
# build python and pip executables
tmpBuild=$(mktemp -d)
cd $tmpBuild
nix build --impure --expr "(import <nixpkgs> {}).$pythonAttr" -o python
nix build --impure --expr "(import <nixpkgs> {}).$pythonAttr.pkgs.pip" -o pip
cd -
# prepare temporary directory
tmp=translateTmp
rm -rf $tmp
mkdir $tmp
tmp=$(mktemp -d)
# download files according to requirements
$pip download \
$tmpBuild/pip/bin/pip download \
--no-cache \
--dest $tmp \
--progress-bar off \
-r ''${inputPaths/$'\n'/$' -r '}
-r ''${inputFiles/$'\n'/$' -r '}
# generate the generic lock from the downloaded list of files
${python3}/bin/python ${./generate-generic-lock.py} $tmp $outputFile
$tmpBuild/python/bin/python ${./generate-generic-lock.py} $tmp $jsonInput
rm -rf $tmp
rm -rf $tmp $tmpBuild
'';
# from a given list of paths, this function returns all paths which can be processed by this translator
compatiblePaths = paths: utils.compatibleTopLevelPaths ".*(requirements).*\\.txt" paths;
compatiblePaths =
{
inputDirectories,
inputFiles,
}@args:
{
inputDirectories = [];
inputFiles = lib.filter (f: builtins.match ".*(requirements).*\\.txt" f != null) args.inputFiles;
};
# define special args and provide defaults
specialArgs = {
# the python attribute
pythonAttr = {
default = "python3${lib.elemAt (lib.splitString "." python3.version) 1}";
description = "python version to translate for";
examples = [
"python27"
"python39"
"python310"
];
type = "argument";
};
main = {
default = "";
description = "name of the main package";
examples = [
"some-package"
];
type = "argument";
};
application = {
description = "build application instead of package";
type = "flag";
};
};
}

View File

@ -7,7 +7,9 @@ import sys
def main():
directory = sys.argv[1]
output_file = sys.argv[2]
with open(sys.argv[2]) as f:
jsonInput = json.load(f)
packages = {}
@ -42,12 +44,11 @@ def main():
)
# create generic lock
generic_lock = dict(
dream_lock = dict(
sources={},
generic={
"buildSystem": "python",
"buildSystemFormatVersion": 1,
"producedBy": "external-pip",
"mainPackage": None,
# This translator is not aware of the exact dependency graph.
# This restricts us to use a single derivation builder later,
@ -57,6 +58,8 @@ def main():
"sourcesCombinedHash": None,
},
buildSystem={
"main": jsonInput['main'],
"application": jsonInput['application'],
"pythonAttr": f"python{sys.version_info.major}{sys.version_info.minor}",
"sourceFormats":
{pname: data['format'] for pname, data in packages.items()}
@ -65,15 +68,16 @@ def main():
# populate sources of generic lock
for pname, data in packages.items():
generic_lock['sources'][pname] = dict(
dream_lock['sources'][pname] = dict(
url=data['url'],
hash=data['sha256'],
type='fetchurl',
)
# write the dream lock to the output file (json)
with open(output_file, 'w') as lock:
json.dump(generic_lock, lock, indent=2)
print(jsonInput['outputFile'])
with open(jsonInput['outputFile'], 'w') as lock:
json.dump(dream_lock, lock, indent=2)
if __name__ == "__main__":