Merge pull request #29 from DavHau/dev

fix python translator+fetcher+builder
This commit is contained in:
DavHau 2021-10-20 16:41:06 +07:00 committed by GitHub
commit 42287603ca
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 249 additions and 117 deletions

View File

@@ -1,5 +1,20 @@
{
"nodes": {
"mach-nix": {
"flake": false,
"locked": {
"lastModified": 1634711045,
"narHash": "sha256-m5A2Ty88NChLyFhXucECj6+AuiMZPHXNbw+9Kcs7F6Y=",
"owner": "DavHau",
"repo": "mach-nix",
"rev": "4433f74a97b94b596fa6cd9b9c0402104aceef5d",
"type": "github"
},
"original": {
"id": "mach-nix",
"type": "indirect"
}
},
"nix-parsec": {
"flake": false,
"locked": {
@@ -65,6 +80,7 @@
},
"root": {
"inputs": {
"mach-nix": "mach-nix",
"nix-parsec": "nix-parsec",
"nixpkgs": "nixpkgs",
"node2nix": "node2nix",

View File

@@ -7,6 +7,9 @@
# required for translator nodejs/pure/package-lock
nix-parsec = { url = "github:nprindle/nix-parsec"; flake = false; };
# required for translator pip
mach-nix = { url = "mach-nix"; flake = false; };
# required for builder nodejs/node2nix
node2nix = { url = "github:svanderburg/node2nix"; flake = false; };
@@ -14,7 +17,7 @@
npmlock2nix = { url = "github:nix-community/npmlock2nix"; flake = false; };
};
outputs = { self, nix-parsec, nixpkgs, node2nix, npmlock2nix }:
outputs = { self, mach-nix, nix-parsec, nixpkgs, node2nix, npmlock2nix }:
let
lib = nixpkgs.lib;
@@ -26,7 +29,8 @@
);
externalSourcesFor = forAllSystems (system: pkgs: pkgs.runCommand "dream2nix-vendored" {} ''
mkdir -p $out/{npmlock2nix,node2nix,nix-parsec}
mkdir -p $out/{mach-nix-lib,npmlock2nix,node2nix,nix-parsec}
cp ${mach-nix}/{lib/extractor/{default.nix,distutils.patch,setuptools.patch},LICENSE} $out/mach-nix-lib/
cp ${npmlock2nix}/{internal.nix,LICENSE} $out/npmlock2nix/
cp ${node2nix}/{nix/node-env.nix,LICENSE} $out/node2nix/
cp ${nix-parsec}/{parsec,lexer}.nix $out/nix-parsec/

View File

@@ -102,7 +102,7 @@ class PackageCommand(Command):
sourceSpec =\
callNixFunction("fetchers.translateShortcut", shortcut=source)
source =\
buildNixFunction("fetchers.fetchShortcut", shortcut=source)
buildNixFunction("fetchers.fetchShortcut", shortcut=source, extract=True)
# handle source paths
else:
# check if source path exists
@@ -118,7 +118,7 @@ class PackageCommand(Command):
sourceSpec =\
sourceDreamLock['sources'][sourceDreamLock['generic']['mainPackage']]
source = \
buildNixFunction("fetchers.fetchSource", source=sourceSpec)
buildNixFunction("fetchers.fetchSource", source=sourceSpec, extract=True)
# select translator
translatorsSorted = sorted(
@@ -216,10 +216,10 @@ class PackageCommand(Command):
print(f"Please specify '{arg_name}': {arg['description']}")
specified_extra_args[arg_name] = self.confirm(f"{arg['description']}:", False)
else:
print(
f"Please specify '{arg_name}': {arg['description']}"
f"\nLeave emtpy for default ({arg['default']})")
print(f"Please specify '{arg_name}': {arg['description']}")
print(f"Example values: " + ', '.join(arg['examples']))
if 'default' in arg:
print(f"\nLeave emtpy for default ({arg['default']})")
specified_extra_args[arg_name] = self.ask(f"{arg_name}:", arg.get('default'))
# arguments for calling the translator nix module
@@ -280,42 +280,43 @@ class PackageCommand(Command):
# clean up dependency graph
# remove empty entries
depGraph = lock['generic']['dependencyGraph']
if 'dependencyGraph' in lock['generic']:
for pname, deps in depGraph.copy().items():
if not deps:
del depGraph[pname]
depGraph = lock['generic']['dependencyGraph']
if 'dependencyGraph' in lock['generic']:
for pname, deps in depGraph.copy().items():
if not deps:
del depGraph[pname]
# remove cyclic dependencies
edges = set()
for pname, deps in depGraph.items():
for dep in deps:
edges.add((pname, dep))
G = nx.DiGraph(sorted(list(edges)))
cycle_count = 0
removed_edges = []
for pname in list(depGraph.keys()):
try:
while True:
cycle = nx.find_cycle(G, pname)
cycle_count += 1
# remove_dependecy(indexed_pkgs, G, cycle[-1][0], cycle[-1][1])
node_from, node_to = cycle[-1][0], cycle[-1][1]
G.remove_edge(node_from, node_to)
depGraph[node_from].remove(node_to)
removed_edges.append((node_from, node_to))
except nx.NetworkXNoCycle:
continue
lock['generic']['dependenciesRemoved'] = {}
if removed_edges:
# remove cyclic dependencies
edges = set()
for pname, deps in depGraph.items():
for dep in deps:
edges.add((pname, dep))
G = nx.DiGraph(sorted(list(edges)))
cycle_count = 0
removed_edges = []
for pname in list(depGraph.keys()):
try:
while True:
cycle = nx.find_cycle(G, pname)
cycle_count += 1
# remove_dependecy(indexed_pkgs, G, cycle[-1][0], cycle[-1][1])
node_from, node_to = cycle[-1][0], cycle[-1][1]
G.remove_edge(node_from, node_to)
depGraph[node_from].remove(node_to)
removed_edges.append((node_from, node_to))
except nx.NetworkXNoCycle:
continue
lock['generic']['dependenciesRemoved'] = {}
removed_cycles_text = 'Removed Cyclic dependencies:'
for node, removed_node in removed_edges:
removed_cycles_text += f"\n {node} -> {removed_node}"
if node not in lock['generic']['dependenciesRemoved']:
lock['generic']['dependenciesRemoved'][node] = []
lock['generic']['dependenciesRemoved'][node].append(removed_node)
print(removed_cycles_text)
if removed_edges:
lock['generic']['dependenciesRemoved'] = {}
removed_cycles_text = 'Removed Cyclic dependencies:'
for node, removed_node in removed_edges:
removed_cycles_text += f"\n {node} -> {removed_node}"
if node not in lock['generic']['dependenciesRemoved']:
lock['generic']['dependenciesRemoved'][node] = []
lock['generic']['dependenciesRemoved'][node].append(removed_node)
print(removed_cycles_text)
# calculate combined hash if --combined was specified
if combined:
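
The cycle-breaking step above can be tried in isolation. Here is a minimal sketch, not part of this commit, that uses the same networkx calls (nx.DiGraph, nx.find_cycle, nx.NetworkXNoCycle) on a made-up toy dependency graph:

# break dependency cycles the same way the command above does, on a toy graph
import networkx as nx

dep_graph = {
    "a": ["b"],
    "b": ["c"],
    "c": ["a"],   # a -> b -> c -> a is a cycle
    "d": ["a"],
}

edges = sorted((pkg, dep) for pkg, deps in dep_graph.items() for dep in deps)
G = nx.DiGraph(edges)

removed_edges = []
for pkg in list(dep_graph):
    try:
        while True:
            cycle = nx.find_cycle(G, pkg)
            node_from, node_to = cycle[-1]      # drop the edge that closes the cycle
            G.remove_edge(node_from, node_to)
            dep_graph[node_from].remove(node_to)
            removed_edges.append((node_from, node_to))
    except nx.NetworkXNoCycle:
        continue

print(removed_edges)   # e.g. [('c', 'a')]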

View File

@@ -46,7 +46,7 @@ class UpdateCommand(Command):
exit(1)
# find new version
version = self.option('version')
version = self.option('new-version')
if not version:
update_script = buildNixFunction(
"updaters.makeUpdateScript",
@@ -55,7 +55,7 @@ class UpdateCommand(Command):
)
update_proc = sp.run([f"{update_script}/bin/run"], capture_output=True)
version = update_proc.stdout.decode().strip()
print(f"\nUpdating to version {version}")
print(f"Updating to version {version}")
cli_py = os.path.abspath(f"{__file__}/../../cli.py")
# delete the hash

View File

@@ -30,33 +30,35 @@ let
"environment"
else
mainPackageName;
in
buildFunc {
name = packageName;
format = "";
src = fetchedSources."${toString (mainPackageName)}" or null;
buildInputs = pkgs.pythonManylinuxPackages.manylinux1;
nativeBuildInputs = [ pkgs.autoPatchelfHook python.pkgs.wheelUnpackHook ];
unpackPhase = ''
mkdir dist
for file in ${builtins.toString (lib.attrValues fetchedSources)}; do
# pick right most element of path
fname=''${file##*/}
fname=$(stripHash $fname)
cp $file dist/$fname
done
'';
installPhase = ''
runHook preInstall
mkdir -p "$out/${python.sitePackages}"
export PYTHONPATH="$out/${python.sitePackages}:$PYTHONPATH"
${python}/bin/python -m pip install ./dist/*.{whl,tar.gz,zip} $src \
--no-index \
--no-warn-script-location \
--prefix="$out" \
--no-cache $pipInstallFlags \
--ignore-installed
runHook postInstall
'';
package = buildFunc {
name = packageName;
format = "";
buildInputs = pkgs.pythonManylinuxPackages.manylinux1;
nativeBuildInputs = [ pkgs.autoPatchelfHook python.pkgs.wheelUnpackHook ];
unpackPhase = ''
mkdir dist
for file in ${builtins.toString (lib.attrValues fetchedSources)}; do
# pick right most element of path
fname=''${file##*/}
fname=$(stripHash $fname)
cp $file dist/$fname
done
'';
installPhase = ''
runHook preInstall
mkdir -p "$out/${python.sitePackages}"
export PYTHONPATH="$out/${python.sitePackages}:$PYTHONPATH"
${python}/bin/python -m pip install ./dist/*.{whl,tar.gz,zip} $src \
--no-index \
--no-warn-script-location \
--prefix="$out" \
--no-cache $pipInstallFlags \
--ignore-installed
runHook postInstall
'';
};
in {
inherit package;
}
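
For orientation: the builder above copies every fetched sdist and wheel into ./dist and then runs pip exactly once over all of them, with the network disabled. A rough standalone sketch of that single invocation, assuming hypothetical DIST/OUT paths and using --no-cache-dir, pip's spelling of the cache-disabling option:

# one offline pip run over all fetched archives, mirroring the installPhase above
import glob
import subprocess
import sys

DIST = "./dist"   # where the fetched sdists/wheels were copied
OUT = "./out"     # stands in for the $out prefix

archives = sorted(
    glob.glob(f"{DIST}/*.whl") + glob.glob(f"{DIST}/*.tar.gz") + glob.glob(f"{DIST}/*.zip")
)

subprocess.run(
    [sys.executable, "-m", "pip", "install", *archives,
     "--no-index",                  # resolve only from the local archives
     "--no-warn-script-location",
     "--prefix", OUT,
     "--no-cache-dir",
     "--ignore-installed"],
    check=True,
)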

View File

@@ -193,12 +193,16 @@ rec {
# if generic lock is a file, read and parse it
dreamLock' = (parseLock dreamLock);
in
builder ({
inherit packageOverrides;
dreamLock = dreamLock';
fetchedSources = (fetchSources {
inherit dreamLock fetcher sourceOverrides allowBuiltinFetchers;
}).fetchedSources;
} // builderArgs);
builder (
{
dreamLock = dreamLock';
fetchedSources = (fetchSources {
inherit dreamLock fetcher sourceOverrides allowBuiltinFetchers;
}).fetchedSources;
}
// builderArgs
// lib.optionalAttrs (packageOverrides != {}) {
inherit packageOverrides;
});
}
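
The change above only forwards packageOverrides to the builder when there is actually something to override (lib.optionalAttrs). A rough Python analogy of that argument merging, with hypothetical names, purely for illustration:

# only include packageOverrides in the builder arguments when it is non-empty
def call_builder(builder, dream_lock, fetched_sources, builder_args, package_overrides):
    args = {
        "dreamLock": dream_lock,
        "fetchedSources": fetched_sources,
        **builder_args,
        **({"packageOverrides": package_overrides} if package_overrides else {}),
    }
    return builder(**args)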

View File

@@ -45,7 +45,8 @@ rec {
// (lib.optionalAttrs (! args ? hash) {
hash = fetcherOutputs.calcHash "sha256";
});
# update a source spec to a different version
updateSource =
{
source,
@@ -58,19 +59,30 @@ rec {
in
constructSource (argsKeep // {
version = newVersion;
} // {
"${fetcher.versionField}" = newVersion;
});
fetchSource = { source, }:
# fetch a source defined via a dream lock source spec
fetchSource = { source, extract ? false, }:
let
fetcher = fetchers."${source.type}";
fetcherOutputs = fetcher.outputs source;
maybeArchive = fetcherOutputs.fetched (source.hash or null);
in
fetcherOutputs.fetched (source.hash or null);
fetchShortcut = { shortcut, }:
fetchSource { source = translateShortcut { inherit shortcut; }; };
if extract then
utils.extractSource { source = maybeArchive; }
else
maybeArchive;
# fetch a source defined by a shortcut
fetchShortcut = { shortcut, extract ? false, }:
fetchSource {
source = translateShortcut { inherit shortcut; };
inherit extract;
};
# translate shortcut to dream lock source spec
translateShortcut = { shortcut, }:
let

View File

@@ -1,4 +1,5 @@
{
fetchurl,
python3,
utils,
@@ -23,13 +24,31 @@
in
{
calcHash = algo: utils.hashPath algo (b.fetchurl {
url = "https://files.pythonhosted.org/packages/${builtins.substring 0 1 pname}/${pname}/${pname}-${version}.${extension}";
});
calcHash = algo: utils.hashPath algo (
let
firstChar = builtins.substring 0 1 pname;
result = b.fetchurl {
url =
"https://files.pythonhosted.org/packages/source/"
+ "${firstChar}/${pname}/${pname}-${version}.${extension}";
};
in
result
);
fetched = hash:
python3.pkgs.fetchPypi {
inherit pname version extension hash;
};
let
firstChar = builtins.substring 0 1 pname;
result = (fetchurl {
url =
"https://files.pythonhosted.org/packages/source/"
+ "${firstChar}/${pname}/${pname}-${version}.${extension}";
sha256 = hash;
}).overrideAttrs (old: {
outputHashMode = "recursive";
});
in
result;
};
}
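
A small sketch, not part of this commit, of the sdist URL scheme the fetcher now targets, reusing the requests-2.26.0 example that appears elsewhere in this PR. Hashing the raw download here is only an approximation: the Nix code above switches the download to a recursive output hash, so its value will not equal a plain sha256 of the tarball.

# construct and fetch the PyPI sdist URL used by the fetcher above
import hashlib
import urllib.request

def pypi_sdist_url(pname: str, version: str, extension: str = "tar.gz") -> str:
    first_char = pname[0]
    return (
        "https://files.pythonhosted.org/packages/source/"
        f"{first_char}/{pname}/{pname}-{version}.{extension}"
    )

url = pypi_sdist_url("requests", "2.26.0")
with urllib.request.urlopen(url) as resp:
    data = resp.read()

print(url)
print(hashlib.sha256(data).hexdigest())   # plain sha256 of the tarball, for illustration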

View File

@@ -16,7 +16,8 @@
"fetchurl",
"git",
"github",
"gitlab"
"gitlab",
"pypi-sdist"
]
},
"url": {
@@ -63,6 +64,17 @@
"rev": { "type": "string" }
}
}
},
{
"if": {
"properties": { "type": { "const": "pypi-sdist" } }
},
"then": {
"properties": {
"pname": { "type": "string" },
"version": { "type": "string" }
}
}
}
]
}
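
With the schema extended like this, a pypi-sdist entry in a dream lock's sources section would look roughly like the following (shown as a Python dict to match the other sketches; the values are made up):

# hypothetical pypi-sdist source entry
source_spec = {
    "type": "pypi-sdist",
    "pname": "requests",
    "version": "2.26.0",
    "hash": "sha256-...",   # placeholder; normally produced by the fetcher's calcHash
}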

View File

@@ -1,5 +1,6 @@
{
# dream2nix utils
# dream2nix
externalSources,
utils,
bash,
@@ -10,6 +11,21 @@
...
}:
let
b = builtins;
machNixExtractor = "${externalSources}/mach-nix-lib/default.nix";
setuptools_shim = ''
import sys, setuptools, tokenize, os; sys.argv[0] = 'setup.py'; __file__='setup.py';
f=getattr(tokenize, 'open', open)(__file__);
code=f.read().replace('\r\n', '\n');
f.close();
exec(compile(code, __file__, 'exec'))
'';
in
{
# the input format is specified in /specifications/translator-call-example.json
@@ -24,29 +40,57 @@
# read the json input
outputFile=$(${jq}/bin/jq '.outputFile' -c -r $jsonInput)
inputDirectory=$(${jq}/bin/jq '.inputDirectories | .[0]' -c -r $jsonInput)
pythonAttr=$(${jq}/bin/jq '.pythonAttr' -c -r $jsonInput)
inputDirectories=$(${jq}/bin/jq '.inputDirectories | .[]' -c -r $jsonInput)
inputFiles=$(${jq}/bin/jq '.inputFiles | .[]' -c -r $jsonInput)
application=$(${jq}/bin/jq '.application' -c -r $jsonInput)
# build python and pip executables
tmpBuild=$(mktemp -d)
cd $tmpBuild
nix build --impure --expr "(import <nixpkgs> {}).$pythonAttr" -o python
nix build --impure --expr "(import <nixpkgs> {}).$pythonAttr.pkgs.pip" -o pip
cd -
nix build --show-trace --impure --expr \
"
(import ${machNixExtractor} {}).mkPy
(import <nixpkgs> {}).$pythonAttr
" \
-o $tmpBuild/python
nix build --impure --expr "(import <nixpkgs> {}).$pythonAttr.pkgs.pip" -o $tmpBuild/pip
python=$tmpBuild/python/bin/python
pip=$tmpBuild/pip/bin/pip
# prepare temporary directory
tmp=$(mktemp -d)
# extract python requirements from setup.py
cp -r $inputDirectory $tmpBuild/src
chmod -R +w $tmpBuild/src
cd $tmpBuild/src
chmod +x setup.py || true
echo "extracting dependencies"
out_file=$tmpBuild/python.json \
dump_setup_attrs=y \
PYTHONIOENCODING=utf8 \
LANG=C.utf8 \
$python -c "${setuptools_shim}" install &> $tmpBuild/python.log || true
# extract requirements from json result
$python -c "
import json
result = json.load(open('$tmpBuild/python.json'))
for key in ('install_requires', 'setup_requires'):
if key in result:
print('\n'.join(result[key]))
" > $tmpBuild/computed_requirements
# download files according to requirements
$tmpBuild/pip/bin/pip download \
--no-cache \
--dest $tmp \
--progress-bar off \
-r ''${inputFiles/$'\n'/$' -r '}
-r $tmpBuild/computed_requirements
# -r ''${inputFiles/$'\n'/$' -r '}
# generate the generic lock from the downloaded list of files
$tmpBuild/python/bin/python ${./generate-dream-lock.py} $tmp $jsonInput
MAIN=$(${jq}/bin/jq '.name' -c -r $tmpBuild/python.json) \
$tmpBuild/python/bin/python ${./generate-dream-lock.py} $tmp $jsonInput
rm -rf $tmp $tmpBuild
'';
@@ -79,14 +123,6 @@
type = "argument";
};
main = {
description = "name of the main package";
examples = [
"some-package"
];
type = "argument";
};
application = {
description = "build application instead of package";
type = "flag";

View File

@@ -2,7 +2,9 @@ from glob import glob
import base64
import hashlib
import json
import os
import sys
import urllib.request
def main():
@@ -25,7 +27,12 @@ def main():
# example: charset_normalizer-2.0.4-py3-none-any.whl
if file.endswith('.whl'):
format = 'wheel'
pname, _, pyver, _, _ = file.split('-')
pname, version, _, _, _ = file.split('-')
with urllib.request.urlopen(f'https://pypi.org/pypi/{pname}/json') as f:
releasesForVersion = json.load(f)['releases'][version]
release = next(r for r in releasesForVersion if r['filename'] == file)
pyver = release['python_version']
# example: requests-2.26.0.tar.gz
else:
format = 'sdist'
@@ -44,21 +51,18 @@
)
# create generic lock
# This translator is not aware of the exact dependency graph.
# This restricts us to using a single derivation builder later,
# which will install all packages at once
dream_lock = dict(
sources={},
generic={
"buildSystem": "python",
"mainPackage": None,
# This translator is not aware of the exact dependency graph.
# This restricts us to using a single derivation builder later,
# which will install all packages at once
"dependencyGraph": None,
"mainPackage": os.environ.get('MAIN'),
"sourcesCombinedHash": None,
},
buildSystem={
"main": jsonInput['main'],
"application": jsonInput['application'],
"pythonAttr": f"python{sys.version_info.major}{sys.version_info.minor}",
"sourceFormats":

View File

@@ -1,6 +1,7 @@
{
bash,
coreutils,
fetchzip,
lib,
nix,
runCommand,
@@ -13,11 +14,11 @@
let
b = builtins;
dreamLockUtils = callPackageDream ./dream-lock.nix {};
overrideUtils = callPackageDream ./override.nix {};
dreamLockUtils = callPackageDream ./dream-lock.nix {};
translatorUtils = callPackageDream ./translator.nix {};
in
@@ -82,4 +83,25 @@ rec {
${coreutils}/bin/rm -rf $tmpdir
'';
extractSource =
{
source,
}:
runCommand "${source.name}-extracted"
{
inherit source;
}
# fetchzip can extract tarballs as well
''
if test -d $source; then
ln -s $source $out
else
ln -s \
${(fetchzip { url="file:${source}"; }).overrideAttrs (old: {
outputHash = null;
})} \
$out
fi
'';
}
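
extractSource passes directories through untouched (a symlink) and otherwise unpacks the archive by routing it through fetchzip on a file: URL. A rough Python analogy of that behavior, purely illustrative, since the real unpacking is delegated to fetchzip:

# directory: just link it through; archive: extract it
import os
import tarfile
import zipfile

def extract_source(source: str, out: str) -> None:
    if os.path.isdir(source):
        os.symlink(source, out)
    elif zipfile.is_zipfile(source):
        with zipfile.ZipFile(source) as z:
            z.extractall(out)
    else:
        with tarfile.open(source) as t:     # auto-detects .tar.gz / .tar.bz2 / ...
            t.extractall(out)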