Merge pull request #485 from nix-community/merge-v1

Init module system based dream2nix (towards V1 API)
DavHau 2023-03-13 19:53:45 +07:00 committed by GitHub
commit 1d36c7f3ce
35 changed files with 2814 additions and 75 deletions

@@ -1,27 +1,5 @@
 {
   "nodes": {
-    "alejandra": {
-      "inputs": {
-        "fenix": "fenix",
-        "flakeCompat": "flakeCompat",
-        "nixpkgs": [
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1658427149,
-        "narHash": "sha256-ToD/1z/q5VHsLMrS2h96vjJoLho59eNRtknOUd19ey8=",
-        "owner": "kamadorueda",
-        "repo": "alejandra",
-        "rev": "f5a22afd2adfb249b4e68e0b33aa1f0fb73fb1be",
-        "type": "github"
-      },
-      "original": {
-        "owner": "kamadorueda",
-        "repo": "alejandra",
-        "type": "github"
-      }
-    },
     "all-cabal-json": {
       "flake": false,
       "locked": {
@@ -71,25 +49,43 @@
         "type": "github"
       }
     },
-    "fenix": {
+    "drv-parts": {
       "inputs": {
-        "nixpkgs": [
-          "alejandra",
-          "nixpkgs"
-        ],
-        "rust-analyzer-src": "rust-analyzer-src"
+        "flake-compat": "flake-compat",
+        "flake-parts": [
+          "flake-parts"
+        ],
+        "nixpkgs": [
+          "nixpkgs"
+        ]
       },
       "locked": {
-        "lastModified": 1657607339,
-        "narHash": "sha256-HaqoAwlbVVZH2n4P3jN2FFPMpVuhxDy1poNOR7kzODc=",
-        "owner": "nix-community",
-        "repo": "fenix",
-        "rev": "b814c83d9e6aa5a28d0cf356ecfdafb2505ad37d",
+        "lastModified": 1678556855,
+        "narHash": "sha256-uQ8WzJ8e5jYRYPIxjfNZIhTCdWAGzCAKsMHOkZ24do8=",
+        "owner": "davhau",
+        "repo": "drv-parts",
+        "rev": "2be8135cff33e1f484f12c1548564dea50c769b1",
         "type": "github"
       },
       "original": {
-        "owner": "nix-community",
-        "repo": "fenix",
+        "owner": "davhau",
+        "repo": "drv-parts",
+        "type": "github"
+      }
+    },
+    "flake-compat": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1673956053,
+        "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
+        "owner": "edolstra",
+        "repo": "flake-compat",
+        "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
+        "type": "github"
+      },
+      "original": {
+        "owner": "edolstra",
+        "repo": "flake-compat",
         "type": "github"
       }
     },
@@ -128,22 +124,6 @@
         "type": "github"
       }
     },
-    "flakeCompat": {
-      "flake": false,
-      "locked": {
-        "lastModified": 1650374568,
-        "narHash": "sha256-Z+s0J8/r907g149rllvwhb4pKi8Wam5ij0st8PwAh+E=",
-        "owner": "edolstra",
-        "repo": "flake-compat",
-        "rev": "b4a34015c698c7793d592d66adbab377907a2be8",
-        "type": "github"
-      },
-      "original": {
-        "owner": "edolstra",
-        "repo": "flake-compat",
-        "type": "github"
-      }
-    },
     "ghc-utils": {
       "flake": false,
       "locked": {
@@ -222,6 +202,21 @@
         "type": "indirect"
       }
     },
+    "nixpkgsV1": {
+      "locked": {
+        "lastModified": 1678500271,
+        "narHash": "sha256-tRBLElf6f02HJGG0ZR7znMNFv/Uf7b2fFInpTHiHaSE=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "5eb98948b66de29f899c7fe27ae112a47964baf8",
+        "type": "github"
+      },
+      "original": {
+        "id": "nixpkgs",
+        "ref": "nixos-22.11",
+        "type": "indirect"
+      }
+    },
     "poetry2nix": {
       "flake": false,
       "locked": {
@@ -281,10 +276,10 @@
     },
     "root": {
       "inputs": {
-        "alejandra": "alejandra",
         "all-cabal-json": "all-cabal-json",
         "crane": "crane",
         "devshell": "devshell",
+        "drv-parts": "drv-parts",
         "flake-parts": "flake-parts",
         "flake-utils-pre-commit": "flake-utils-pre-commit",
         "ghc-utils": "ghc-utils",
@@ -292,27 +287,11 @@
         "mach-nix": "mach-nix",
         "nix-pypi-fetcher": "nix-pypi-fetcher",
         "nixpkgs": "nixpkgs",
+        "nixpkgsV1": "nixpkgsV1",
         "poetry2nix": "poetry2nix",
         "pre-commit-hooks": "pre-commit-hooks",
         "pruned-racket-catalog": "pruned-racket-catalog"
       }
-    },
-    "rust-analyzer-src": {
-      "flake": false,
-      "locked": {
-        "lastModified": 1657557289,
-        "narHash": "sha256-PRW+nUwuqNTRAEa83SfX+7g+g8nQ+2MMbasQ9nt6+UM=",
-        "owner": "rust-lang",
-        "repo": "rust-analyzer",
-        "rev": "caf23f29144b371035b864a1017dbc32573ad56d",
-        "type": "github"
-      },
-      "original": {
-        "owner": "rust-lang",
-        "ref": "nightly",
-        "repo": "rust-analyzer",
-        "type": "github"
-      }
     }
   },
   "root": "root",

@@ -8,13 +8,14 @@
   inputs = {
     nixpkgs.url = "nixpkgs/nixos-unstable";
+    nixpkgsV1.url = "nixpkgs/nixos-22.11";
     flake-parts.url = "github:hercules-ci/flake-parts";
     flake-parts.inputs.nixpkgs-lib.follows = "nixpkgs";
-    ### dev dependencies
-    alejandra.url = "github:kamadorueda/alejandra";
-    alejandra.inputs.nixpkgs.follows = "nixpkgs";
+    drv-parts.url = "github:davhau/drv-parts";
+    drv-parts.inputs.nixpkgs.follows = "nixpkgs";
+    drv-parts.inputs.flake-parts.follows = "flake-parts";
     pre-commit-hooks.url = "github:cachix/pre-commit-hooks.nix";
     pre-commit-hooks.inputs.nixpkgs.follows = "nixpkgs";
@@ -77,7 +78,6 @@
   outputs = {
     self,
-    alejandra,
     devshell,
     flake-parts,
     gomod2nix,
@@ -187,7 +187,7 @@
       format.type = "app";
       format.program = let
         path = lib.makeBinPath [
-          alejandra.defaultPackage.${system}
+          pkgs.alejandra
           pkgs.python3.pkgs.black
         ];
       in
@@ -219,7 +219,7 @@
       devshell.name = "dream2nix-devshell";
       packages = [
-        alejandra.defaultPackage.${system}
+        pkgs.alejandra
         pkgs.python3.pkgs.black
       ];
@@ -281,7 +281,7 @@
         pass_filenames = false;
         entry = l.toString (pkgs.writeScript "treefmt" ''
           #!${pkgs.bash}/bin/bash
-          export PATH="$PATH:${alejandra.defaultPackage.${system}}/bin"
+          export PATH="$PATH:${pkgs.alejandra}/bin"
           ${pkgs.treefmt}/bin/treefmt --clear-cache --fail-on-change
         '');
       };
@@ -342,6 +342,7 @@
       imports = [
         ./tests
         ./templates
+        ./v1/nix/modules/flake-parts/all-modules.nix
       ];
       systems = [
         "x86_64-linux"

@@ -0,0 +1,10 @@
{
config,
lib,
...
}: let
l = lib // builtins;
in {
config = {
};
}

@@ -0,0 +1,11 @@
{
config,
lib,
...
}: let
l = lib // builtins;
t = l.types;
in {
options = {
};
}

@@ -0,0 +1,38 @@
{
config,
lib,
...
}: let
l = lib // builtins;
t = l.types;
# Attributes we never want to copy from nixpkgs
excludedNixpkgsAttrs =
l.genAttrs
[
"all"
"args"
"builder"
"name"
"pname"
"version"
"src"
"outputs"
]
(name: null);
extractOverrideAttrs = overrideFunc:
(overrideFunc (old: {passthru.old = old;}))
.old;
extractPythonAttrs = pythonPackage: let
pythonAttrs = extractOverrideAttrs pythonPackage.overridePythonAttrs;
in
l.filterAttrs (name: _: ! excludedNixpkgsAttrs ? ${name}) pythonAttrs;
in {
imports = [
./interface.nix
];
config.attrs-from-nixpkgs.lib = {inherit extractOverrideAttrs extractPythonAttrs;};
}

@@ -0,0 +1,27 @@
{
config,
lib,
...
}: let
l = lib // builtins;
t = l.types;
in {
options.attrs-from-nixpkgs = {
lib.extractOverrideAttrs = l.mkOption {
type = t.functionTo t.attrs;
description = ''
Helper function to extract attrs from nixpkgs to be re-used as overrides.
'';
readOnly = true;
};
# Extracts derivation args from a nixpkgs python package.
lib.extractPythonAttrs = l.mkOption {
type = t.functionTo t.attrs;
description = ''
Helper function to extract python attrs from nixpkgs to be re-used as overrides.
'';
readOnly = true;
};
};
}

@@ -0,0 +1,6 @@
{
imports = [
./implementation.nix
./interface.nix
];
}

@@ -0,0 +1,16 @@
{
config,
lib,
...
}: let
l = lib // builtins;
in {
config = {
package-func.func = config.deps.python.pkgs.buildPythonPackage;
package-func.args = config.buildPythonPackage;
deps = {nixpkgs, ...}: {
python = l.mkOptionDefault nixpkgs.python3;
};
};
}

@@ -0,0 +1,131 @@
{
config,
lib,
...
}: let
l = lib // builtins;
t = l.types;
boolOpt = l.mkOption {
type = t.bool;
default = false;
};
in {
options.deps.python = l.mkOption {
type = t.package;
description = "The python interpreter package to use";
};
options.buildPythonPackage = {
disabled =
boolOpt
// {
description = ''
used to disable derivation, useful for specific python versions
'';
};
catchConflicts =
boolOpt
// {
description = ''
Raise an error if two packages are installed with the same name
TODO: For cross we probably need a different PYTHONPATH, or not
add the runtime deps until after buildPhase.
'';
default =
config.deps.python.stdenv.hostPlatform
== config.deps.python.stdenv.buildPlatform;
};
dontWrapPythonPrograms =
boolOpt
// {
description = ''
Skip wrapping of python programs altogether
'';
};
dontUsePipInstall =
boolOpt
// {
description = ''
Don't use Pip to install a wheel
Note this is actually a variable for the pipInstallPhase in pip's setupHook.
It's included here to prevent an infinite recursion.
'';
};
permitUserSite =
boolOpt
// {
description = ''
Skip setting the PYTHONNOUSERSITE environment variable in wrapped programs
'';
};
    removeBinBytecode =
      boolOpt
      // {
        default = true;
        description = ''
          Remove bytecode from bin folder.
          When a Python script has the extension `.py`, bytecode is generated.
          Typically, executables in bin have no extension, so no bytecode is generated.
          However, some packages do provide executables with extensions, and thus bytecode is generated.
        '';
      };
format = l.mkOption {
type = t.str;
default = "setuptools";
description = ''
Several package formats are supported:
"setuptools" : Install a common setuptools/distutils based package. This builds a wheel.
"wheel" : Install from a pre-compiled wheel.
"flit" : Install a flit package. This builds a wheel.
"pyproject": Install a package using a ``pyproject.toml`` file (PEP517). This builds a wheel.
"egg": Install a package from an egg.
"other" : Provide your own buildPhase and installPhase.
'';
};
disabledTestPaths = l.mkOption {
type = t.listOf t.anything;
default = [];
description = ''
Test paths to ignore in checkPhase
'';
};
# previously only set via env
disabledTests = l.mkOption {
type = t.listOf t.str;
default = [];
description = ''
Disable running specific unit tests
'';
};
pytestFlagsArray = l.mkOption {
type = t.listOf t.str;
default = [];
description = ''
Extra flags passed to pytest
'';
};
pipInstallFlags = l.mkOption {
type = t.listOf t.str;
default = [];
description = ''
Extra flags passed to `pip install`
'';
};
pythonImportsCheck = l.mkOption {
type = t.listOf t.str;
default = [];
description = ''
Check whether importing the listed modules works
'';
};
};
}

@@ -0,0 +1,135 @@
{
config,
lib,
...
}: let
l = lib // builtins;
cfg = config.eval-cache;
packageName = config.public.name;
filterTrue = l.filterAttrsRecursive (key: val: l.isAttrs val || val == true);
invalidationFields = filterTrue cfg.invalidationFields;
intersectAttrsRecursive = a: b:
l.mapAttrs
(
key: valB:
if l.isAttrs valB && l.isAttrs a.${key}
then intersectAttrsRecursive a.${key} valB
else valB
)
(l.intersectAttrs a b);
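  # Example (hypothetical values):
  #   intersectAttrsRecursive {a.b = 1; a.c = 2;} {a.b = 42; d = 3;}
  #   == {a.b = 42;}
  # i.e. only keys present in the first attrset survive, with values taken
  # from the second.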
invalidationData = intersectAttrsRecursive invalidationFields config;
invalidationHash =
l.hashString "sha256"
(l.toJSON [invalidationData cfg.fields]);
fields = filterTrue cfg.fields;
# SAVE
currentContent = intersectAttrsRecursive fields config;
newCache = {
inherit invalidationHash;
content = currentContent;
};
newFile' = config.deps.writeText "cache.json" (l.toJSON newCache);
newFile = config.deps.runCommand "cache.json" {} ''
cat ${newFile'} | ${config.deps.jq}/bin/jq > $out
'';
# LOAD
file = cfg.repoRoot + cfg.cacheFileRel;
refreshCommand =
l.unsafeDiscardStringContext
"cat $(nix-build ${cfg.newFile.drvPath} --no-link) > $(git rev-parse --show-toplevel)/${cfg.cacheFileRel}";
newFileMsg = "To generate a new cache file, execute:\n ${refreshCommand}";
ifdInfoMsg = "Information on how to fix this is shown below if evaluated with `--allow-import-from-derivation`";
cacheMissingMsg = "The cache file ${cfg.cacheFileRel} for drv-parts module '${packageName}' doesn't exist, please create it.";
cacheMissingError =
l.trace ''
${"\n"}
${cacheMissingMsg}
${ifdInfoMsg}
''
l.trace ''
${"\n"}
${newFileMsg}
'';
cacheInvalidMsg = "The cache file ${cfg.cacheFileRel} for drv-parts module '${packageName}' is outdated, please update it.";
cacheInvalidError =
l.trace ''
${"\n"}
${cacheInvalidMsg}
${ifdInfoMsg}
''
l.trace ''
${"\n"}
${newFileMsg}
'';
cachePrio = l.modules.defaultPriority + 1;
mapCachePrio = l.mapAttrs (key: val: l.mkOverride cachePrio val);
cache = l.fromJSON (l.readFile file);
cacheFileExists = l.pathExists file;
cacheFileValid = cache.invalidationHash == invalidationHash;
# Return either the content from cache.json, or if it's invalid or missing,
# use the content without going through the cache.
loadedContent =
if ! cacheFileExists
then cacheMissingError currentContent
else if ! cacheFileValid
then cacheInvalidError currentContent
else mapCachePrio cache.content;
configIfEnabled = l.mkIf (cfg.enable) {
eval-cache = {
inherit
newFile
;
refresh =
config.deps.writeScript
"refresh-${config.public.name}"
refreshCommand;
};
eval-cache.content = loadedContent;
deps = {nixpkgs, ...}: {
inherit
(nixpkgs)
jq
runCommand
writeText
writeScript
;
};
};
configIfDisabled = l.mkIf (! cfg.enable) {
eval-cache.content = currentContent;
};
in {
imports = [
./interface.nix
];
config = l.mkMerge [configIfEnabled configIfDisabled];
}

@@ -0,0 +1,69 @@
{
config,
lib,
...
}: let
l = lib // builtins;
t = l.types;
in {
options.eval-cache = {
# GLOBAL OPTIONS
repoRoot = l.mkOption {
type = t.path;
description = "The root of the current repo. Eg. 'self' in a flake";
example = lib.literalExpression ''
self
'';
};
cacheFileRel = l.mkOption {
type = t.str;
description = "Location of the cache file relative to the repoRoot";
example = lib.literalExpression ''
/rel/path/to/my/package/cache.json
'';
};
# LOCAL OPTIONS
enable = l.mkEnableOption "the evaluation cache for this derivation";
content = l.mkOption {
type = t.submodule {
freeformType = t.anything;
};
};
invalidationFields = l.mkOption rec {
type = t.attrsOf (t.oneOf [t.bool type]);
description = "Fields, when changed, require refreshing the cache";
default = {};
example = {
src = true;
};
};
fields = l.mkOption rec {
type = t.attrsOf (t.oneOf [t.bool type]);
description = "Fields for which to cache evaluation";
default = {};
example = {
pname = true;
version = true;
};
};
# INTERNAL OPTIONS
newFile = l.mkOption {
type = t.path;
description = "Cache file generated from the current inputs";
internal = true;
readOnly = true;
};
refresh = l.mkOption {
type = t.path;
description = "Script to refresh the cache file of this package";
readOnly = true;
};
};
}

@@ -0,0 +1,220 @@
{
config,
lib,
drv-parts,
...
}: let
l = lib // builtins;
python = config.deps.python;
cfg = config.mach-nix;
packageName = config.public.name;
# For a given name, return the path containing the downloaded file
getDistDir = name: "${cfg.pythonSources.names}/${name}";
# (IFD) Get the dist file for a given name by looking inside (pythonSources)
getDistFile = name: let
distDir = getDistDir name;
distFile = l.head (l.attrNames (builtins.readDir distDir));
in "${distDir}/${distFile}";
# Extract the version from a dist's file name
getVersion = file: let
base = l.pipe file [
(l.removeSuffix ".tgx")
(l.removeSuffix ".tar.gz")
(l.removeSuffix ".zip")
(l.removeSuffix ".whl")
];
version = l.last (l.splitString "-" base);
in
version;
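  # e.g. getVersion "certifi-2022.12.7.tar.gz" == "2022.12.7"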
# (IFD) For each dist we need to recall:
# - the type (wheel or sdist)
# - the version (only needed for sdist, so we can build a wheel)
getDistInfo = name: let
file = getDistFile name;
in
if l.hasSuffix ".whl" file
then "wheel"
else getVersion file;
preparedWheels = let
filterAttrs = l.flip l.filterAttrs;
mapAttrs = l.flip l.mapAttrs;
distInfos = config.eval-cache.content.mach-nix.dists;
# Validate Substitutions. Allow only names that we actually depend on.
unknownSubstitutions = l.attrNames (l.removeAttrs cfg.substitutions (l.attrNames distInfos));
substitutions =
if unknownSubstitutions == []
then cfg.substitutions
else
throw ''
${"\n"}The following substitutions for python derivation '${packageName}' will not have any effect. There are no dependencies with such names:
- ${lib.concatStringsSep "\n - " unknownSubstitutions}
'';
# separate 2 types of downloaded files: sdist, wheel
# key: name; val: {version or "wheel"}
wheelInfos = filterAttrs distInfos (name: ver: ver == "wheel");
sdistInfos = filterAttrs distInfos (name: ver: ! wheelInfos ? ${name});
# get the paths of all downloaded wheels
downloadedWheels = mapAttrs wheelInfos (name: ver: getDistDir name);
# Only build sdists which are not substituted via config.substitutions and which aren't the toplevel
# package.
sdistsToBuild = filterAttrs sdistInfos (name: ver: (! substitutions ? ${name}) && name != packageName);
builtWheels = mapAttrs sdistsToBuild (name: ver: mkWheelDist name ver (getDistDir name));
# Usually references to buildInputs would get lost in the dist output.
    # Patch wheels to ensure build inputs remain dependencies of the `dist` output.
# Those references are needed for the final autoPatchelfHook to find the required deps.
patchedWheels = mapAttrs substitutions (name: dist: dist.overridePythonAttrs (old: {postFixup = "ln -s $out $dist/out";}));
in {inherit patchedWheels downloadedWheels builtWheels;};
# The final dists we want to install.
# A mix of:
# - downloaded wheels
# - downloaded sdists built into wheels (see above)
# - substitutions from nixpkgs patched for compat with autoPatchelfHook
finalDistsPaths =
preparedWheels.downloadedWheels // (l.mapAttrs (_: drv: drv.public.out.dist) config.mach-nix.drvs);
# build a wheel for a given sdist
mkWheelDist = name: version: distDir: let
manualSetupDeps =
lib.mapAttrs
(name: deps: map (dep: finalDistsPaths.${dep}) deps)
cfg.manualSetupDeps;
in
{config, ...}: {
imports = [
drv-parts.modules.drv-parts.mkDerivation
../buildPythonPackage
./interface.nix
../eval-cache
../nixpkgs-overrides
];
config = {
nixpkgs-overrides.enable = true;
deps = {nixpkgs, ...}:
l.mapAttrs (_: l.mkDefault) {
inherit python;
inherit
(nixpkgs)
autoPatchelfHook
stdenv
;
};
public = {
inherit name version;
};
buildPythonPackage = {
format = "setuptools";
pipInstallFlags =
(map (distDir: "--find-links ${distDir}") manualSetupDeps.${name} or [])
++ (
map (dep: "--find-links ${finalDistsPaths.${dep}}")
config.eval-cache.content.mach-nix.dependencyTree.${name} or []
);
};
mkDerivation = {
# distDir will contain a single file which is the src
preUnpack = ''export src="${distDir}"/*'';
nativeBuildInputs = [config.deps.autoPatchelfHook];
        # ensure build inputs are propagated for autoPatchelfHook
postFixup = "ln -s $out $dist/out";
};
# TODO If setup deps have been specified manually, we need to remove the
# propagatedBuildInputs from nixpkgs to prevent collisions.
#// lib.optionalAttrs (manualSetupDeps ? ${name}) {
# propagatedBuildInputs = [];
#};
};
};
dependenciesFile = "${cfg.pythonSources}/dependencies.json";
dependencyTree = l.fromJSON (l.readFile dependenciesFile);
makeModuleFromDerivation = _name: drv:
drv-parts.lib.makeModule {
packageFunc = drv;
# TODO: if `overridePythonAttrs` is used here, the .dist output is missing
# Maybe a bug in drv-parts?
overrideFuncName = "overrideAttrs";
modules = [
{deps = {inherit (config.deps) stdenv;};}
];
};
in {
imports = [
drv-parts.modules.drv-parts.mkDerivation
../buildPythonPackage
./interface.nix
../eval-cache
];
config = {
mach-nix.drvs = (l.mapAttrs makeModuleFromDerivation preparedWheels.patchedWheels) // preparedWheels.builtWheels;
mach-nix.dists =
l.mapAttrs
(name: _: getDistInfo name)
(l.readDir cfg.pythonSources.names);
mach-nix.dependencyTree = dependencyTree;
deps = {nixpkgs, ...}:
l.mapAttrs (_: l.mkDefault) {
inherit
(nixpkgs)
autoPatchelfHook
stdenv
;
manylinuxPackages = nixpkgs.pythonManylinuxPackages.manylinux1;
fetchPythonRequirements = nixpkgs.callPackage ../../../pkgs/fetchPythonRequirements {};
runCommand = nixpkgs.runCommand;
pip = nixpkgs.python3Packages.pip;
};
eval-cache.fields = {
mach-nix.dists = true;
mach-nix.dependencyTree = true;
};
eval-cache.invalidationFields = {
mach-nix.pythonSources = true;
};
buildPythonPackage = {
pipInstallFlags =
["--ignore-installed"]
++ (
map (distDir: "--find-links ${distDir}")
(l.attrValues finalDistsPaths)
);
};
mkDerivation = {
doCheck = false;
dontPatchELF = l.mkDefault true;
dontStrip = l.mkDefault true;
nativeBuildInputs = [
config.deps.autoPatchelfHook
];
buildInputs = with config.deps; [
manylinuxPackages
];
passthru = {
inherit (config.mach-nix) pythonSources;
dists = finalDistsPaths;
};
};
};
}

@@ -0,0 +1,73 @@
{
config,
lib,
drv-parts,
dependencySets,
...
}: let
l = lib // builtins;
t = l.types;
in {
options.mach-nix = {
pythonSources = l.mkOption {
type = t.package;
description = ''
A package that contains fetched python sources.
Each single python source must be located in a subdirectory named after the package name.
'';
};
substitutions = l.mkOption {
type = t.lazyAttrsOf t.package;
description = ''
Substitute individual python packages from nixpkgs.
'';
default = {};
};
manualSetupDeps = l.mkOption {
type = t.lazyAttrsOf (t.listOf t.str);
description = ''
Replace the default setup dependencies from nixpkgs for sdist based builds
'';
default = {};
example = {
vobject = [
"python-dateutil"
"six"
];
libsass = [
"six"
];
};
};
drvs = l.mkOption {
type = t.attrsOf (t.submoduleWith {
modules = [drv-parts.modules.drv-parts.core];
specialArgs = {inherit dependencySets;};
});
description = "drv-parts modules that define python dependencies";
};
# INTERNAL
dists = l.mkOption {
type = t.lazyAttrsOf t.str;
description = ''
Attrs which depend on IFD and therefore should be cached
'';
internal = true;
readOnly = true;
};
dependencyTree = l.mkOption {
type = t.lazyAttrsOf t.anything;
description = ''
Dependency tree of the python environment
'';
internal = true;
readOnly = true;
};
};
}

@@ -0,0 +1,43 @@
{
config,
lib,
options,
...
}: let
l = lib // builtins;
cfg = config.nixpkgs-overrides;
# Attributes we never want to copy from nixpkgs
excludedNixpkgsAttrs =
l.genAttrs
[
"all"
"args"
"builder"
"name"
"pname"
"version"
"src"
"outputs"
]
(name: null);
extractOverrideAttrs = overrideFunc:
(overrideFunc (old: {passthru.old = old;}))
.old;
extractPythonAttrs = pythonPackage: let
pythonAttrs = extractOverrideAttrs pythonPackage.overridePythonAttrs;
in
l.filterAttrs (name: _: ! excludedNixpkgsAttrs ? ${name}) pythonAttrs;
extracted = extractPythonAttrs config.deps.python.pkgs.${config.public.name};
in {
imports = [
./interface.nix
];
config = l.mkIf cfg.enable {
package-func.args = extracted;
};
}

@@ -0,0 +1,12 @@
{
config,
lib,
...
}: let
l = lib // builtins;
t = l.types;
in {
options.nixpkgs-overrides = {
    enable = l.mkEnableOption "applying overrides from nixpkgs";
};
}

@@ -0,0 +1,54 @@
{
"content": {
"mach-nix": {
"dependencyTree": {
"ansible": [
"cryptography",
"jinja2",
"paramiko",
"pyyaml",
"setuptools"
],
"bcrypt": [],
"cffi": [
"pycparser"
],
"cryptography": [
"cffi"
],
"jinja2": [
"markupsafe"
],
"markupsafe": [],
"paramiko": [
"bcrypt",
"cryptography",
"pynacl",
"six"
],
"pycparser": [],
"pynacl": [
"cffi"
],
"pyyaml": [],
"setuptools": [],
"six": []
},
"dists": {
"ansible": "2.7.1",
"bcrypt": "wheel",
"cffi": "wheel",
"cryptography": "wheel",
"jinja2": "wheel",
"markupsafe": "wheel",
"paramiko": "wheel",
"pycparser": "wheel",
"pynacl": "wheel",
"pyyaml": "wheel",
"setuptools": "wheel",
"six": "wheel"
}
}
},
"invalidationHash": "89487ce745f0e952fb4a67fa51cf4866663de7a57bb3067ed00c2137a80bd501"
}

@@ -0,0 +1,43 @@
{
config,
lib,
...
}: let
l = lib // builtins;
python = config.deps.python;
in {
imports = [
../../drv-parts/mach-nix-xs
];
deps = {nixpkgs, ...}: {
python = nixpkgs.python39;
};
public = {
name = "ansible";
version = "2.7.1";
};
mkDerivation = {
preUnpack = ''
export src=$(ls ${config.mach-nix.pythonSources}/names/${config.public.name}/*);
'';
};
buildPythonPackage = {
format = "setuptools";
pythonImportsCheck = [
config.public.name
];
};
mach-nix.pythonSources = config.deps.fetchPythonRequirements {
inherit python;
name = config.public.name;
requirementsList = ["${config.public.name}==${config.public.version}"];
hash = "sha256-dCo1llHcCiFrBOEd6mWhwqwVglsN2grSbcdBj8OzKDY=";
maxDate = "2023-01-01";
};
}

@@ -0,0 +1,449 @@
{
"content": {
"mach-nix": {
"dependencyTree": {
"alembic": [
"mako",
"sqlalchemy"
],
"anyio": [
"idna",
"sniffio"
],
"apache-airflow": [
"alembic",
"apache-airflow-providers-common-sql",
"apache-airflow-providers-ftp",
"apache-airflow-providers-http",
"apache-airflow-providers-imap",
"apache-airflow-providers-sqlite",
"argcomplete",
"attrs",
"blinker",
"cattrs",
"colorlog",
"configupdater",
"connexion",
"cron-descriptor",
"croniter",
"cryptography",
"deprecated",
"dill",
"flask",
"flask-appbuilder",
"flask-caching",
"flask-login",
"flask-session",
"flask-wtf",
"graphviz",
"gunicorn",
"httpx",
"itsdangerous",
"jinja2",
"jsonschema",
"lazy-object-proxy",
"linkify-it-py",
"lockfile",
"markdown",
"markdown-it-py",
"markupsafe",
"marshmallow-oneofschema",
"mdit-py-plugins",
"packaging",
"pathspec",
"pendulum",
"pluggy",
"psutil",
"pygments",
"pyjwt",
"python-daemon",
"python-dateutil",
"python-nvd3",
"python-slugify",
"rich",
"setproctitle",
"sqlalchemy",
"sqlalchemy-jsonfield",
"tabulate",
"tenacity",
"termcolor",
"typing-extensions",
"unicodecsv",
"werkzeug"
],
"apache-airflow-providers-common-sql": [
"sqlparse"
],
"apache-airflow-providers-ftp": [],
"apache-airflow-providers-http": [
"requests",
"requests-toolbelt"
],
"apache-airflow-providers-imap": [],
"apache-airflow-providers-sqlite": [
"apache-airflow-providers-common-sql"
],
"apispec": [],
"argcomplete": [],
"attrs": [],
"babel": [
"pytz"
],
"blinker": [],
"cachelib": [],
"cattrs": [
"attrs",
"exceptiongroup"
],
"certifi": [],
"cffi": [
"pycparser"
],
"charset-normalizer": [],
"click": [],
"clickclick": [
"click",
"pyyaml"
],
"colorama": [],
"colorlog": [],
"commonmark": [],
"configupdater": [],
"connexion": [
"clickclick",
"flask",
"inflection",
"itsdangerous",
"jsonschema",
"packaging",
"pyyaml",
"requests",
"werkzeug"
],
"cron-descriptor": [],
"croniter": [
"python-dateutil"
],
"cryptography": [
"cffi"
],
"deprecated": [
"wrapt"
],
"dill": [],
"dnspython": [],
"docutils": [],
"email-validator": [
"dnspython",
"idna"
],
"exceptiongroup": [],
"flask": [
"click",
"itsdangerous",
"jinja2",
"werkzeug"
],
"flask-appbuilder": [
"apispec",
"click",
"colorama",
"email-validator",
"flask",
"flask-babel",
"flask-jwt-extended",
"flask-login",
"flask-sqlalchemy",
"flask-wtf",
"jsonschema",
"marshmallow",
"marshmallow-enum",
"marshmallow-sqlalchemy",
"prison",
"pyjwt",
"python-dateutil",
"sqlalchemy",
"sqlalchemy-utils",
"wtforms"
],
"flask-babel": [
"babel",
"flask",
"jinja2",
"pytz"
],
"flask-caching": [
"cachelib",
"flask"
],
"flask-jwt-extended": [
"flask",
"pyjwt",
"werkzeug"
],
"flask-login": [
"flask",
"werkzeug"
],
"flask-session": [
"cachelib",
"flask"
],
"flask-sqlalchemy": [
"flask",
"sqlalchemy"
],
"flask-wtf": [
"flask",
"itsdangerous",
"wtforms"
],
"graphviz": [],
"greenlet": [],
"gunicorn": [
"setuptools"
],
"h11": [],
"httpcore": [
"anyio",
"certifi",
"h11",
"sniffio"
],
"httpx": [
"certifi",
"httpcore",
"rfc3986",
"sniffio"
],
"idna": [],
"inflection": [],
"itsdangerous": [],
"jinja2": [
"markupsafe"
],
"jsonschema": [
"attrs",
"pyrsistent"
],
"lazy-object-proxy": [],
"linkify-it-py": [
"uc-micro-py"
],
"lockfile": [],
"mako": [
"markupsafe"
],
"markdown": [],
"markdown-it-py": [
"mdurl"
],
"markupsafe": [],
"marshmallow": [
"packaging"
],
"marshmallow-enum": [
"marshmallow"
],
"marshmallow-oneofschema": [
"marshmallow"
],
"marshmallow-sqlalchemy": [
"marshmallow",
"sqlalchemy"
],
"mdit-py-plugins": [
"markdown-it-py"
],
"mdurl": [],
"packaging": [],
"pathspec": [],
"pendulum": [
"python-dateutil",
"pytzdata"
],
"pluggy": [],
"prison": [
"six"
],
"psutil": [],
"pycparser": [],
"pygments": [],
"pyjwt": [],
"pyrsistent": [],
"python-daemon": [
"docutils",
"lockfile",
"setuptools"
],
"python-dateutil": [
"six"
],
"python-nvd3": [],
"python-slugify": [
"text-unidecode"
],
"pytz": [],
"pytzdata": [],
"pyyaml": [],
"requests": [
"certifi",
"charset-normalizer",
"idna",
"urllib3"
],
"requests-toolbelt": [
"requests"
],
"rfc3986": [],
"rich": [
"commonmark",
"pygments"
],
"setproctitle": [],
"setuptools": [],
"six": [],
"sniffio": [],
"sqlalchemy": [
"greenlet"
],
"sqlalchemy-jsonfield": [
"sqlalchemy"
],
"sqlalchemy-utils": [
"sqlalchemy"
],
"sqlparse": [],
"swagger-ui-bundle": [
"jinja2"
],
"tabulate": [],
"tenacity": [],
"termcolor": [],
"text-unidecode": [],
"typing-extensions": [],
"uc-micro-py": [],
"unicodecsv": [],
"urllib3": [],
"werkzeug": [
"markupsafe"
],
"wrapt": [],
"wtforms": [
"markupsafe"
]
},
"dists": {
"alembic": "wheel",
"anyio": "wheel",
"apache-airflow": "wheel",
"apache-airflow-providers-common-sql": "wheel",
"apache-airflow-providers-ftp": "wheel",
"apache-airflow-providers-http": "wheel",
"apache-airflow-providers-imap": "wheel",
"apache-airflow-providers-sqlite": "wheel",
"apispec": "wheel",
"argcomplete": "wheel",
"attrs": "wheel",
"babel": "wheel",
"blinker": "wheel",
"cachelib": "wheel",
"cattrs": "wheel",
"certifi": "wheel",
"cffi": "wheel",
"charset-normalizer": "wheel",
"click": "wheel",
"clickclick": "wheel",
"colorama": "wheel",
"colorlog": "wheel",
"commonmark": "wheel",
"configupdater": "wheel",
"connexion": "wheel",
"cron-descriptor": "1.2.32",
"croniter": "wheel",
"cryptography": "wheel",
"deprecated": "wheel",
"dill": "wheel",
"dnspython": "wheel",
"docutils": "wheel",
"email-validator": "wheel",
"exceptiongroup": "wheel",
"flask": "wheel",
"flask-appbuilder": "wheel",
"flask-babel": "wheel",
"flask-caching": "wheel",
"flask-jwt-extended": "wheel",
"flask-login": "wheel",
"flask-session": "wheel",
"flask-sqlalchemy": "wheel",
"flask-wtf": "wheel",
"graphviz": "wheel",
"greenlet": "wheel",
"gunicorn": "wheel",
"h11": "wheel",
"httpcore": "wheel",
"httpx": "wheel",
"idna": "wheel",
"inflection": "wheel",
"itsdangerous": "wheel",
"jinja2": "wheel",
"jsonschema": "wheel",
"lazy-object-proxy": "1.8.0",
"linkify-it-py": "wheel",
"lockfile": "wheel",
"mako": "wheel",
"markdown": "wheel",
"markdown-it-py": "wheel",
"markupsafe": "wheel",
"marshmallow": "wheel",
"marshmallow-enum": "wheel",
"marshmallow-oneofschema": "wheel",
"marshmallow-sqlalchemy": "wheel",
"mdit-py-plugins": "wheel",
"mdurl": "wheel",
"packaging": "wheel",
"pathspec": "wheel",
"pendulum": "2.1.2",
"pluggy": "wheel",
"prison": "wheel",
"psutil": "wheel",
"pycparser": "wheel",
"pygments": "wheel",
"pyjwt": "wheel",
"pyrsistent": "wheel",
"python-daemon": "wheel",
"python-dateutil": "wheel",
"python-nvd3": "0.15.0",
"python-slugify": "wheel",
"pytz": "wheel",
"pytzdata": "wheel",
"pyyaml": "wheel",
"requests": "wheel",
"requests-toolbelt": "wheel",
"rfc3986": "wheel",
"rich": "wheel",
"setproctitle": "wheel",
"setuptools": "wheel",
"six": "wheel",
"sniffio": "wheel",
"sqlalchemy": "wheel",
"sqlalchemy-jsonfield": "wheel",
"sqlalchemy-utils": "wheel",
"sqlparse": "wheel",
"swagger-ui-bundle": "wheel",
"tabulate": "wheel",
"tenacity": "wheel",
"termcolor": "wheel",
"text-unidecode": "wheel",
"typing-extensions": "wheel",
"uc-micro-py": "wheel",
"unicodecsv": "0.14.1",
"urllib3": "wheel",
"werkzeug": "wheel",
"wrapt": "wheel",
"wtforms": "wheel"
}
}
},
"invalidationHash": "5c4ce66375d559fafc21946cf72047315b17d8c1fe64881076fcf939a3626e18"
}

@@ -0,0 +1,94 @@
{
config,
lib,
drv-parts,
...
}: let
l = lib // builtins;
python = config.deps.python;
extractPythonAttrs = config.attrs-from-nixpkgs.lib.extractPythonAttrs;
nixpkgsAttrs = extractPythonAttrs python.pkgs.apache-airflow;
in {
imports = [
../../drv-parts/mach-nix-xs
../../drv-parts/attrs-from-nixpkgs
];
deps = {
nixpkgs,
nixpkgsStable,
...
}: {
inherit
(nixpkgs)
git
fetchFromGitHub
;
};
public = {
name = "apache-airflow";
version = "2.5.0";
};
mkDerivation = {
src = config.deps.fetchFromGitHub {
owner = "apache";
repo = "airflow";
rev = "refs/tags/${config.public.version}";
# Download using the git protocol rather than using tarballs, because the
# GitHub archive tarballs don't appear to include tests
forceFetchGit = true;
hash = "sha256-QWUXSG+RSHkF5kP1ZYtx+tHjO0n7hfya9CFA3lBhJHk=";
};
nativeBuildInputs = [
python.pkgs.GitPython
];
inherit
(nixpkgsAttrs)
buildInputs
checkInputs
postPatch
postInstall
preCheck
;
};
mach-nix.pythonSources = config.deps.fetchPythonRequirements {
inherit (config.deps) python;
name = config.public.name;
requirementsList = [
"apache-airflow"
];
hash = "sha256-o5Gu069nB54/cI1muPfzrMX4m/Nm+pPOu1nUarNqeHM=";
maxDate = "2023-01-01";
};
# Replace some python packages entirely with candidates from nixpkgs, because
# they are hard to fix
mach-nix.substitutions = {
cron-descriptor = python.pkgs.cron-descriptor;
python-nvd3 = python.pkgs.python-nvd3;
pendulum = python.pkgs.pendulum;
};
env = {
inherit
(nixpkgsAttrs)
INSTALL_PROVIDERS_FROM_SOURCES
makeWrapperArgs
;
};
buildPythonPackage = {
inherit
(nixpkgsAttrs)
disabledTests
pythonImportsCheck
pytestFlagsArray
;
};
}

@@ -0,0 +1,182 @@
{
"content": {
"mach-nix": {
"dependencyTree": {
"appdirs": [],
"attrs": [],
"babel": [
"pytz"
],
"beautifulsoup4": [
"soupsieve"
],
"cached-property": [],
"certifi": [],
"cffi": [
"pycparser"
],
"chardet": [],
"cryptography": [
"cffi"
],
"decorator": [],
"defusedxml": [],
"docutils": [],
"ebaysdk": [],
"freezegun": [
"python-dateutil",
"six"
],
"gevent": [
"greenlet",
"setuptools",
"zope-event",
"zope-interface"
],
"greenlet": [],
"idna": [],
"isodate": [
"six"
],
"jinja2": [
"markupsafe"
],
"libsass": [],
"lxml": [],
"markupsafe": [],
"num2words": [],
"ofxparse": [],
"passlib": [],
"pillow": [],
"polib": [],
"psutil": [],
"psycopg2": [],
"pyasn1": [],
"pyasn1-modules": [
"pyasn1"
],
"pycparser": [],
"pydot": [
"pyparsing"
],
"pyopenssl": [
"cryptography",
"six"
],
"pyparsing": [],
"pypdf2": [],
"pyserial": [],
"python-dateutil": [
"six"
],
"python-ldap": [],
"python-stdnum": [],
"pytz": [],
"pyusb": [],
"qrcode": [
"six"
],
"reportlab": [
"pillow"
],
"requests": [
"certifi",
"chardet",
"idna",
"urllib3"
],
"requests-toolbelt": [
"requests"
],
"setuptools": [],
"six": [],
"soupsieve": [],
"urllib3": [],
"vobject": [],
"werkzeug": [],
"xlrd": [],
"xlsxwriter": [],
"xlwt": [],
"zeep": [
"appdirs",
"attrs",
"cached-property",
"defusedxml",
"isodate",
"lxml",
"pytz",
"requests",
"requests-toolbelt",
"six"
],
"zope-event": [
"setuptools"
],
"zope-interface": [
"setuptools"
]
},
"dists": {
"appdirs": "wheel",
"attrs": "wheel",
"babel": "wheel",
"beautifulsoup4": "wheel",
"cached-property": "wheel",
"certifi": "wheel",
"cffi": "wheel",
"chardet": "wheel",
"cryptography": "wheel",
"decorator": "wheel",
"defusedxml": "wheel",
"docutils": "wheel",
"ebaysdk": "2.1.5",
"freezegun": "wheel",
"gevent": "wheel",
"greenlet": "wheel",
"idna": "wheel",
"isodate": "wheel",
"jinja2": "wheel",
"libsass": "0.18.0",
"lxml": "wheel",
"markupsafe": "1.1.0",
"num2words": "wheel",
"ofxparse": "0.19",
"passlib": "wheel",
"pillow": "wheel",
"polib": "wheel",
"psutil": "5.6.7",
"psycopg2": "2.8.6",
"pyasn1": "wheel",
"pyasn1-modules": "wheel",
"pycparser": "wheel",
"pydot": "wheel",
"pyopenssl": "wheel",
"pyparsing": "wheel",
"pypdf2": "1.26.0",
"pyserial": "wheel",
"python-dateutil": "wheel",
"python-ldap": "3.4.0",
"python-stdnum": "wheel",
"pytz": "wheel",
"pyusb": "1.0.2",
"qrcode": "wheel",
"reportlab": "wheel",
"requests": "wheel",
"requests-toolbelt": "wheel",
"setuptools": "wheel",
"six": "wheel",
"soupsieve": "wheel",
"urllib3": "wheel",
"vobject": "0.9.6.1",
"werkzeug": "wheel",
"xlrd": "wheel",
"xlsxwriter": "wheel",
"xlwt": "wheel",
"zeep": "wheel",
"zope-event": "wheel",
"zope-interface": "wheel"
}
}
},
"invalidationHash": "cfbfb66cc62670414f09f8cab951abad359f6af8fb2f9e1630e991f4a7f7ec13"
}

@@ -0,0 +1,66 @@
{
config,
lib,
...
}: let
l = lib // builtins;
python = config.deps.python;
in {
imports = [
../../drv-parts/mach-nix-xs
];
deps = {nixpkgs, ...}: {
inherit
(nixpkgs)
postgresql
fetchFromGitHub
;
python = nixpkgs.python38;
};
public = {
name = "odoo";
version = "16.0";
};
mkDerivation = {
src = config.deps.fetchFromGitHub {
owner = "odoo";
repo = "odoo";
# ref: 16.0
rev = "2d42fd69cada3b1f2716c3d0a20bec6170f9b226";
hash = "sha256-ZlPH+RaRZbWooe+kpiFYZtvuVmXtOMHeCW+Z74ZscXY=";
};
};
mach-nix.pythonSources = config.deps.fetchPythonRequirements {
inherit (config.deps) python;
name = config.public.name;
requirementsFiles = ["${config.mkDerivation.src}/requirements.txt"];
hash = "sha256-E9wNvBakm+R5TSsFsnGpSaziYpi2otm0iBiyphUVSFI=";
maxDate = "2023-01-01";
nativeBuildInputs = with config.deps; [
postgresql
];
};
# Replace some python packages entirely with candidates from nixpkgs, because
# they are hard to fix
mach-nix.substitutions = {
python-ldap = python.pkgs.python-ldap;
pillow = python.pkgs.pillow;
};
# fix some builds via overrides
mach-nix.drvs = {
libsass.mkDerivation = {
doCheck = false;
doInstallCheck = l.mkForce false;
};
pypdf2.mkDerivation = {
doCheck = false;
doInstallCheck = l.mkForce false;
};
};
}

@@ -0,0 +1,13 @@
{
"content": {
"mach-nix": {
"dependencyTree": {
"pillow": []
},
"dists": {
"pillow": "9.3.0"
}
}
},
"invalidationHash": "88dbd64243b8ebf37d1176dacd6c5d3c9b47b932af62f0132522d36850626fb1"
}

@@ -0,0 +1,63 @@
# Build Pillow from source (not from a wheel), with rather minimal
# features: only zlib and libjpeg as dependencies.
{
config,
lib,
...
}: let
l = lib // builtins;
python = config.deps.python;
in {
imports = [
../../drv-parts/mach-nix-xs
];
deps = {nixpkgs, ...}: {
python = nixpkgs.python39;
inherit
(nixpkgs)
pkg-config
zlib
libjpeg
;
};
public = {
name = "pillow";
version = "9.3.0";
};
mkDerivation = {
nativeBuildInputs = [
config.deps.pkg-config
];
propagatedBuildInputs = [
config.deps.zlib
config.deps.libjpeg
];
preUnpack = ''
export src=$(ls ${config.mach-nix.pythonSources}/names/${config.public.name}/*);
'';
};
buildPythonPackage = {
format = "setuptools";
pythonImportsCheck = [
"PIL"
];
};
mach-nix.pythonSources = config.deps.fetchPythonRequirements {
inherit python;
name = config.public.name;
requirementsList = ["${config.public.name}==${config.public.version}"];
hash = "sha256-eS81pqSjU6mgBL6tXadSxkGdafsVFThByOQcOf8FkF0=";
maxDate = "2023-01-01";
pipFlags = [
"--no-binary"
":all:"
];
};
}

@@ -0,0 +1,160 @@
{
"content": {
"mach-nix": {
"dependencyTree": {
"absl-py": [],
"astunparse": [
"six",
"wheel"
],
"cachetools": [],
"certifi": [],
"charset-normalizer": [],
"flatbuffers": [],
"gast": [],
"google-auth": [
"cachetools",
"pyasn1-modules",
"rsa",
"six"
],
"google-auth-oauthlib": [
"google-auth",
"requests-oauthlib"
],
"google-pasta": [
"six"
],
"grpcio": [],
"h5py": [
"numpy"
],
"idna": [],
"keras": [],
"libclang": [],
"markdown": [],
"markupsafe": [],
"numpy": [],
"oauthlib": [],
"opt-einsum": [
"numpy"
],
"packaging": [],
"protobuf": [],
"pyasn1": [],
"pyasn1-modules": [
"pyasn1"
],
"requests": [
"certifi",
"charset-normalizer",
"idna",
"urllib3"
],
"requests-oauthlib": [
"oauthlib",
"requests"
],
"rsa": [
"pyasn1"
],
"setuptools": [],
"six": [],
"tensorboard": [
"absl-py",
"google-auth",
"google-auth-oauthlib",
"grpcio",
"markdown",
"numpy",
"protobuf",
"requests",
"setuptools",
"tensorboard-data-server",
"tensorboard-plugin-wit",
"werkzeug",
"wheel"
],
"tensorboard-data-server": [],
"tensorboard-plugin-wit": [],
"tensorflow": [
"absl-py",
"astunparse",
"flatbuffers",
"gast",
"google-pasta",
"grpcio",
"h5py",
"keras",
"libclang",
"numpy",
"opt-einsum",
"packaging",
"protobuf",
"setuptools",
"six",
"tensorboard",
"tensorflow-estimator",
"tensorflow-io-gcs-filesystem",
"termcolor",
"typing-extensions",
"wrapt"
],
"tensorflow-estimator": [],
"tensorflow-io-gcs-filesystem": [],
"termcolor": [],
"typing-extensions": [],
"urllib3": [],
"werkzeug": [
"markupsafe"
],
"wheel": [],
"wrapt": []
},
"dists": {
"absl-py": "wheel",
"astunparse": "wheel",
"cachetools": "wheel",
"certifi": "wheel",
"charset-normalizer": "wheel",
"flatbuffers": "wheel",
"gast": "wheel",
"google-auth": "wheel",
"google-auth-oauthlib": "wheel",
"google-pasta": "wheel",
"grpcio": "wheel",
"h5py": "wheel",
"idna": "wheel",
"keras": "wheel",
"libclang": "wheel",
"markdown": "wheel",
"markupsafe": "wheel",
"numpy": "wheel",
"oauthlib": "wheel",
"opt-einsum": "wheel",
"packaging": "wheel",
"protobuf": "wheel",
"pyasn1": "wheel",
"pyasn1-modules": "wheel",
"requests": "wheel",
"requests-oauthlib": "wheel",
"rsa": "wheel",
"setuptools": "wheel",
"six": "wheel",
"tensorboard": "wheel",
"tensorboard-data-server": "wheel",
"tensorboard-plugin-wit": "wheel",
"tensorflow": "wheel",
"tensorflow-estimator": "wheel",
"tensorflow-io-gcs-filesystem": "wheel",
"termcolor": "wheel",
"typing-extensions": "wheel",
"urllib3": "wheel",
"werkzeug": "wheel",
"wheel": "wheel",
"wrapt": "wheel"
}
}
},
"invalidationHash": "d8bab42c0570e3e283d273278d0cd6ababd1d9ee0c1aafec91dcd3c56521dd35"
}

@@ -0,0 +1,46 @@
{
config,
lib,
...
}: let
l = lib // builtins;
python = config.deps.python;
in {
imports = [
../../drv-parts/mach-nix-xs
];
deps = {nixpkgs, ...}: {
inherit
(nixpkgs)
postgresql
fetchFromGitHub
;
};
public = {
name = "tensorflow";
version = "2.11.0";
};
mkDerivation = {
preUnpack = ''
export src=$(ls ${config.mach-nix.pythonSources}/names/${config.public.name}/*);
'';
};
buildPythonPackage = {
format = "wheel";
pythonImportsCheck = [
config.public.name
];
};
mach-nix.pythonSources = config.deps.fetchPythonRequirements {
inherit (config.deps) python;
name = config.public.name;
requirementsList = ["${config.public.name}==${config.public.version}"];
hash = "sha256-PDUrECFjoPznqXwqi2e1djx63t+kn/kAyM9JqQrTmd0=";
maxDate = "2023-01-01";
};
}

@@ -0,0 +1,16 @@
{
self,
lib,
...
}: {
perSystem = {
config,
self',
inputs',
pkgs,
...
}: {
};
flake = {
};
}

@@ -0,0 +1,39 @@
# Automatically exports modules from the `/**/modules` directory to:
# `flake.modules.<kind>.<name>`
# Automatically imports all flake-parts modules from `/**/modules/flake-parts`
{
config,
lib,
...
}: let
modulesDir = ../.;
moduleKinds = builtins.readDir modulesDir;
mapModules = kind:
lib.mapAttrs'
(fn: _:
lib.nameValuePair
(lib.removeSuffix ".nix" fn)
(modulesDir + "/${kind}/${fn}"))
(builtins.readDir (modulesDir + "/${kind}"));
flakePartsModules = lib.attrValues (
lib.filterAttrs
(modName: _: modName != "all-modules")
(mapModules "flake-parts")
);
in {
imports = flakePartsModules;
options.flake.modules = lib.mkOption {
type = lib.types.anything;
};
# generates future flake outputs: `modules.<kind>.<module-name>`
config.flake.modules = lib.mapAttrs (kind: _: mapModules kind) moduleKinds;
  # compat with current schema: `nixosModules` / `darwinModules`
config.flake.nixosModules = config.flake.modules.nixos or {};
config.flake.darwinModules = config.flake.modules.darwin or {};
}
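
As a sketch of the generated mapping (using a module name from this commit): a directory `modules/drvs/ansible` would be exported as

  flake.modules.drvs.ansible = ./modules/drvs/ansible;

while every file under `modules/flake-parts/` except `all-modules.nix` itself is imported directly into the flake-parts evaluation.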

@@ -0,0 +1,47 @@
# custom app to update the eval-cache of each exported package.
{
self,
lib,
inputs,
...
}: {
perSystem = {
config,
self',
inputs',
pkgs,
system,
...
}: let
l = lib // builtins;
allNewFileCommands =
l.flatten
(l.mapAttrsToList
(name: pkg: pkg.config.eval-cache.refresh or [])
self'.packages);
update-caches =
config.writers.writePureShellScript
(with pkgs; [
coreutils
git
nix
])
(
"set -x\n"
+ (l.concatStringsSep "\n" allNewFileCommands)
);
toApp = script: {
type = "app";
program = "${script}";
};
in {
apps = l.mapAttrs (_: toApp) {
inherit
update-caches
;
};
};
}
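
Assuming a local checkout of the flake, this app should then be runnable as `nix run .#update-caches`, which executes the concatenated refresh commands of all exported packages.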

@@ -0,0 +1,36 @@
# evaluate packages from `/**/modules/drvs` and export them via `flake.packages`
{
self,
lib,
inputs,
...
}: let
system = "x86_64-linux";
# A module imported into every package setting up the eval cache
evalCacheSetup = {config, ...}: {
eval-cache.cacheFileRel = "/v1/nix/modules/drvs/${config.public.name}/cache-${system}.json";
eval-cache.repoRoot = self;
eval-cache.enable = true;
};
  # evaluates the package behind a given module
makeDrv = module: let
evaled = lib.evalModules {
modules = [
inputs.drv-parts.modules.drv-parts.core
inputs.drv-parts.modules.drv-parts.docs
module
../drv-parts/eval-cache
evalCacheSetup
];
specialArgs.dependencySets = {
nixpkgs = inputs.nixpkgsV1.legacyPackages.${system};
};
specialArgs.drv-parts = inputs.drv-parts;
};
in
evaled // evaled.config.public;
in {
# map all modules in ../drvs to a package output in the flake.
flake.packages.${system} = lib.mapAttrs (_: drvModule: makeDrv drvModule) self.modules.drvs;
}
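
Each module under ../drvs thus surfaces as a flake package for x86_64-linux; assuming a local checkout, `nix build .#ansible` or `nix build .#odoo` should evaluate the corresponding module into a derivation.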

@@ -0,0 +1,98 @@
{
perSystem = {
config,
lib,
pkgs,
...
}: {
options.writers = {
writePureShellScript = lib.mkOption {
type = lib.types.functionTo lib.types.anything;
};
writePureShellScriptBin = lib.mkOption {
type = lib.types.functionTo lib.types.anything;
};
};
/*
create a script that runs in a `pure` environment, in the sense that:
- PATH only contains exactly the packages passed via the PATH arg
- NIX_PATH is set to the path of the current `pkgs`
- TMPDIR is set up and cleaned up even if the script fails
- all environment variables are unset, except:
- the ones listed in `keepVars` below
- ones listed via the KEEP_VARS variable
- the behavior is similar to `nix-shell --pure`
*/
config.writers = let
mkScript = PATH: script: ''
#!${pkgs.bash}/bin/bash
set -Eeuo pipefail
export PATH="${lib.makeBinPath PATH}"
export NIX_PATH=nixpkgs=${pkgs.path}
export TMPDIR=$(${pkgs.coreutils}/bin/mktemp -d)
trap "${pkgs.coreutils}/bin/chmod -R +w '$TMPDIR'; ${pkgs.coreutils}/bin/rm -rf '$TMPDIR'" EXIT
if [ -z "''${IMPURE:-}" ]; then
${cleanEnv}
fi
${script}
'';
# list taken from nix source: src/nix-build/nix-build.cc
keepVars = lib.concatStringsSep " " [
"HOME"
"XDG_RUNTIME_DIR"
"USER"
"LOGNAME"
"DISPLAY"
"WAYLAND_DISPLAY"
"WAYLAND_SOCKET"
"PATH"
"TERM"
"IN_NIX_SHELL"
"NIX_SHELL_PRESERVE_PROMPT"
"TZ"
"PAGER"
"NIX_BUILD_SHELL"
"SHLVL"
"http_proxy"
"https_proxy"
"ftp_proxy"
"all_proxy"
"no_proxy"
        # We want to keep our own variables as well
"IMPURE"
"KEEP_VARS"
"NIX_PATH"
"TMPDIR"
];
cleanEnv = ''
KEEP_VARS="''${KEEP_VARS:-}"
unsetVars=$(
${pkgs.coreutils}/bin/comm \
<(${pkgs.gawk}/bin/awk 'BEGIN{for(v in ENVIRON) print v}' | ${pkgs.coreutils}/bin/cut -d = -f 1 | ${pkgs.coreutils}/bin/sort) \
<(echo "${keepVars} $KEEP_VARS" | ${pkgs.coreutils}/bin/tr " " "\n" | ${pkgs.coreutils}/bin/sort) \
-2 \
-3
)
unset $unsetVars
'';
in {
writePureShellScript = PATH: script:
pkgs.writeScript "script.sh" (mkScript PATH script);
writePureShellScriptBin = binName: PATH: script:
pkgs.writeScriptBin binName (mkScript PATH script);
};
};
}
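
A minimal usage sketch (hypothetical attribute name; assumes `config.writers` and `pkgs` are in scope of the same perSystem module):

  check-script = config.writers.writePureShellScript
    (with pkgs; [coreutils git])
    ''
      # PATH contains only coreutils and git here;
      # $TMPDIR is created before the run and removed afterwards
      git --version
      date
    '';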

@@ -0,0 +1 @@
import ./fetch-python-requirements.nix

@@ -0,0 +1,185 @@
# fetchPythonRequirements downloads python packages specified by a list of
# pip-style python requirements.
# It requires a maximum date 'maxDate' to be specified.
# The result will be as if `pip download` had been executed
# at the point in time specified by maxDate.
# This is ensured by putting pip behind a local proxy that filters the
# api responses from pypi.org to only contain files whose
# release date is earlier than the specified maxDate.
# TODO: ignore if packages are yanked
# TODO: for MAX_DATE only allow timestamp or format 2023-01-01
{
buildPackages,
cacert,
curl,
lib,
python3,
stdenv,
}: let
fetchPythonRequirements = {
# This specifies the python version for which the packages should be downloaded
# Pip needs to be executed from that specific python version.
# Pip accepts '--python-version', but this works only for wheel packages.
python,
# hash for the fixed output derivation
hash,
# list of strings of requirements.txt entries
requirementsList ? [],
# list of requirements.txt files
requirementsFiles ? [],
# restrict to binary releases (.whl)
# this allows buildPlatform independent fetching
onlyBinary ? false,
# additional flags for `pip download`.
# for reference see: https://pip.pypa.io/en/stable/cli/pip_download/
pipFlags ? [],
name ? null,
nameSuffix ? "python-requirements",
nativeBuildInputs ? [],
# maximum release date for packages
maxDate ?
throw ''
'maxDate' must be specified for fetchPythonRequirements.
Changing this value will affect the output hash
Example value: "2023-01-01"
'',
# It's better to not refer to python.pkgs.pip directly, as we want to reduce
# the times we have to update the output hash
pipVersion ? "23.0",
# Write "dependencies.json" to $out, documenting which package depends on which.
writeDependencyTree ? true,
}:
# specifying `--platform` for pip download is only allowed in combination with `--only-binary :all:`
# therefore, if onlyBinary is disabled, we must enforce targetPlatform == buildPlatform to ensure reproducibility
if ! onlyBinary && stdenv.system != stdenv.buildPlatform.system
then
throw ''
fetchPythonRequirements cannot fetch sdist packages for ${stdenv.system} on a ${stdenv.buildPlatform.system}.
Either build on a ${stdenv.system} or set `onlyBinary = true`.
''
else let
# map nixos system strings to python platforms
sysToPlatforms = {
"x86_64-linux" = [
"manylinux1_x86_64"
"manylinux2010_x86_64"
"manylinux2014_x86_64"
"linux_x86_64"
];
"x86_64-darwin" =
lib.forEach (lib.range 0 15) (minor: "macosx_10_${builtins.toString minor}_x86_64");
"aarch64-linux" = [
"manylinux1_aarch64"
"manylinux2010_aarch64"
"manylinux2014_aarch64"
"linux_aarch64"
];
};
platforms =
if sysToPlatforms ? "${stdenv.system}"
then sysToPlatforms."${stdenv.system}"
else
throw ''
          'onlyBinary' fetching is currently not supported for target ${stdenv.system}.
          You could set 'onlyBinary = false' and execute the build on a ${stdenv.system}.
'';
# we use mitmproxy to filter the pypi responses
pythonWithMitmproxy =
python3.withPackages (ps: [ps.mitmproxy ps.python-dateutil ps.pkginfo ps.packaging]);
    # fixed output derivation containing the downloaded packages,
    # each being symlinked from its normalized name.
    # Example:
    #   "$out/names/werkzeug/Werkzeug-0.14.1-py2.py3-none-any.whl"
    #   points to "$out/dist/Werkzeug-0.14.1-py2.py3-none-any.whl"
self = stdenv.mkDerivation (finalAttrs: {
# An invalidation hash is embedded into the `name`.
# This will prevent `forgot to update the hash` scenarios, as any change
      # in the derivation name enforces a re-build.
name = let
pythonMajorAndMinorVer =
lib.concatStringsSep "."
(lib.sublist 0 2 (lib.splitString "." python.version));
invalidationHash = builtins.hashString "sha256" ''
          # Ignore the python patch version. It should not affect resolution
${python.implementation}
${pythonMajorAndMinorVer}
${stdenv.system}
# All variables that might influence the output
${finalAttrs.MAX_DATE}
${finalAttrs.onlyBinaryFlags}
${finalAttrs.pipVersion}
${finalAttrs.pipFlags}
${toString writeDependencyTree}
# Include requirements
# We hash the content, as store paths might change more often
${toString finalAttrs.requirementsList}
${toString finalAttrs.requirementsFiles}
# Only hash the content of the python scripts, as the store path
# changes with every nixpkgs commit
${builtins.readFile finalAttrs.filterPypiResponsesScript}
${builtins.readFile finalAttrs.buildScript}
${builtins.readFile finalAttrs.writeDependencyTreeScript}
'';
invalidationHashShort =
lib.substring 0 10
(builtins.unsafeDiscardStringContext invalidationHash);
namePrefix =
if name == null
then ""
else name + "-";
in "${namePrefix}${nameSuffix}-${invalidationHashShort}";
outputHashMode = "recursive";
outputHashAlgo = "sha256";
outputHash = hash;
# Multiple outputs are not allowed in an FOD, therefore use passthru
# to export $dist and $names
passthru.dist = "${finalAttrs.finalPackage}/dist";
passthru.names = "${finalAttrs.finalPackage}/names";
nativeBuildInputs =
nativeBuildInputs
++ [pythonWithMitmproxy curl cacert];
dontUnpack = true;
dontInstall = true;
dontFixup = true;
pythonBin = python.interpreter;
filterPypiResponsesScript = ./filter-pypi-responses.py;
buildScript = ./fetch-python-requirements.py;
writeDependencyTreeScript = ./write-dependency-tree.py;
inherit
pythonWithMitmproxy
pipVersion
requirementsFiles
requirementsList
;
MAX_DATE = builtins.toString maxDate;
pipFlags = lib.concatStringsSep " " pipFlags;
onlyBinaryFlags = lib.optionalString onlyBinary "--only-binary :all: ${
lib.concatStringsSep " " (lib.forEach platforms (pf: "--platform ${pf}"))
}";
requirementsFlags =
lib.optionalString (requirementsFiles != [])
''-r ${lib.concatStringsSep " -r " (map toString finalAttrs.requirementsFiles)}'';
buildPhase = ''
$pythonWithMitmproxy/bin/python $buildScript
${lib.optionalString writeDependencyTree "$pythonWithMitmproxy/bin/python $writeDependencyTreeScript $out/dist > $out/dependencies.json"}
'';
});
in
self;
in
fetchPythonRequirements
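
A minimal usage sketch (hypothetical package and placeholder hash; the real call sites are the `mach-nix.pythonSources` definitions in the drv modules above):

  pythonSources = fetchPythonRequirements {
    python = pkgs.python310;
    name = "requests";
    requirementsList = ["requests==2.28.2"];
    maxDate = "2023-01-01";
    hash = lib.fakeHash;
  };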

@@ -0,0 +1,153 @@
import os
import socket
import ssl
import subprocess
import time
import dateutil.parser
import urllib.error
import urllib.request
from pathlib import Path
import certifi
from packaging.utils import (
canonicalize_name,
parse_sdist_filename,
parse_wheel_filename,
)
HOME = Path(os.getcwd())
OUT = Path(os.getenv("out"))
PYTHON_BIN = os.getenv("pythonBin")
PYTHON_WITH_MITM_PROXY = os.getenv("pythonWithMitmproxy")
FILTER_PYPI_RESPONSE_SCRIPTS = os.getenv("filterPypiResponsesScript")
PIP_VERSION = os.getenv("pipVersion")
PIP_FLAGS = os.getenv("pipFlags")
ONLY_BINARY_FLAGS = os.getenv("onlyBinaryFlags")
REQUIREMENTS_LIST = os.getenv("requirementsList")
REQUIREMENTS_FLAGS = os.getenv("requirementsFlags")
def get_max_date():
try:
return int(os.getenv("MAX_DATE"))
except ValueError:
return dateutil.parser.parse(os.getenv("MAX_DATE"))
def get_free_port():
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(("", 0))
port = sock.getsockname()[1]
sock.close()
return port
def start_mitmproxy(port):
proc = subprocess.Popen(
[
f"{PYTHON_WITH_MITM_PROXY}/bin/mitmdump",
"--listen-port",
str(port),
"--ignore-hosts",
".*files.pythonhosted.org.*",
"--script",
FILTER_PYPI_RESPONSE_SCRIPTS,
],
env={"MAX_DATE": os.getenv("MAX_DATE"), "HOME": HOME},
)
return proc
def wait_for_proxy(proxy_port, cafile):
timeout = time.time() + 60 * 5
req = urllib.request.Request("https://pypi.org")
req.set_proxy(f"127.0.0.1:{proxy_port}", "http")
req.set_proxy(f"127.0.0.1:{proxy_port}", "https")
context = ssl.create_default_context(cafile=cafile)
while time.time() < timeout:
try:
res = urllib.request.urlopen(req, None, 5, context=context)
if res.status < 400:
break
except urllib.error.URLError as e:
pass
finally:
time.sleep(1)
# as we only proxy *some* calls, we need to combine upstream
# ca certificates and the one from mitm proxy
def generate_ca_bundle(path):
with open(HOME / ".mitmproxy/mitmproxy-ca-cert.pem", "r") as f:
mitmproxy_cacert = f.read()
with open(certifi.where(), "r") as f:
certifi_cacert = f.read()
with open(path, "w") as f:
f.write(mitmproxy_cacert)
f.write("\n")
f.write(certifi_cacert)
return path
def create_venv(path):
subprocess.run([PYTHON_BIN, "-m", "venv", path], check=True)
def pip(venv_path, *args):
subprocess.run([f"{venv_path}/bin/pip", *args], check=True)
if __name__ == "__main__":
OUT.mkdir()
dist_path = OUT / "dist"
names_path = OUT / "names"
dist_path.mkdir()
names_path.mkdir()
print(f"selected maximum release date for python packages: {get_max_date()}")
proxy_port = get_free_port()
proxy = start_mitmproxy(proxy_port)
venv_path = Path(".venv").absolute()
create_venv(venv_path)
pip(venv_path, "install", "--upgrade", f"pip=={PIP_VERSION}")
cafile = generate_ca_bundle(HOME / ".ca-cert.pem")
wait_for_proxy(proxy_port, cafile)
optional_flags = [
PIP_FLAGS,
ONLY_BINARY_FLAGS,
REQUIREMENTS_LIST,
REQUIREMENTS_FLAGS,
]
optional_flags = " ".join(filter(None, optional_flags)).split(" ")
pip(
venv_path,
"download",
"--no-cache",
"--dest",
dist_path,
"--progress-bar",
"off",
"--proxy",
f"https://localhost:{proxy_port}",
"--cert",
cafile,
*optional_flags,
)
proxy.kill()
for dist_file in dist_path.iterdir():
if dist_file.suffix == ".whl":
name = parse_wheel_filename(dist_file.name)[0]
else:
name = parse_sdist_filename(dist_file.name)[0]
pname = canonicalize_name(name)
name_path = names_path / pname
print(f"creating link {name_path} -> {dist_file}")
name_path.mkdir()
(name_path / dist_file.name).symlink_to(f"../../dist/{dist_file.name}")

@@ -0,0 +1,97 @@
"""
This script is part of fetchPythonRequirements
It is meant to be used with mitmproxy via `--script`
It will filter api repsonses from the pypi.org api (used by pip),
to only contain files with release date < MAX_DATE
For retrieving the release dates for files, it uses the pypi.org json api
It has to do one extra api request for each queried package name
"""
import json
import os
import sys
import ssl
from urllib.request import Request, urlopen
from pathlib import Path
import dateutil.parser
import gzip
from mitmproxy import http
"""
Query the pypi json api to get timestamps for all release files of the given pname.
return all file names which are newer than the given timestamp
"""
def get_files_to_hide(pname, max_ts):
    ca_file = Path(os.getenv("HOME")) / ".ca-cert.pem"
    # check for the CA bundle first; ssl.create_default_context would
    # otherwise fail with a less helpful FileNotFoundError
    if not ca_file.exists():
        print("mitmproxy ca not found")
        sys.exit(1)
    context = ssl.create_default_context(cafile=ca_file)
# query the api
url = f"https://pypi.org/pypi/{pname}/json"
req = Request(url)
req.add_header("Accept-Encoding", "gzip")
with urlopen(req, context=context) as response:
content = gzip.decompress(response.read())
resp = json.loads(content)
# collect files to hide
files = set()
for ver, releases in resp["releases"].items():
for release in releases:
ts = dateutil.parser.parse(release["upload_time"]).timestamp()
if ts > max_ts:
files.add(release["filename"])
return files
# accept unix timestamp or human readable format
try:
max_ts = int(os.getenv("MAX_DATE"))
except ValueError:
max_date = dateutil.parser.parse(os.getenv("MAX_DATE"))
max_ts = max_date.timestamp()
"""
Response format:
{
"files": [
{
"filename": "pip-0.2.tar.gz",
"hashes": {
"sha256": "88bb8d029e1bf4acd0e04d300104b7440086f94cc1ce1c5c3c31e3293aee1f81"
},
"requires-python": null,
"url": "https://files.pythonhosted.org/packages/3d/9d/1e313763bdfb6a48977b65829c6ce2a43eaae29ea2f907c8bbef024a7219/pip-0.2.tar.gz",
"yanked": false
},
{
"filename": "pip-0.2.1.tar.gz",
"hashes": {
"sha256": "83522005c1266cc2de97e65072ff7554ac0f30ad369c3b02ff3a764b962048da"
},
"requires-python": null,
"url": "https://files.pythonhosted.org/packages/18/ad/c0fe6cdfe1643a19ef027c7168572dac6283b80a384ddf21b75b921877da/pip-0.2.1.tar.gz",
"yanked": false
    }
  ]
}
"""
def response(flow: http.HTTPFlow) -> None:
if not "/simple/" in flow.request.url:
return
pname = flow.request.url.strip("/").split("/")[-1]
badFiles = get_files_to_hide(pname, max_ts)
keepFile = lambda file: file["filename"] not in badFiles
data = json.loads(flow.response.text)
if badFiles:
print(f"removing the following files form the API response:\n {badFiles}")
data["files"] = list(filter(keepFile, data["files"]))
flow.response.text = json.dumps(data)

@@ -0,0 +1,126 @@
#!/usr/bin/env nix-shell
#! nix-shell -i python3 -p python3 python3Packages.pkginfo python3Packages.packaging
"""
Given a directory of python source distributions (.tar.gz) and wheels,
return a JSON representation of their dependency tree.
We want to put each python package into a separate derivation,
therefore nix needs to know which of those packages depend on
each other.
We only care about the graph between packages, as we start from
a complete set of python packages in the right version
- resolved & fetched by `pip download`, `mach-nix` or other tools.
That means that version specifiers (PEP 440; https://peps.python.org/pep-0440/)
and extras specified in markers (PEP 508; https://peps.python.org/pep-0508/)
can be ignored for now.
We rely on `pkginfo` (https://pythonhosted.org/pkginfo/) to read `Requires-Dist`
et al as specified in https://packaging.python.org/en/latest/specifications/core-metadata/#id23
And we use `packaging` (https://packaging.pypa.io/en/stable/index.html) to parse
dependency declarations.
The output is a JSON object mapping each package name to the sorted list of
its dependencies' names. The object itself is sorted alphabetically by
package name.
"""
import sys
import tarfile
import json
from pathlib import Path
from pkginfo import SDist, Wheel
from packaging.requirements import Requirement
from packaging.utils import (
parse_sdist_filename,
parse_wheel_filename,
canonicalize_name,
)
def _is_source_dist(pkg_file):
return pkg_file.suffixes[-2:] == [".tar", ".gz"]
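# e.g. _is_source_dist(Path("foo-1.0.tar.gz")) -> True
#      _is_source_dist(Path("foo-1.0-py3-none-any.whl")) -> False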
def _get_name_version(pkg_file):
if _is_source_dist(pkg_file):
name, *_ = parse_sdist_filename(pkg_file.name)
else:
name, *_ = parse_wheel_filename(pkg_file.name)
return canonicalize_name(name)
def get_pkg_info(pkg_file):
try:
if pkg_file.suffix == ".whl":
return Wheel(str(pkg_file))
elif _is_source_dist(pkg_file):
return SDist(str(pkg_file))
else:
            raise NotImplementedError(f"Unknown file format: {pkg_file}")
except ValueError:
pass
def _is_required_dependency(requirement):
# We set the extra field to an empty string to effectively ignore all optional
# dependencies for now.
return not requirement.marker or requirement.marker.evaluate({"extra": ""})
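# e.g. Requirement("requests>=2.0") is kept, while
# Requirement('pytest; extra == "test"') is dropped as optional.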
def parse_requirements_txt(pkg_file):
requirements = []
if requirements_txt := read_requirements_txt(pkg_file):
requirements = [
Requirement(req)
for req in requirements_txt.split("\n")
if req and not req.startswith("#")
]
return requirements
def read_requirements_txt(source_dist_file):
name, version = parse_sdist_filename(source_dist_file.name)
with tarfile.open(source_dist_file) as tar:
try:
with tar.extractfile(f"{name}-{version}/requirements.txt") as f:
return f.read().decode("utf-8")
except KeyError as e:
return
def usage():
print(f"{sys.argv[0]} <pkgs-directory>")
sys.exit(1)
if __name__ == "__main__":
if len(sys.argv) != 2:
usage()
pkgs_path = Path(sys.argv[1])
    if not (pkgs_path.exists() and pkgs_path.is_dir()):
usage()
dependencies = {}
for pkg_file in pkgs_path.iterdir():
info = get_pkg_info(pkg_file)
name = _get_name_version(pkg_file)
if info:
requirements = [Requirement(req) for req in info.requires_dist]
else:
requirements = []
# For source distributions which do *not* specify requires_dist,
# we fallback to parsing requirements.txt
if not requirements and _is_source_dist(pkg_file):
requirements = parse_requirements_txt(pkg_file)
requirements = filter(_is_required_dependency, requirements)
dependencies[name] = sorted(
[canonicalize_name(req.name) for req in requirements]
)
dependencies = dict(sorted(dependencies.items()))
print(json.dumps(dependencies, indent=2))