Mirror of https://github.com/nix-community/dream2nix.git (synced 2024-11-26 22:31:39 +03:00)
feature: translate subdir of source via ?dir=foo
parent 9d2d385273
commit b8dc44d0f2
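The feature in a nutshell: any source shortcut accepted by fetchers.translateShortcut can now carry a ?dir= query parameter that selects a subdirectory inside the fetched source. A few shortcut forms exercised by the new unit tests (copied from tests/unit/test_source_shortcuts.py, added at the end of this diff):

# Shortcut forms covered by the new tests; the '?dir=...' part selects a
# subdirectory inside the fetched source.
example_shortcuts = [
    "http://foo/bar?kwarg1=foo&dir=sub/dir",                              # plain http(s) URL
    "git+ssh://github.com/owner/repo?rev=refs/heads/v1.2.3&dir=sub/dir",  # "proto" shortcut (git+...)
    "github:owner/repo/v1.2.3?kwarg1=foo&dir=sub/dir",                    # "regular" fetcher shortcut
]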
@@ -474,12 +474,17 @@ class AddCommand(Command):
+        sourceSpec = {}
         # handle source shortcuts
         if source.partition(':')[0].split('+')[0] in os.environ.get("fetcherNames", None).split() \
                 or source.startswith('http'):
             print(f"fetching source for '{source}'")
+            sourceSpec = \
+                callNixFunction("fetchers.translateShortcut", shortcut=source)
+            subdir = ""
+            if 'dir' in sourceSpec:
+                subdir = '/' + sourceSpec['dir']
+                del sourceSpec['dir']
             source = \
-                buildNixFunction("fetchers.fetchShortcut", shortcut=source, extract=True)
+                buildNixFunction("fetchers.fetchSource", source=sourceSpec, extract=True)
+            source += subdir
         # handle source paths
         else:
             # check if source path exists
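For orientation, this is roughly what the CLI works with after the change: fetchers.translateShortcut returns a source spec, the dir entry is split off before fetching, and the subdirectory is re-appended to the resulting path afterwards. The spec below is illustrative only; its shape is taken from the expected values in the new test file (hash omitted, as with computeHash=False).

# Illustrative spec as returned by fetchers.translateShortcut for
# 'github:owner/repo/v1.2.3?dir=sub/dir' (shape taken from the tests below):
sourceSpec = {
    "type": "github",
    "owner": "owner",
    "repo": "repo",
    "rev": "v1.2.3",
    "dir": "sub/dir",
}

# The CLI pops the subdirectory before fetching and re-appends it afterwards:
subdir = "/" + sourceSpec.pop("dir") if "dir" in sourceSpec else ""
# source = buildNixFunction("fetchers.fetchSource", source=sourceSpec, extract=True)
# source += subdir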
@@ -54,7 +54,7 @@ def buildNixFunction(function_path, **kwargs):
             let
               d2n = (import {dream2nix_src} {{}});
             in
-              (d2n.utils.callVieEnv d2n.{function_path})
+              (d2n.utils.callViaEnv d2n.{function_path})
           ''',
         ],
         capture_output=True,
@@ -31,7 +31,7 @@ let
           source.path
         # assume path relative to main package source
         else
-          "${fetchedSources."${mainPackageName}#${mainPackageVersion}"}/${source.path}"
+          "${fetchedSources."${mainPackageName}"."${mainPackageVersion}"}/${source.path}"
       else if fetchers.fetchers ? "${source.type}" then
         fetchSource { inherit source; }
       else throw "unsupported source type '${source.type}'")
@@ -19,7 +19,7 @@ rec {
   );

   defaultFetcher = callPackageDream ./default-fetcher.nix { inherit fetchers fetchSource; };

   combinedFetcher = callPackageDream ./combined-fetcher.nix { inherit defaultFetcher; };

   constructSource =
@@ -30,8 +30,7 @@ rec {
     }@args:
     let
       fetcher = fetchers."${type}";
-      namesKeep = fetcher.inputs ++ [ "type" "hash" ];
-      argsKeep = lib.filterAttrs (n: v: b.elem n namesKeep) args;
+      argsKeep = b.removeAttrs args [ "reComputeHash" "version" ];
       fetcherOutputs = fetcher.outputs args;
     in
       argsKeep
@@ -61,24 +60,82 @@ rec {
   fetchSource = { source, extract ? false, }:
     let
       fetcher = fetchers."${source.type}";
-      fetcherOutputs = fetcher.outputs source;
+      fetcherArgs = b.removeAttrs source [ "dir" "hash" "type" ];
+      fetcherOutputs = fetcher.outputs fetcherArgs;
       maybeArchive = fetcherOutputs.fetched (source.hash or null);
     in
       if extract then
         utils.extractSource { source = maybeArchive; }
+      else if source ? dir then
+        "${maybeArchive}/${source.dir}"
       else
         maybeArchive;

-  # fetch a source define dby a shortcut
+  # fetch a source defined by a shortcut
   fetchShortcut = { shortcut, extract ? false, }:
     fetchSource {
       source = translateShortcut { inherit shortcut; };
       inherit extract;
     };

   # translate shortcut to dream lock source spec
-  translateShortcut = { shortcut, }:
+  parseShortcut = shortcut:
     let
+      # in: "git+https://foo.com/bar?kwarg1=lol&kwarg2=hello"
+      # out: [ "git+" "git" "https" "//" "foo.com/bar" "?kwarg1=lol&kwarg2=hello" "kwarg1=lol&kwarg2=hello" ]
+      split =
+        b.match
+          ''(([[:alnum:]]+)\+)?([[:alnum:]]+):(//)?([^\?]*)(\?(.*))?''
+          shortcut;

+      parsed = {
+        proto1 = b.elemAt split 1;
+        proto2 = b.elemAt split 2;
+        path = b.elemAt split 4;
+        allArgs = b.elemAt split 6;
+        kwargs = b.removeAttrs kwargs_ [ "dir" ];
+        dir = kwargs_.dir or null;
+      };

+      kwargs_ =
+        if parsed.allArgs == null then
+          {}
+        else
+          lib.listToAttrs
+            (map
+              (kwarg:
+                let
+                  split = lib.splitString "=" kwarg;
+                in
+                  lib.nameValuePair
+                    (b.elemAt split 0)
+                    (b.elemAt split 1))
+              (lib.splitString "&" parsed.allArgs));

+    in
+      if split == null then
+        throw "Unable to parse shortcut: ${shortcut}"
+      else
+        parsed;

+  renderUrlArgs = kwargs:
+    let
+      asStr =
+        (lib.concatStringsSep
+          "&"
+          (lib.mapAttrsToList
+            (name: val: "${name}=${val}")
+            kwargs));

+    in
+      if asStr == "" then
+        ""
+      else
+        "?" + asStr;


+  # translate shortcut to dream lock source spec
+  translateShortcut = { shortcut, computeHash ? true, }:
+    let

+      parsed = parseShortcut shortcut;

       checkArgs = fetcherName: args:
         let
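For readers more comfortable with Python than with POSIX extended regular expressions, here is a rough, hypothetical re-implementation of parseShortcut (not part of the code base); it mirrors the regex and the kwargs/dir splitting shown above.

import re

# Python counterpart of the b.match pattern above ([[:alnum:]] ~ [A-Za-z0-9]).
SHORTCUT_RE = re.compile(r"(([A-Za-z0-9]+)\+)?([A-Za-z0-9]+):(//)?([^?]*)(\?(.*))?")

def parse_shortcut(shortcut):
    m = SHORTCUT_RE.fullmatch(shortcut)
    if m is None:
        raise ValueError(f"Unable to parse shortcut: {shortcut}")
    proto1, proto2, path, all_args = m.group(2), m.group(3), m.group(5), m.group(7)
    kwargs = dict(kv.split("=", 1) for kv in all_args.split("&")) if all_args else {}
    subdir = kwargs.pop("dir", None)      # the new '?dir=...' parameter
    return {
        "proto1": proto1,                 # e.g. "git" in "git+https://..."
        "proto2": proto2,                 # e.g. "https", or the fetcher name in "github:..."
        "path": path,                     # e.g. "foo.com/bar" or "owner/repo/v1.2.3"
        "kwargs": kwargs,                 # remaining url arguments, without "dir"
        "dir": subdir,
    }

# parse_shortcut("git+https://foo.com/bar?kwarg1=lol&dir=sub/dir")
# -> proto1="git", proto2="https", path="foo.com/bar",
#    kwargs={"kwarg1": "lol"}, dir="sub/dir"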
@@ -93,58 +150,83 @@ rec {
           else
             args;

       translateHttpUrl =
         let
-          fetcher = fetchers.http;
-          fetcherOutputs = fetchers.http.outputs { url = shortcut; };
         in
-          constructSource {
-            type = "http";
-            hash = fetcherOutputs.calcHash "sha256";
-            url = shortcut;

+          urlArgsFinal = renderUrlArgs parsed.kwargs;

+          url = with parsed; "${proto2}://${path}${urlArgsFinal}";

+          fetcherOutputs = fetchers.http.outputs {
+            inherit url;
+          };

-      translateGitShortcut =
-        let
-          urlAndParams = lib.elemAt (lib.splitString "+" shortcut) 1;
-          splitUrlParams = lib.splitString "?" urlAndParams;
-          url = lib.head splitUrlParams;
-          params = lib.listToAttrs (lib.forEach (lib.tail splitUrlParams) (keyVal:
-            let
-              split = lib.splitString "=" keyVal;
-              name = lib.elemAt split 0;
-              value = lib.elemAt split 1;
-            in
-              lib.nameValuePair name value
-          ));
-          fetcher = fetchers.git;
-          args = params // { inherit url; };
-          fetcherOutputs = fetcher.outputs (checkArgs "git" args);
-        in
-          constructSource
-            (params // {
-              type = "git";
-              hash = fetcherOutputs.calcHash "sha256";
+          constructSource
+            {
+              inherit url;
+              type = "http";
+            }
+            // (lib.optionalAttrs (parsed.dir != null) {
+              dir = parsed.dir;
+            })
+            // (lib.optionalAttrs computeHash {
+              hash = fetcherOutputs.calcHash "sha256";
+            });


+      translateProtoShortcut =
+        let

+          kwargsUrl = b.removeAttrs parsed.kwargs fetcher.inputs;

+          urlArgs = renderUrlArgs kwargsUrl;

+          url = with parsed; "${proto2}://${path}${urlArgs}";

+          fetcherName = parsed.proto1;

+          fetcher = fetchers."${fetcherName}";

+          args = parsed.kwargs // { inherit url; };

+          fetcherOutputs = fetcher.outputs (checkArgs fetcherName args);

+        in
+          constructSource
+            (parsed.kwargs // {
+              type = fetcherName;
+              inherit url;
+            }
+            // (lib.optionalAttrs (parsed.dir != null) {
+              dir = parsed.dir;
+            })
+            // (lib.optionalAttrs computeHash {
+              hash = fetcherOutputs.calcHash "sha256";
+            }));

       translateRegularShortcut =
         let
-          splitNameParams = lib.splitString ":" (lib.removeSuffix "/" shortcut);
-          fetcherName = lib.elemAt splitNameParams 0;
-          paramsStr = lib.elemAt splitNameParams 1;
-          params = lib.splitString "/" paramsStr;

+          fetcherName = parsed.proto2;

+          path = lib.removeSuffix "/" parsed.path;

+          params = lib.splitString "/" path;

           fetcher = fetchers."${fetcherName}";
-          args = lib.listToAttrs
-            (lib.forEach
-              (lib.range 0 ((lib.length fetcher.inputs) - 1))
-              (idx:
-                lib.nameValuePair
-                  (lib.elemAt fetcher.inputs idx)
-                  (lib.elemAt params idx)
-            ));
-          fetcherOutputs = fetcher.outputs args;

+          args =
+            lib.listToAttrs
+              (lib.forEach
+                (lib.range 0 ((lib.length fetcher.inputs) - 1))
+                (idx:
+                  lib.nameValuePair
+                    (lib.elemAt fetcher.inputs idx)
+                    (lib.elemAt params idx)
+                ));

+          fetcherOutputs = fetcher.outputs (args // parsed.kwargs);

         in
           if b.length params != b.length fetcher.inputs then
             throw ''
@@ -152,16 +234,23 @@ rec {
               Should be ${fetcherName}:${lib.concatStringsSep "/" fetcher.inputs}
             ''
           else
-            constructSource (args // {
+            constructSource (args // parsed.kwargs // {
               type = fetcherName;
+            }
+            // (lib.optionalAttrs (parsed.dir != null) {
+              dir = parsed.dir;
+            })
+            // (lib.optionalAttrs computeHash {
               hash = fetcherOutputs.calcHash "sha256";
-            });
+            }));

     in
-      if lib.hasPrefix "git+" (lib.head (lib.splitString ":" shortcut)) then
-        translateGitShortcut
-      else if lib.hasPrefix "http://" shortcut || lib.hasPrefix "https://" shortcut then
+      if parsed.proto1 != null then
+        translateProtoShortcut
+      else if lib.hasPrefix "http://" shortcut
+          || lib.hasPrefix "https://" shortcut then
         translateHttpUrl
       else
         translateRegularShortcut;


 }
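Put differently (a loose Python sketch, not the project's API): translateShortcut now picks one of three translators based on the parsed shortcut, and the "regular" form simply zips the "/"-separated path segments with the chosen fetcher's declared inputs.

def choose_translator(shortcut, parsed):
    """Mirror of the dispatch at the end of translateShortcut (sketch only)."""
    if parsed["proto1"] is not None:                   # e.g. "git+ssh://..."
        return "translateProtoShortcut"
    if shortcut.startswith(("http://", "https://")):   # bare http(s) URL
        return "translateHttpUrl"
    return "translateRegularShortcut"                  # e.g. "github:owner/repo/v1.2.3"

# translateRegularShortcut pairs path segments with the fetcher's inputs,
# e.g. for the github fetcher (inputs owner/repo/rev, as declared below):
inputs = ["owner", "repo", "rev"]
params = "owner/repo/v1.2.3".split("/")
args = dict(zip(inputs, params))   # {'owner': 'owner', 'repo': 'repo', 'rev': 'v1.2.3'}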
@@ -17,7 +17,7 @@ in

   versionField = "rev";

-  outputs = { url, rev, ... }@inp:
+  outputs = { url, rev, }@inp:
     if b.match "refs/(heads|tags)/.*" rev == null && builtins.match "[a-f0-9]*" rev == null then
       throw ''rev must either be a sha1 revision or "refs/heads/branch-name" or "refs/tags/tag-name"''
     else
@@ -25,26 +25,11 @@ in

     b = builtins;

-    ref =
-      if b.match "refs/(heads|tags)/.*" inp.rev != null then
-        inp.rev
-      else
-        null;

-    rev =
-      if b.match "refs/(heads|tags)/.*" inp.rev != null then
-        null
-      else
-        inp.rev;

     refAndRev =
-      (lib.optionalAttrs (ref != null) {
-        inherit ref;
-      })
-      //
-      (lib.optionalAttrs (rev != null) {
-        inherit rev;
-      });
+      if b.match "refs/(heads|tags)/.*" inp.rev != null then
+        { ref = inp.rev; }
+      else
+        { rev = inp.rev; };

   in
   {
@@ -61,7 +46,7 @@ in
     # In case revision is used for verification, `hash` will be null.
     fetched = hash:
       if hash == null then
-        if rev == null then
+        if ! refAndRev ? rev then
           throw "Cannot fetch git repo without integrity. Specify at least 'rev' or 'sha256'"
         else
           b.fetchGit
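Sketched in Python (illustrative only), the git fetcher's new ref/rev handling reduces to:

import re

def ref_and_rev(rev):
    """Branch/tag refs become 'ref', bare sha1 revisions stay 'rev' (sketch of the Nix logic above)."""
    if re.fullmatch(r"refs/(heads|tags)/.*", rev):
        return {"ref": rev}
    return {"rev": rev}

# ref_and_rev("refs/heads/v1.2.3") -> {'ref': 'refs/heads/v1.2.3'}
# ref_and_rev("0f4a5b...")         -> {'rev': '0f4a5b...'}   (placeholder sha1)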
@@ -19,7 +19,7 @@

   defaultUpdater = "githubNewestReleaseTag";

-  outputs = { owner, repo, rev, ... }@inp:
+  outputs = { owner, repo, rev, }@inp:
     let
       b = builtins;
     in
@@ -35,4 +35,4 @@
     };

   };
 }
@@ -14,7 +14,7 @@

   versionField = "rev";

-  outputs = { owner, repo, rev, ... }@inp:
+  outputs = { owner, repo, rev, }@inp:
     let
       b = builtins;
     in
@@ -30,4 +30,4 @@
     };

   };
 }
@@ -11,7 +11,7 @@
     "url"
   ];

-  outputs = { url, ... }@inp:
+  outputs = { url, }@inp:
     let
       b = builtins;
     in
@@ -22,19 +22,30 @@
     });

     fetched = hash:
-      let drv =
-        if hash != null && lib.stringLength hash == 40 then
-          fetchurl {
-            inherit url;
-            sha1 = hash;
-          }
-        else
-          fetchurl {
-            inherit url hash;
+      let
+        drv =
+          if hash != null && lib.stringLength hash == 40 then
+            fetchurl {
+              inherit url;
+              sha1 = hash;
+            }
+          else
+            fetchurl {
+              inherit url hash;
           };

+        drvSanitized =
+          drv.overrideAttrs (old: {
+            name = lib.strings.sanitizeDerivationName old.name;
+          });

+        extracted =
+          utils.extractSource {
+            source = drvSanitized;
+          };
-      in drv.overrideAttrs (old: {
-        name = lib.strings.sanitizeDerivationName old.name;
-      });

+      in
+        extracted;

   };
 }
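The http fetcher's hash dispatch, roughly (Python sketch, not the actual implementation): a 40-character hash is passed to fetchurl as sha1, anything else as a generic hash, and the name-sanitized result is then run through utils.extractSource.

def fetchurl_args(url, hash):
    """Sketch of the hash handling in the http fetcher's 'fetched' function."""
    if hash is not None and len(hash) == 40:   # plain sha1 hashes are 40 hex chars
        return {"url": url, "sha1": hash}
    return {"url": url, "hash": hash}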
@@ -17,7 +17,7 @@

   # defaultUpdater = "";

-  outputs = { pname, version, ... }@inp:
+  outputs = { pname, version, }@inp:
     let
       b = builtins;

@@ -8,7 +8,7 @@
     "path"
   ];

-  outputs = { path, ... }@inp:
+  outputs = { path, }@inp:
     let
       b = builtins;
     in
@@ -19,4 +19,4 @@
     fetched = hash: "${path}";

   };
 }
@@ -16,7 +16,7 @@

   defaultUpdater = "pypiNewestReleaseVersion";

-  outputs = { pname, version, extension ? "tar.gz", ... }@inp:
+  outputs = { pname, version, extension ? "tar.gz", }@inp:
     let
       b = builtins;

@@ -35,7 +35,8 @@
         "properties": {
           "url": { "type": "string" },
           "hash": { "type": "string" },
-          "type": { "type": "string" }
+          "type": { "type": "string" },
+          "dir": { "type": "string" }
         },
         "required": ["type", "url"],
         "additionalProperties": false
@@ -50,7 +51,8 @@
           "url": { "type": "string" },
           "rev": { "type": "string" },
           "hash": { "type": "string" },
-          "type": { "type": "string" }
+          "type": { "type": "string" },
+          "dir": { "type": "string" }
         },
         "required": ["type", "url", "rev"],
         "additionalProperties": false
@@ -66,7 +68,8 @@
           "repo": { "type": "string" },
           "rev": { "type": "string" },
           "hash": { "type": "string" },
-          "type": { "type": "string" }
+          "type": { "type": "string" },
+          "dir": { "type": "string" }
         },
         "required": ["type", "owner", "repo", "rev"],
         "additionalProperties": false
@@ -81,7 +84,8 @@
           "pname": { "type": "string" },
           "version": { "type": "string" },
           "hash": { "type": "string" },
-          "type": { "type": "string" }
+          "type": { "type": "string" },
+          "dir": { "type": "string" }
         },
         "required": ["type", "pname"],
         "additionalProperties": false
@@ -94,7 +98,8 @@
       "then": {
         "properties": {
           "path": { "type": "string" },
-          "type": { "type": "string" }
+          "type": { "type": "string" },
+          "dir": { "type": "string" }
         },
         "required": ["type", "path"],
         "additionalProperties": false
@@ -109,7 +114,8 @@
           "pname": { "type": "string" },
           "version": { "type": "string" },
           "hash": { "type": "string" },
-          "type": { "type": "string" }
+          "type": { "type": "string" },
+          "dir": { "type": "string" }
         },
         "required": ["type", "pname"],
         "additionalProperties": false
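Each of the source schemas above gains an optional "dir" property. Illustratively (values are made up), dream-lock source entries such as these now validate:

# Illustrative dream-lock source entries, now valid with the optional "dir":
http_source = {
    "type": "http",
    "url": "https://example.org/foo.tar.gz",   # made-up URL
    "hash": "sha256-AAAA...",                  # placeholder hash
    "dir": "sub/dir",
}
github_source = {
    "type": "github",
    "owner": "owner",
    "repo": "repo",
    "rev": "v1.2.3",
    "dir": "sub/dir",
}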
@@ -139,14 +145,14 @@
         "properties": {
           "_subsystem": { "type": "string" },
           "producedBy": { "type": "string" }
         }
       }
     },

     "_subsystem": {
       "description": "build system specifics",
       "type": "object"
     },

     "dependencies": {
       "type": "object",
       "properties": {
@@ -141,21 +141,28 @@ rec {
   extractSource =
     {
       source,
+      dir ? "",
     }:
     stdenv.mkDerivation {
       name = "${(source.name or "")}-extracted";
       src = source;
+      inherit dir;
       phases = [ "unpackPhase" ];
       preUnpack = ''
         echo "source: $src"
         unpackFallback(){
           local fn="$1"
           tar xf "$fn"
         }
         unpackCmdHooks+=(unpackFallback)
       '';
       dontInstall = true;
       dontFixup = true;
       unpackCmd =
         if lib.hasSuffix ".tgz" source.name then
           ''
             tar --delay-directory-restore -xf $src

             # set executable flag only on directories
             chmod -R +X .
           ''
         else
           null;
       postUnpack = ''
-        mv $sourceRoot $out
+        echo postUnpack
+        mv "$sourceRoot/$dir" $out
+        exit
       '';
     };
@@ -19,5 +19,5 @@ in
   ]
   ''
     export dream2nixSrc=${dream2nixWithExternals}
-    ${python3.pkgs.pytest}/bin/pytest ${self}/tests/unit
+    ${python3.pkgs.pytest}/bin/pytest ${self}/tests/unit "$@"
   ''
tests/unit/test_source_shortcuts.py (new file, 75 lines)
@@ -0,0 +1,75 @@
+import pytest
+import nix_ffi
+
+@pytest.mark.parametrize("shortcut, expected", [
+    (
+        'https://foo',
+        dict(
+            type = "http",
+            url = "https://foo",
+        ),
+    ),
+    (
+        'http://foo/bar',
+        dict(
+            type = "http",
+            url = "http://foo/bar",
+        ),
+    ),
+    (
+        'github:owner/repo/v1.2.3',
+        dict(
+            type = "github",
+            owner = "owner",
+            repo = "repo",
+            rev = "v1.2.3",
+        ),
+    ),
+
+    # with arguments
+    (
+        'git+ssh://github.com/owner/repo?rev=refs/heads/v1.2.3&dir=sub/dir',
+        dict(
+            type = "git",
+            url = "ssh://github.com/owner/repo",
+            rev = "refs/heads/v1.2.3",
+            dir = "sub/dir",
+        ),
+    ),
+    (
+        'http://foo/bar?kwarg1=foo&dir=sub/dir',
+        dict(
+            type = "http",
+            url = "http://foo/bar?kwarg1=foo",
+            dir = "sub/dir",
+        ),
+    ),
+    (
+        'github:owner/repo/v1.2.3?kwarg1=foo&dir=sub/dir',
+        dict(
+            type = "github",
+            owner = "owner",
+            repo = "repo",
+            rev = "v1.2.3",
+            kwarg1 = "foo",
+            dir = "sub/dir",
+        ),
+    ),
+    (
+        'github:photoview/photoview/master?dir=lol',
+        dict(
+            type = "github",
+            owner = "photoview",
+            repo = "photoview",
+            rev = "master",
+            dir = "lol",
+        ),
+    ),
+])
+def test_translateShortcut(shortcut, expected):
+    result = nix_ffi.callNixFunction(
+        'fetchers.translateShortcut',
+        shortcut=shortcut,
+        computeHash=False,
+    )
+    assert result == expected