Mirror of https://github.com/nix-community/dream2nix.git (synced 2024-12-29 17:33:46 +03:00)

Merge pull request #53 from nix-community/dev

refactoring + CI Tests + go translator

This commit is contained in: commit c5f6bd5a17

.github/workflows/tests.yml (vendored, new file, 40 lines)
@@ -0,0 +1,40 @@
+name: "Test dream2nix"
+on:
+  pull_request:
+  push:
+
+jobs:
+
+  pure-tests:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v2.4.0
+    - uses: cachix/install-nix-action@v15
+      with:
+        install_url: https://nixos-nix-install-tests.cachix.org/serve/w659aglf1hfvkj5wj696q9x8r19p6b7k/install
+        install_options: '--tarball-url-prefix https://nixos-nix-install-tests.cachix.org/serve'
+        extra_nix_config: |
+          access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
+    - uses: cachix/cachix-action@v10
+      with:
+        name: nix-community
+        signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
+
+    - run: nix flake check
+
+  impure-tests:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v2.4.0
+    - uses: cachix/install-nix-action@v15
+      with:
+        install_url: https://nixos-nix-install-tests.cachix.org/serve/w659aglf1hfvkj5wj696q9x8r19p6b7k/install
+        install_options: '--tarball-url-prefix https://nixos-nix-install-tests.cachix.org/serve'
+        extra_nix_config: |
+          access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
+    - uses: cachix/cachix-action@v10
+      with:
+        name: nix-community
+        signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
+
+    - run: nix run .#tests-impure
.gitignore (vendored, 1 line changed)
@@ -1,3 +1,4 @@
.*/
+!.github/
result
interpreter
ci.nix (new file, 22 lines)
@@ -0,0 +1,22 @@
+let
+  b = builtins;
+  flakeCompatSrc = b.fetchurl "https://raw.githubusercontent.com/edolstra/flake-compat/12c64ca55c1014cdc1b16ed5a804aa8576601ff2/default.nix";
+  flake = (import flakeCompatSrc { src = ./.; }).defaultNix;
+  pkgs = import flake.inputs.nixpkgs {};
+  recurseIntoAll = b.mapAttrs (name: val: pkgs.recurseIntoAttrs val);
+
+in
+# {
+#   inherit flake;
+# }
+
+# // (recurseIntoAll {
+
+#   checks = flake.checks.x86_64-linux;
+
+# })
+
+# hercules ci's nix version cannot fetch submodules and crashes
+{
+  inherit (pkgs) hello;
+}
flake.lock (17 lines changed)
@@ -1,5 +1,21 @@
{
  "nodes": {
+    "gomod2nix": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1627572165,
+        "narHash": "sha256-MFpwnkvQpauj799b4QTBJQFEddbD02+Ln5k92QyHOSk=",
+        "owner": "tweag",
+        "repo": "gomod2nix",
+        "rev": "67f22dd738d092c6ba88e420350ada0ed4992ae8",
+        "type": "github"
+      },
+      "original": {
+        "owner": "tweag",
+        "repo": "gomod2nix",
+        "type": "github"
+      }
+    },
    "mach-nix": {
      "flake": false,
      "locked": {
@@ -81,6 +97,7 @@
    },
    "root": {
      "inputs": {
+        "gomod2nix": "gomod2nix",
        "mach-nix": "mach-nix",
        "nix-parsec": "nix-parsec",
        "nixpkgs": "nixpkgs",
flake.nix (17 lines changed)
@@ -3,7 +3,10 @@

  inputs = {
    nixpkgs.url = "nixpkgs/nixos-unstable";

+    # required for builder go/gomod2nix
+    gomod2nix = { url = "github:tweag/gomod2nix"; flake = false; };
+
    # required for translator nodejs/pure/package-lock
    nix-parsec = { url = "github:nprindle/nix-parsec"; flake = false; };

@@ -19,6 +22,7 @@

  outputs = {
    self,
+    gomod2nix,
    mach-nix,
    nix-parsec,
    nixpkgs,
@@ -127,7 +131,14 @@

      # all apps including cli, install, etc.
      apps = forAllSystems (system: pkgs:
-        dream2nixFor."${system}".apps.flakeApps
+        dream2nixFor."${system}".apps.flakeApps // {
+          tests-impure = {
+            type = "app";
+            program =
+              b.toString
+                (dream2nixFor."${system}".callPackageDream ./tests/impure {});
+          };
+        }
      );

      # a dev shell for working on dream2nix
@@ -151,7 +162,7 @@
        '';
      });

-      checks = forAllSystems (system: pkgs: import ./checks.nix {
+      checks = forAllSystems (system: pkgs: import ./tests/pure {
        inherit lib pkgs;
        dream2nix = dream2nixFor."${system}";
      });
@@ -23,14 +23,17 @@ class AddCommand(Command):
    arguments = [
        argument(
            "source",
-            "source of the package, can be a path, tarball URL, or flake-style spec")
+            "Sources of the packages. Can be paths, tarball URLs, or flake-style specs",
+            # multiple=True
+        )
    ]

    options = [
        option("translator", None, "which translator to use", flag=False),
        option("target", None, "target file/directory for the dream-lock.json", flag=False),
+        option("attribute-name", None, "attribute name for the new package", flag=False),
        option(
-            "--packages-root",
+            "packages-root",
            None,
            "Put package under a new directory inside packages-root",
            flag=False
@@ -58,14 +61,92 @@ class AddCommand(Command):
        self.line(f"\n{self.description}\n")

        # parse extra args
+        specified_extra_args = self.parse_extra_args()
+
+        # ensure packages-root
+        packages_root = self.find_packages_root()
+
+        lock, sourceSpec, specified_extra_args, translator =\
+            self.translate_from_source(specified_extra_args, self.argument("source"))
+
+        # get package name and version from lock
+        mainPackageName = lock['_generic']['mainPackageName']
+        mainPackageVersion = lock['_generic']['mainPackageVersion']
+
+        # calculate output directory and attribute name
+        mainPackageDirName = self.define_attribute_name(mainPackageName)
+
+        # calculate output files
+        filesToCreate, output = self.calc_outputs(mainPackageDirName, packages_root)
+        outputDreamLock = f"{output}/dream-lock.json"
+        outputDefaultNix = f"{output}/default.nix"
+
+        # add translator information to lock
+        self.extend_with_translator_info(lock, specified_extra_args, translator)
+
+        # add main package source
+        self.add_main_source(lock, mainPackageName, mainPackageVersion, sourceSpec)
+
+        # clean up dependency graph
+        if 'dependencies' in lock['_generic']:
+            self.postprocess_dep_graph(lock)
+
+        # calculate combined hash if --combined was specified
+        if self.option('combined'):
+            self.aggregate_hashes(lock, outputDreamLock)
+
+        # validate dream lock format
+        checkLockJSON(lock)
+
+        # format dream lock
+        lockStr = self.format_lock_str(lock)
+
+        # save dream lock file
+        with open(outputDreamLock, 'w') as f:
+            f.write(lockStr)
+        print(f"Created {output}/dream-lock.json")
+
+        # create default.nix
+        if 'default.nix' in filesToCreate:
+            self.create_default_nix(lock, output, outputDefaultNix, sources[0])
+
+        # add new package to git
+        if config['isRepo']:
+            sp.run(["git", "add", "-N", output])
+
+    def translate_from_source(self, specified_extra_args, source):
+        # get source path and spec
+        source, sourceSpec = self.parse_source(source)
+        # select translator
+        translator = self.select_translator(source)
+        # raise error if any specified extra arg is unknown
+        specified_extra_args = self.declare_extra_args(specified_extra_args, translator)
+        # do the translation and produce dream lock
+        lock = self.run_translate(source, specified_extra_args, translator)
+        return lock, sourceSpec, specified_extra_args, translator
+
+    def parse_extra_args(self):
        specified_extra_args = {
            arg[0]: arg[1] for arg in map(
                lambda e: e.split('='),
                self.option("arg"),
            )
        }
-        # ensure packages-root
+        return specified_extra_args
+
+    def create_default_nix(self, lock, output, outputDefaultNix, source):
+        template = callNixFunction(
+            'apps.apps.cli.templateDefaultNix',
+            dream2nixLocationRelative=os.path.relpath(dream2nix_src, output),
+            dreamLock=lock,
+            sourcePathRelative=os.path.relpath(source, os.path.dirname(outputDefaultNix))
+        )
+        with open(outputDefaultNix, 'w') as defaultNix:
+            defaultNix.write(template)
+        print(f"Created {output}/default.nix")
+
+    def find_packages_root(self):
        if self.option("packages-root"):
            packages_root = self.option("packages-root")
        elif config['packagesDir']:
@@ -75,52 +156,268 @@ class AddCommand(Command):
        if not os.path.isdir(packages_root):
            print(
                f"Packages direcotry {packages_root} does not exist. Please create.",
-                file = sys.stderr,
-            )
-
-        # verify source
-        source = self.argument("source")
-        if not source and not config['packagesDir']:
-            source = os.path.realpath('./.')
-            print(
-                f"Source not specified. Defaulting to current directory: {source}",
                file=sys.stderr,
            )
-        # else:
-        #     print(
-        #         f"Source not specified. Defaulting to current directory: {source}",
-        #         file=sys.stderr,
-        #     )
-        # check if source is valid fetcher spec
-        sourceSpec = {}
-        # handle source shortcuts
-        if source.partition(':')[0].split('+')[0] in os.environ.get("fetcherNames", None).split()\
-                or source.startswith('http'):
-            print(f"fetching source for '{source}'")
-            sourceSpec =\
-                callNixFunction("fetchers.translateShortcut", shortcut=source)
-            source =\
-                buildNixFunction("fetchers.fetchShortcut", shortcut=source, extract=True)
-        # handle source paths
-        else:
-            # check if source path exists
-            if not os.path.exists(source):
-                print(f"Input source '{source}' does not exist", file=sys.stdout)
-                exit(1)
-            source = os.path.realpath(source)
-            # handle source from dream-lock.json
-            if source.endswith('dream-lock.json'):
-                print(f"fetching source defined via existing dream-lock.json")
-                with open(source) as f:
-                    sourceDreamLock = json.load(f)
-                sourceMainPackageName = sourceDreamLock['_generic']['mainPackageName']
-                sourceMainPackageVersion = sourceDreamLock['_generic']['mainPackageVersion']
-                sourceSpec =\
-                    sourceDreamLock['sources'][sourceMainPackageName][sourceMainPackageVersion]
-                source = \
-                    buildNixFunction("fetchers.fetchSource", source=sourceSpec, extract=True)
-
-        # select translator
+        return packages_root
+
+    def format_lock_str(self, lock):
+        lockStr = json.dumps(lock, indent=2, sort_keys=True)
+        lockStr = lockStr \
+            .replace("[\n ", "[ ") \
+            .replace("\"\n ]", "\" ]") \
+            .replace(",\n ", ", ")
+        return lockStr
+
+    def aggregate_hashes(self, lock, outputDreamLock):
+        print("Building FOD of combined sources to retrieve output hash")
+        # remove hashes from lock file and init sourcesCombinedHash with empty string
+        strip_hashes_from_lock(lock)
+        lock['_generic']['sourcesCombinedHash'] = ""
+        with open(outputDreamLock, 'w') as f:
+            json.dump(lock, f, indent=2)
+        # compute FOD hash of combined sources
+        proc = sp.run(
+            [
+                "nix", "build", "--impure", "-L", "--expr",
+                f"(import {dream2nix_src} {{}}).fetchSources {{ dreamLock = {outputDreamLock}; }}"
+            ],
+            capture_output=True,
+        )
+        # read the output hash from the failed build log
+        match = re.search(r"FOD_PATH=(.*=)", proc.stderr.decode())
+        if not match:
+            print(proc.stderr.decode())
+            print(proc.stdout.decode())
+            raise Exception("Could not find FOD hash in FOD log")
+        hash = match.groups()[0]
+        print(f"Computed FOD hash: {hash}")
+        # store the hash in the lock
+        lock['_generic']['sourcesCombinedHash'] = hash
+
+    def postprocess_dep_graph(self, lock):
+        depGraph = lock['_generic']['dependencies']
+        # remove empty entries
+        if 'dependencies' in lock['_generic']:
+            for pname, deps in depGraph.copy().items():
+                if not deps:
+                    del depGraph[pname]
+        # mark cyclic dependencies
+        edges = set()
+        for pname, versions in depGraph.items():
+            for version, deps in versions.items():
+                for dep in deps:
+                    edges.add(((pname, version), tuple(dep)))
+        G = nx.DiGraph(sorted(list(edges)))
+        cycle_count = 0
+        removed_edges = []
+        for pname, versions in depGraph.items():
+            for version in versions.keys():
+                key = (pname, version)
+                try:
+                    while True:
+                        cycle = nx.find_cycle(G, key)
+                        cycle_count += 1
+                        node_from, node_to = cycle[-1][0], cycle[-1][1]
+                        G.remove_edge(node_from, node_to)
+                        removed_edges.append((node_from, node_to))
+                except nx.NetworkXNoCycle:
+                    continue
+        lock['cyclicDependencies'] = {}
+        if removed_edges:
+            cycles_text = 'Detected Cyclic dependencies:'
+            for node, removed in removed_edges:
+                n_name, n_ver = node[0], node[1]
+                r_name, r_ver = removed[0], removed[1]
+                cycles_text += \
+                    f"\n {n_name}#{n_ver} -> {r_name}#{r_ver}"
+                if n_name not in lock['cyclicDependencies']:
+                    lock['cyclicDependencies'][n_name] = {}
+                if n_ver not in lock['cyclicDependencies'][n_name]:
+                    lock['cyclicDependencies'][n_name][n_ver] = []
+                lock['cyclicDependencies'][n_name][n_ver].append(removed)
+            print(cycles_text)
+
+    def add_main_source(self, lock, mainPackageName, mainPackageVersion, sourceSpec):
+        mainSource = sourceSpec.copy()
+        if not mainSource:
+            mainSource = dict(
+                type="unknown",
+            )
+        if mainPackageName not in lock['sources']:
+            lock['sources'][mainPackageName] = {
+                mainPackageVersion: mainSource
+            }
+        else:
+            lock['sources'][mainPackageName][mainPackageVersion] = mainSource
+
+    def extend_with_translator_info(self, lock, specified_extra_args, translator):
+        t = translator
+        lock['_generic']['translatedBy'] = f"{t['subsystem']}.{t['type']}.{t['name']}"
+        lock['_generic']['translatorParams'] = " ".join(
+            [
+                '--translator',
+                f"{translator['subsystem']}.{translator['type']}.{translator['name']}",
+            ] + (
+                ["--combined"] if self.option('combined') else []
+            ) + [
+                f"--arg {n}={v}" for n, v in specified_extra_args.items()
+            ])
+
+    def calc_outputs(self, mainPackageDirName, packages_root):
+        if self.option('target'):
+            if self.option('target').startswith('/'):
+                output = self.option('target')
+            else:
+                output = f"{packages_root}/{self.option('target')}"
+        else:
+            output = f"{packages_root}/{mainPackageDirName}"
+        # collect files to create
+        filesToCreate = ['dream-lock.json']
+        if not os.path.isdir(output):
+            os.mkdir(output)
+        existingFiles = set(os.listdir(output))
+        if not self.option('no-default-nix') \
+                and not 'default.nix' in existingFiles \
+                and not config['packagesDir']:
+            if self.confirm(
+                    'Create a default.nix for debugging purposes',
+                    default=True):
+                filesToCreate.append('default.nix')
+        # overwrite existing files only if --force is set
+        if self.option('force'):
+            for f in filesToCreate:
+                if os.path.isfile(f):
+                    os.remove(f)
+        # raise error if any file exists already
+        else:
+            if any(f in existingFiles for f in filesToCreate):
+                print(
+                    f"output directory {output} already contains a 'default.nix' "
+                    "or 'dream-lock.json'. Resolve via one of these:\n"
+                    " - use --force to overwrite files\n"
+                    " - use --target to specify another target dir",
+                    file=sys.stderr,
+                )
+                exit(1)
+        output = os.path.realpath(output)
+        return filesToCreate, output
+
+    def define_attribute_name(self, mainPackageName):
+        attributeName = self.option('attribute-name')
+        if attributeName:
+            return attributeName
+
+        attributeName = mainPackageName.strip('@').replace('/', '-')
+
+        # verify / change main package dir name
+        print(f"Current package attribute name is: {attributeName}")
+        new_name = self.ask(
+            "Specify new attribute name or leave empty to keep current:"
+        )
+        if new_name:
+            attributeName = new_name
+        return attributeName
+
+    def run_translate(self, source, specified_extra_args, translator):
+        # build the translator bin
+        t = translator
+        translator_path = buildNixAttribute(
+            f"translators.translators.{t['subsystem']}.{t['type']}.{t['name']}.translateBin"
+        )
+        # direct outputs of translator to temporary file
+        with tempfile.NamedTemporaryFile("r") as output_temp_file:
+            # arguments for calling the translator nix module
+            translator_input = dict(
+                inputFiles=[],
+                inputDirectories=[source],
+                outputFile=output_temp_file.name,
+            )
+            translator_input.update(specified_extra_args)
+
+            # dump translator arguments to json file and execute translator
+            print("\nTranslating project metadata")
+            with tempfile.NamedTemporaryFile("w") as input_json_file:
+                json.dump(translator_input, input_json_file, indent=2)
+                input_json_file.seek(0)  # flushes write cache
+
+                # execute translator
+                sp.run(
+                    [f"{translator_path}", input_json_file.name]
+                )
+
+            # raise error if output wasn't produced
+            if not output_temp_file.read():
+                raise Exception(f"Translator failed to create dream-lock.json")
+
+            # read produced lock file
+            with open(output_temp_file.name) as f:
+                lock = json.load(f)
+            return lock
+
+    def declare_extra_args(self, specified_extra_args, translator):
+        unknown_extra_args = set(specified_extra_args.keys()) - set(translator['extraArgs'].keys())
+        if unknown_extra_args:
+            print(
+                f"Invalid extra args for translator '{translator['name']}': "
+                f" {', '.join(unknown_extra_args)}"
+                "\nPlease remove these parameters",
+                file=sys.stderr
+            )
+            exit(1)
+        # transform flags to bool
+        for argName, argVal in specified_extra_args.copy().items():
+            if translator['extraArgs'][argName]['type'] == 'flag':
+                if argVal.lower() in ('yes', 'y', 'true'):
+                    specified_extra_args[argName] = True
+                elif argVal.lower() in ('no', 'n', 'false'):
+                    specified_extra_args[argName] = False
+                else:
+                    print(
+                        f"Invalid value {argVal} for argument {argName}",
+                        file=sys.stderr
+                    )
+        specified_extra_args = \
+            {k: (bool(v) if translator['extraArgs'][k]['type'] == 'flag' else v) \
+                for k, v in specified_extra_args.items()}
+        # on non-interactive session, assume defaults for unspecified extra args
+        if not self.io.is_interactive():
+            specified_extra_args.update(
+                {n: (True if v['type'] == 'flag' else v['default']) \
+                    for n, v in translator['extraArgs'].items() \
+                    if n not in specified_extra_args and 'default' in v}
+            )
+        unspecified_extra_args = \
+            {n: v for n, v in translator['extraArgs'].items() \
+                if n not in specified_extra_args}
+        # raise error if any extra arg unspecified in non-interactive session
+        if unspecified_extra_args:
+            if not self.io.is_interactive():
+                print(
+                    f"Please specify the following extra arguments required by translator '{translator['name']}' :\n" \
+                    ', '.join(unspecified_extra_args.keys()),
+                    file=sys.stderr
+                )
+                exit(1)
+            # interactively retrieve answers for unspecified extra arguments
+            else:
+                print(f"\nThe translator '{translator['name']}' requires additional options")
+                for arg_name, arg in unspecified_extra_args.items():
+                    print('')
+                    if arg['type'] == 'flag':
+                        print(f"Please specify '{arg_name}'")
+                        specified_extra_args[arg_name] = self.confirm(f"{arg['description']}:", False)
+                    else:
+                        print(f"Please specify '{arg_name}': {arg['description']}")
+                        print(f"Example values: " + ', '.join(arg['examples']))
+                        if 'default' in arg:
+                            print(f"Leave empty for default ({arg['default']})")
+                        while True:
+                            specified_extra_args[arg_name] = self.ask(f"{arg_name}:", arg.get('default'))
+                            if specified_extra_args[arg_name]:
+                                break
+        return specified_extra_args
+
+    def select_translator(self, source):
        translatorsSorted = sorted(
            list_translators_for_source(source),
            key=lambda t: (
@@ -153,302 +450,48 @@ class AddCommand(Command):
            ))[0]
        elif len(translator) == 1:
            translator = list(filter(
                lambda t: [t['name']] == translator,
                translatorsSorted,
            ))[0]
        except IndexError:
            print(f"Could not find translator '{'.'.join(translator)}'", file=sys.stderr)
            exit(1)
+        return translator
+
+    def parse_source(self, source):
+        # verify source
+        if not source and not config['packagesDir']:
+            source = os.path.realpath('./.')
+            print(
+                f"Source not specified. Defaulting to current directory: {source}",
+                file=sys.stderr,
+            )
+        # check if source is a valid fetcher spec
+        sourceSpec = {}
+        # handle source shortcuts
+        if source.partition(':')[0].split('+')[0] in os.environ.get("fetcherNames", None).split() \
+                or source.startswith('http'):
+            print(f"fetching source for '{source}'")
+            sourceSpec = \
+                callNixFunction("fetchers.translateShortcut", shortcut=source)
+            source = \
+                buildNixFunction("fetchers.fetchShortcut", shortcut=source, extract=True)
+        # handle source paths
+        else:
+            # check if source path exists
+            if not os.path.exists(source):
+                print(f"Input source '{source}' does not exist", file=sys.stdout)
+                exit(1)
+            source = os.path.realpath(source)
+            # handle source from dream-lock.json
+            if source.endswith('dream-lock.json'):
+                print(f"fetching source defined via existing dream-lock.json")
+                with open(source) as f:
+                    sourceDreamLock = json.load(f)
+                sourceMainPackageName = sourceDreamLock['_generic']['mainPackageName']
+                sourceMainPackageVersion = sourceDreamLock['_generic']['mainPackageVersion']
+                sourceSpec = \
+                    sourceDreamLock['sources'][sourceMainPackageName][sourceMainPackageVersion]
+                source = \
+                    buildNixFunction("fetchers.fetchSource", source=sourceSpec, extract=True)
+        return source, sourceSpec
-
-        # raise error if any specified extra arg is unknown
-        unknown_extra_args = set(specified_extra_args.keys()) - set(translator['extraArgs'].keys())
-        if unknown_extra_args:
-            print(
-                f"Invalid extra args for translator '{translator['name']}': "
-                f" {', '.join(unknown_extra_args)}"
-                "\nPlease remove these parameters",
-                file=sys.stderr
-            )
-            exit(1)
-
-        # transform flags to bool
-        for argName, argVal in specified_extra_args.copy().items():
-            if translator['extraArgs'][argName]['type'] == 'flag':
-                if argVal.lower() in ('yes', 'y', 'true'):
-                    specified_extra_args[argName] = True
-                elif argVal.lower() in ('no', 'n', 'false'):
-                    specified_extra_args[argName] = False
-                else:
-                    print(
-                        f"Invalid value {argVal} for argument {argName}",
-                        file=sys.stderr
-                    )
-
-        specified_extra_args =\
-            {k: (bool(v) if translator['extraArgs'][k]['type'] == 'flag' else v ) \
-                for k, v in specified_extra_args.items()}
-
-        # on non-interactive session, assume defaults for unspecified extra args
-        if not self.io.is_interactive():
-            specified_extra_args.update(
-                {n: (True if v['type'] == 'flag' else v['default']) \
-                    for n, v in translator['extraArgs'].items() \
-                    if n not in specified_extra_args and 'default' in v}
-            )
-        unspecified_extra_args = \
-            {n: v for n, v in translator['extraArgs'].items() \
-                if n not in specified_extra_args}
-        # raise error if any extra arg unspecified in non-interactive session
-        if unspecified_extra_args:
-            if not self.io.is_interactive():
-                print(
-                    f"Please specify the following extra arguments required by translator '{translator['name']}' :\n" \
-                    ', '.join(unspecified_extra_args.keys()),
-                    file=sys.stderr
-                )
-                exit(1)
-            # interactively retrieve answers for unspecified extra arguments
-            else:
-                print(f"\nThe translator '{translator['name']}' requires additional options")
-                for arg_name, arg in unspecified_extra_args.items():
-                    print('')
-                    if arg['type'] == 'flag':
-                        print(f"Please specify '{arg_name}'")
-                        specified_extra_args[arg_name] = self.confirm(f"{arg['description']}:", False)
-                    else:
-                        print(f"Please specify '{arg_name}': {arg['description']}")
-                        print(f"Example values: " + ', '.join(arg['examples']))
-                        if 'default' in arg:
-                            print(f"Leave empty for default ({arg['default']})")
-                        while True:
-                            specified_extra_args[arg_name] = self.ask(f"{arg_name}:", arg.get('default'))
-                            if specified_extra_args[arg_name]:
-                                break
-
-        # build the translator bin
-        t = translator
-        translator_path = buildNixAttribute(
-            f"translators.translators.{t['subsystem']}.{t['type']}.{t['name']}.translateBin"
-        )
-
-        # direct outputs of translator to temporary file
-        with tempfile.NamedTemporaryFile("r") as output_temp_file:
-
-            # arguments for calling the translator nix module
-            translator_input = dict(
-                inputFiles=[],
-                inputDirectories=[source],
-                outputFile=output_temp_file.name,
-            )
-            translator_input.update(specified_extra_args)
-
-            # dump translator arguments to json file and execute translator
-            print("\nTranslating project metadata")
-            with tempfile.NamedTemporaryFile("w") as input_json_file:
-                json.dump(translator_input, input_json_file, indent=2)
-                input_json_file.seek(0) # flushes write cache
-
-                # execute translator
-                sp.run(
-                    [f"{translator_path}/bin/run", input_json_file.name]
-                )
-
-            # raise error if output wasn't produced
-            if not output_temp_file.read():
-                raise Exception(f"Translator failed to create dream-lock.json")
-
-            # read produced lock file
-            with open(output_temp_file.name) as f:
-                lock = json.load(f)
-
-
-        # get package name and version from lock
-        mainPackageName = lock['_generic']['mainPackageName']
-        mainPackageVersion = lock['_generic']['mainPackageVersion']
-
-        # calculate output directory
-        mainPackageDirName = mainPackageName.strip('@').replace('/', '-')
-
-        # verify / change main package dir name
-        def update_name(mainPackageDirName):
-            print(f"Current package attribute name is: {mainPackageDirName}")
-            new_name = self.ask(
-                "Specify new attribute name or leave empty to keep current:"
-            )
-            if new_name:
-                return new_name
-            return mainPackageDirName
-
-        mainPackageDirName = update_name(mainPackageDirName)
-
-        if self.option('target'):
-            if self.option('target').startswith('/'):
-                output = self.option('target')
-            else:
-                output = f"{packages_root}/{self.option('target')}"
-        else:
-            output = f"{packages_root}/{mainPackageDirName}"
-
-        # collect files to create
-        filesToCreate = ['dream-lock.json']
-        if not os.path.isdir(output):
-            os.mkdir(output)
-        existingFiles = set(os.listdir(output))
-        if not self.option('no-default-nix')\
-                and not 'default.nix' in existingFiles\
-                and not config['packagesDir']:
-            if self.confirm(
-                    'Create a default.nix for debugging purposes',
-                    default=True):
-                filesToCreate.append('default.nix')
-
-        # overwrite existing files only if --force is set
-        if self.option('force'):
-            for f in filesToCreate:
-                if os.path.isfile(f):
-                    os.remove(f)
-        # raise error if any file exists already
-        else:
-            if any(f in existingFiles for f in filesToCreate):
-                print(
-                    f"output directory {output} already contains a 'default.nix' "
-                    "or 'dream-lock.json'. Resolve via one of these:\n"
-                    " - use --force to overwrite files\n"
-                    " - use --target to specify another target dir",
-                    file=sys.stderr,
-                )
-                exit(1)
-        output = os.path.realpath(output)
-        outputDreamLock = f"{output}/dream-lock.json"
-        outputDefaultNix = f"{output}/default.nix"
-
-        # write translator information to lock file
-        combined = self.option('combined')
-        lock['_generic']['translatedBy'] = f"{t['subsystem']}.{t['type']}.{t['name']}"
-        lock['_generic']['translatorParams'] = " ".join([
-            '--translator',
-            f"{translator['subsystem']}.{translator['type']}.{translator['name']}",
-        ] + (
-            ["--combined"] if combined else []
-        ) + [
-            f"--arg {n}={v}" for n, v in specified_extra_args.items()
-        ])
-
-        # add main package source
-        mainSource = sourceSpec.copy()
-        if not mainSource:
-            mainSource = dict(
-                type="unknown",
-            )
-        if mainPackageName not in lock['sources']:
-            lock['sources'][mainPackageName] = {
-                mainPackageVersion: mainSource
-            }
-        else:
-            lock['sources'][mainPackageName][mainPackageVersion] = mainSource
-
-        # clean up dependency graph
-        # remove empty entries
-        if 'dependencies' in lock['_generic']:
-            depGraph = lock['_generic']['dependencies']
-            if 'dependencies' in lock['_generic']:
-                for pname, deps in depGraph.copy().items():
-                    if not deps:
-                        del depGraph[pname]
-
-            # remove cyclic dependencies
-            edges = set()
-            for pname, versions in depGraph.items():
-                for version, deps in versions.items():
-                    for dep in deps:
-                        edges.add(((pname, version), tuple(dep)))
-            G = nx.DiGraph(sorted(list(edges)))
-            cycle_count = 0
-            removed_edges = []
-            for pname, versions in depGraph.items():
-                for version in versions.keys():
-                    key = (pname, version)
-                    try:
-                        while True:
-                            cycle = nx.find_cycle(G, key)
-                            cycle_count += 1
-                            # remove_dependecy(indexed_pkgs, G, cycle[-1][0], cycle[-1][1])
-                            node_from, node_to = cycle[-1][0], cycle[-1][1]
-                            G.remove_edge(node_from, node_to)
-                            removed_edges.append((node_from, node_to))
-                    except nx.NetworkXNoCycle:
-                        continue
-            lock['cyclicDependencies'] = {}
-            if removed_edges:
-                cycles_text = 'Detected Cyclic dependencies:'
-                for node, removed in removed_edges:
-                    n_name, n_ver = node[0], node[1]
-                    r_name, r_ver = removed[0], removed[1]
-                    cycles_text +=\
-                        f"\n {n_name}#{n_ver} -> {r_name}#{r_ver}"
-                    if n_name not in lock['cyclicDependencies']:
-                        lock['cyclicDependencies'][n_name] = {}
-                    if n_ver not in lock['cyclicDependencies'][n_name]:
-                        lock['cyclicDependencies'][n_name][n_ver] = []
-                    lock['cyclicDependencies'][n_name][n_ver].append(removed)
-                print(cycles_text)
-
-        # calculate combined hash if --combined was specified
-        if combined:
-
-            print("Building FOD of combined sources to retrieve output hash")
-
-            # remove hashes from lock file and init sourcesCombinedHash with empty string
-            strip_hashes_from_lock(lock)
-            lock['_generic']['sourcesCombinedHash'] = ""
-            with open(outputDreamLock, 'w') as f:
-                json.dump(lock, f, indent=2)
-
-            # compute FOD hash of combined sources
-            proc = sp.run(
-                [
-                    "nix", "build", "--impure", "-L", "--expr",
-                    f"(import {dream2nix_src} {{}}).fetchSources {{ dreamLock = {outputDreamLock}; }}"
-                ],
-                capture_output=True,
-            )
-
-            # read the output hash from the failed build log
-            match = re.search(r"FOD_PATH=(.*=)", proc.stderr.decode())
-            if not match:
-                print(proc.stderr.decode())
-                print(proc.stdout.decode())
-                raise Exception("Could not find FOD hash in FOD log")
-            hash = match.groups()[0]
-            print(f"Computed FOD hash: {hash}")
-
-            # store the hash in the lock
-            lock['_generic']['sourcesCombinedHash'] = hash
-
-        # re-write dream-lock.json
-        checkLockJSON(lock)
-        lockStr = json.dumps(lock, indent=2, sort_keys = True)
-        lockStr = lockStr\
-            .replace("[\n ", "[ ")\
-            .replace("\"\n ]", "\" ]")\
-            .replace(",\n ", ", ")
-        with open(outputDreamLock, 'w') as f:
-            f.write(lockStr)
-
-        # create default.nix
-        template = callNixFunction(
-            'apps.apps.cli.templateDefaultNix',
-            dream2nixLocationRelative=os.path.relpath(dream2nix_src, output),
-            dreamLock = lock,
-            sourcePathRelative = os.path.relpath(source, os.path.dirname(outputDefaultNix))
-        )
-        # with open(f"{dream2nix_src}/apps/cli2/templateDefault.nix") as template:
-        if 'default.nix' in filesToCreate:
-            with open(outputDefaultNix, 'w') as defaultNix:
-                defaultNix.write(template)
-                print(f"Created {output}/default.nix")
-
-        print(f"Created {output}/dream-lock.json")
-
-        if config['isRepo']:
-            sp.run(["git", "add", "-N", output])
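The postprocess_dep_graph method above breaks dependency cycles by repeatedly asking networkx for a cycle reachable from each node and cutting the edge that closes it, recording the cut edges under 'cyclicDependencies'. A minimal standalone sketch of that strategy, assuming only that networkx is installed (the package names and versions below are made up for illustration):

import networkx as nx

# toy dependency graph with one cycle: a -> b -> c -> a
edges = [(("a", "1.0"), ("b", "1.0")),
         (("b", "1.0"), ("c", "1.0")),
         (("c", "1.0"), ("a", "1.0"))]
G = nx.DiGraph(edges)

removed_edges = []
for node in list(G.nodes):
    try:
        while True:
            # find a cycle reachable from this node and cut its closing edge
            cycle = nx.find_cycle(G, node)
            node_from, node_to = cycle[-1][0], cycle[-1][1]
            G.remove_edge(node_from, node_to)
            removed_edges.append((node_from, node_to))
    except nx.NetworkXNoCycle:
        continue

# these removed edges are what the CLI would record as cyclic dependencies
print(removed_edges)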
@@ -66,7 +66,7 @@ class UpdateCommand(Command):
            dreamLock=dreamLockFile,
            updater=updater,
        )
-        update_proc = sp.run([f"{update_script}/bin/run"], capture_output=True)
+        update_proc = sp.run([f"{update_script}"], capture_output=True)
        version = update_proc.stdout.decode().strip()
        print(f"Updating from version {old_version} to {version}")

@@ -22,25 +22,22 @@ let

in
{
  program =
-    let
-      script = utils.writePureShellScript
+    utils.writePureShellScript
      [
        gitMinimal
        nix
      ]
      ''
        # escape the temp dir created by writePureShellScript
        cd - > /dev/null

        # run the cli
        dream2nixConfig=${configFile} \
          dream2nixSrc=${dream2nixWithExternals} \
          fetcherNames="${b.toString (lib.attrNames fetchers.fetchers)}" \
          ${cliPython}/bin/python ${./.}/cli.py "$@"
      '';
-    in
-      "${script}/bin/run";

  templateDefaultNix =
  {
src/builders/go/gomod2nix/default.nix (new file, 21 lines)
@@ -0,0 +1,21 @@
+{
+  lib,
+  pkgs,
+  externals,
+  ...
+}:
+
+{
+  fetchedSources,
+  dreamLock,
+}:
+let
+  gomod2nixTOML = fetchedSources.mapAttrs
+    dependencyObject.goName;
+in
+externals.gomod2nixBuilder rec {
+  pname = dreamLock.generic.mainPackage;
+  version = dreamLock.sources."${pname}".version;
+  src = fetchedSources."${pname}";
+  modules = ./gomod2nix.toml;
+}
@@ -57,6 +57,7 @@ let

  # like pkgs.callPackage, but includes all the dream2nix modules
  callPackageDream = f: args: pkgs.callPackage f (args // {
+    inherit apps;
    inherit builders;
    inherit callPackageDream;
    inherit config;
@@ -172,7 +173,7 @@ let
        fetchedSources;
    };


  makeDreamLockForSource =
    {
      source,
@@ -330,7 +331,7 @@ let


  # produce outputs for a dream-lock or a source
  riseAndShine =
    {
      dreamLock ? null,
      builder ? null,
@@ -403,12 +404,13 @@ let

    in
      builderOutputs;

in
{
  inherit
    apps
    builders
+    callPackageDream
    dream2nixWithExternals
    fetchers
    fetchSources
@@ -12,7 +12,7 @@

{

-  # A derivation which outputs an executable at `/bin/run`.
+  # A derivation which outputs a single executable at `$out`.
  # The executable will be called by dream2nix for translation
  # The input format is specified in /specifications/translator-call-example.json.
  # The first arg `$1` will be a json file containing the input parameters
@@ -57,10 +57,10 @@
      # examples:
      #   - ''.*requirements.*\.txt''
      #   - ''.*package-lock\.json''
      inputDirectories = lib.filter
        (utils.containsMatchingFile [ ''TODO: regex1'' ''TODO: regex2'' ])
        args.inputDirectories;

      inputFiles = [];
    };

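The comments above describe the translator call protocol: dream2nix runs the translator executable with a single argument, the path of a JSON file holding the input parameters, and expects the dream-lock JSON to be written to the outputFile named there. A rough sketch of how a caller could assemble and pass such an input file, mirroring the fields used by the CLI's run_translate; the directory, output path, and "translate-bin" executable name are placeholders, not real values from the repository:

import json
import subprocess
import tempfile

# fields mirror what the CLI passes: inputFiles, inputDirectories, outputFile,
# plus any translator-specific extra args (all values here are placeholders)
translator_input = dict(
    inputFiles=[],
    inputDirectories=["/path/to/some/project"],
    outputFile="/tmp/dream-lock.json",
)

with tempfile.NamedTemporaryFile("w", suffix=".json") as input_json_file:
    json.dump(translator_input, input_json_file, indent=2)
    input_json_file.flush()
    # "translate-bin" stands in for the store path of a translator's translateBin
    subprocess.run(["translate-bin", input_json_file.name])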
@@ -10,14 +10,14 @@
  dream2nixWithExternals,
  utils,
  ...
}:
let

  b = builtins;

  lib = pkgs.lib;

  callTranslator = subsystem: type: name: file: args:
    let
      translator = callPackageDream file (args // {
        inherit externals;
@@ -37,7 +37,7 @@ let
        translator.translate
          ((getextraArgsDefaults translator.extraArgs or {}) // args);
    };


  subsystems = utils.dirNames ./.;

@@ -59,10 +59,10 @@ let
      nix eval --show-trace --impure --raw --expr "
        let
          dream2nix = import ${dream2nixWithExternals} {};
          dreamLock =
            dream2nix.translators.translators.${
              lib.concatStringsSep "." translatorAttrPath
            }.translate
              (builtins.fromJSON (builtins.readFile '''$1'''));
        in
          dream2nix.utils.dreamLock.toJSON
@@ -95,23 +95,62 @@ let
  # flat list of all translators
  translatorsList = lib.collect (v: v ? translateBin) translators;

-  # json file exposing all existing translators to CLI including their special args
+  # returns the list of translators including their special args
+  # and adds a flag `compatible` to each translator indicating
+  # if the translator is compatible to all given paths
  translatorsForInput =
    {
      inputDirectories,
      inputFiles,
    }@args:
    lib.forEach translatorsList
-      (t: {
+      (t: rec {
        inherit (t)
          name
          extraArgs
          subsystem
          type
        ;
-        compatible = t.compatiblePaths args == args;
+        compatiblePaths = t.compatiblePaths args;
+        compatible = compatiblePaths == args;
      });

+  # also includes subdirectories of the given paths up to a certain depth
+  # to check for translator compatibility
+  translatorsForInputRecursive =
+    {
+      inputDirectories,
+      inputFiles,
+      depth ? 2,
+    }:
+    let
+      listDirsRec = dir: depth:
+        let
+          subDirs = (utils.listDirs dir);
+        in
+          if depth == 0 then
+            subDirs
+          else
+            subDirs
+            ++
+            (lib.flatten
+              (map
+                (subDir: listDirsRec subDir (depth -1))
+                subDirs));
+
+      dirsToCheck =
+        lib.flatten
+          (map
+            (inputDir: listDirsRec inputDir depth)
+            inputDirectories);
+
+    in
+      translatorsForInput {
+        inputDirectories = dirsToCheck;
+        inherit inputFiles;
+      };
+

  # pupulates a translators special args with defaults
  getextraArgsDefaults = extraArgsDef:
    lib.mapAttrs
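translatorsForInputRecursive above widens the compatibility check to subdirectories up to a fixed depth. A small Python sketch of the same bounded recursion, using only the standard library (the starting path and depth are arbitrary examples):

import os

def list_dirs(path):
    # immediate subdirectories of a path, analogous to utils.listDirs
    return [entry.path for entry in os.scandir(path) if entry.is_dir()]

def list_dirs_rec(path, depth):
    # all subdirectories up to `depth` levels below `path`
    sub_dirs = list_dirs(path)
    if depth == 0:
        return sub_dirs
    result = list(sub_dirs)
    for sub in sub_dirs:
        result += list_dirs_rec(sub, depth - 1)
    return result

# the collected directories would then be fed to the normal compatibility check
dirs_to_check = list_dirs_rec(".", 2)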
src/translators/go/impure/gomod2nix/default.nix (new file, 68 lines)
@@ -0,0 +1,68 @@
+{
+  # dream2nix utils
+  utils,
+  dream2nixWithExternals,
+
+  bash,
+  coreutils,
+  jq,
+  lib,
+  nix,
+  writeScriptBin,
+  ...
+}:
+
+{
+
+  # the input format is specified in /specifications/translator-call-example.json
+  # this script receives a json file including the input paths and specialArgs
+  translateBin = utils.writePureShellScript
+    [
+      bash
+      coreutils
+      jq
+      nix
+    ]
+    ''
+      # according to the spec, the translator reads the input from a json file
+      jsonInput=$1
+
+      # read the json input
+      outputFile=$(${jq}/bin/jq '.outputFile' -c -r $jsonInput)
+      inputDirectory=$(${jq}/bin/jq '.inputDirectories | .[0]' -c -r $jsonInput)
+
+      tmpBuild=$(mktemp -d)
+      cd $tmpBuild
+      cp -r $inputDirectory/* .
+      chmod -R +w .
+      # This should be in sync with gomod2nix version in flake.lock
+      nix run github:tweag/gomod2nix/67f22dd738d092c6ba88e420350ada0ed4992ae8
+
+      nix eval --show-trace --impure --raw --expr "import ${./translate.nix} ${dream2nixWithExternals} ./." > $outputFile
+    '';
+
+
+  # From a given list of paths, this function returns all paths which can be processed by this translator.
+  # This allows the framework to detect if the translator is compatible with the given inputs
+  # to automatically select the right translator.
+  compatiblePaths =
+    {
+      inputDirectories,
+      inputFiles,
+    }@args:
+    {
+      inputDirectories = lib.filter
+        (utils.containsMatchingFile [ ''go\.sum'' ''go\.mod'' ])
+        args.inputDirectories;
+
+      inputFiles = [];
+    };
+
+
+  # If the translator requires additional arguments, specify them here.
+  # There are only two types of arguments:
+  #   - string argument (type = "argument")
+  #   - boolean flag (type = "flag")
+  # String arguments contain a default value and examples. Flags do not.
+  extraArgs = {};
+}
src/translators/go/impure/gomod2nix/translate.nix (new file, 66 lines)
@@ -0,0 +1,66 @@
+dream2nixWithExternals:
+cwd:
+let
+  dream2nix = import dream2nixWithExternals { };
+  b = builtins;
+  parsed = b.fromTOML (builtins.readFile "${cwd}/gomod2nix.toml");
+  pkgs = import <nixpkgs> { };
+  lib = pkgs.lib;
+  serializePackages = inputData:
+    lib.mapAttrsToList
+      (goName: depAttrs: depAttrs // { inherit goName; })
+      parsed;
+  translated = dream2nix.utils.simpleTranslate "gomod2nix" rec {
+
+    inputData = parsed;
+
+    mainPackageName =
+      let
+        firstLine = (b.elemAt (lib.splitString "\n" (b.readFile "${cwd}/go.mod")) 0);
+      in
+        lib.last (lib.splitString "/" (b.elemAt (lib.splitString " " firstLine) 1));
+
+    mainPackageVersion = "unknown";
+
+    subsystemName = "go";
+
+    subsystemAttrs = { };
+
+    inherit serializePackages;
+
+    mainPackageDependencies =
+      lib.forEach
+        (serializePackages parsed)
+        (dep: {
+          name = getName dep;
+          version = getVersion dep;
+        });
+
+    getOriginalID = dependencyObject:
+      null;
+
+    getName = dependencyObject:
+      dependencyObject.goName;
+
+    getVersion = dependencyObject:
+      lib.removePrefix "v" dependencyObject.sumVersion;
+
+    getDependencies = dependencyObject: getDepByNameVer: dependenciesByOriginalID:
+      [];
+
+    getSourceType = dependencyObject: "git";
+
+    sourceConstructors = {
+      git = dependencyObject:
+        {
+          type = "git";
+          version = getVersion dependencyObject;
+          hash = dependencyObject.fetch.sha256;
+          url = dependencyObject.fetch.url;
+          rev = dependencyObject.fetch.rev;
+        };
+    };
+
+  };
+in
+  dream2nix.utils.dreamLock.toJSON translated
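translate.nix derives mainPackageName from the first line of go.mod: it takes the second whitespace-separated token of the module directive and keeps the last path component. The same parsing expressed in Python, assuming the first line of go.mod is a module directive (the module path below is only an example value):

def main_package_name(go_mod_text: str) -> str:
    first_line = go_mod_text.splitlines()[0]
    module_path = first_line.split(" ")[1]   # second token after "module"
    return module_path.split("/")[-1]        # last path component

print(main_package_name("module github.com/tweag/gomod2nix"))  # -> "gomod2nix"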
@@ -35,8 +35,8 @@
      cat package-lock.json

      jq ".inputDirectories[0] = \"$(pwd)\"" -c -r $jsonInput > ./newJsonInput

-      ${translators.translators.nodejs.pure.package-lock.translateBin}/bin/run $(realpath ./newJsonInput)
+      ${translators.translators.nodejs.pure.package-lock.translateBin} $(realpath ./newJsonInput)
    '';


@@ -49,10 +49,10 @@
      inputFiles,
    }@args:
    {
      inputDirectories = lib.filter
        (utils.containsMatchingFile [ ''.*package.json'' ])
        args.inputDirectories;

      inputFiles = [];
    };

@@ -2,6 +2,7 @@
  lib,

  externals,
+  nodejs,
  translatorName,
  utils,
  ...
@@ -16,14 +17,15 @@
  # extraArgs
  name,
  noDev,
+  nodejs,
  peer,
  ...
-}:
+}@args:

let
  b = builtins;
  dev = ! noDev;

  sourceDir = lib.elemAt inputDirectories 0;
  yarnLock = utils.readTextFile "${sourceDir}/yarn.lock";
  packageJSON = b.fromJSON (b.readFile "${sourceDir}/package.json");
@@ -43,7 +45,7 @@
      ${lib.substring failureOffset 50 tryParse.value.str}
    '';
in

utils.simpleTranslate translatorName rec {

  inputData = parsedLock;
@@ -60,9 +62,7 @@

  subsystemName = "nodejs";

-  subsystemAttrs = {
-    nodejsVersion = 14;
-  };
+  subsystemAttrs = { nodejsVersion = args.nodejs; };

  mainPackageDependencies =
    lib.mapAttrsToList
@@ -116,13 +116,13 @@
      in
        lib.forEach
          dependencies
          (dependency:
            builtins.head (
              lib.mapAttrsToList
                (name: versionSpec:
                  let
                    yarnName = "${name}@${versionSpec}";
                    depObject = dependenciesByOriginalID."${yarnName}";
                    version = depObject.version;
                  in
                    if ! dependenciesByOriginalID ? ${yarnName} then
@@ -153,7 +153,7 @@
        && lib.hasInfix "codeload.github.com/" dObj.resolved

        || lib.hasInfix "@git+" dObj.yarnName

        # example:
        # "jest-image-snapshot@https://github.com/machard/jest-image-snapshot#machard-patch-1":
        #   version "4.2.0"
@@ -175,7 +175,7 @@
      else
        "http";


  sourceConstructors = {
    git = dependencyObject:
      if utils.identifyGitUrl dependencyObject.resolved then
@@ -215,13 +215,13 @@
    path = dependencyObject:
      if lib.hasInfix "@link:" dependencyObject.yarnName then
        {
          version = dependencyObject.version;
          path =
            lib.last (lib.splitString "@link:" dependencyObject.yarnName);
        }
      else if lib.hasInfix "@file:" dependencyObject.yarnName then
        {
          version = dependencyObject.version;
          path =
            lib.last (lib.splitString "@file:" dependencyObject.yarnName);
        }
@@ -231,7 +231,7 @@
    http = dependencyObject:
      {
        type = "http";
        version = dependencyObject.version;
        hash =
          if dependencyObject ? integrity then
            dependencyObject.integrity
@@ -256,7 +256,7 @@


  };


  # From a given list of paths, this function returns all paths which can be processed by this translator.
  # This allows the framework to detect if the translator is compatible with the given inputs
@@ -267,7 +267,7 @@
      inputFiles,
    }@args:
    {
      inputDirectories = lib.filter
        (utils.containsMatchingFile [ ''.*yarn\.lock'' ''.*package.json'' ])
        args.inputDirectories;

@@ -297,6 +297,16 @@
      type = "flag";
    };

+    nodejs = {
+      description = "nodejs version to use for building";
+      default = lib.elemAt (lib.splitString "." nodejs.version) 0;
+      examples = [
+        "14"
+        "16"
+      ];
+      type = "argument";
+    };
+
    peer = {
      description = "Include peer dependencies";
      type = "flag";
@@ -4,8 +4,9 @@
 fetchzip,
 lib,
 nix,
+pkgs,
 runCommand,
-writeScriptBin,
+writeScript,
 
 # dream2nix inputs
 callPackageDream,
@@ -15,7 +16,7 @@
 let
 
 b = builtins;
 
 dreamLockUtils = callPackageDream ./dream-lock.nix {};
 
 overrideUtils = callPackageDream ./override.nix {};
@@ -58,6 +59,8 @@ rec {
 
 listFiles = path: lib.attrNames (lib.filterAttrs (n: v: v == "regular") (builtins.readDir path));
 
+listDirs = path: lib.attrNames (lib.filterAttrs (n: v: v == "directory") (builtins.readDir path));
+
 # directory names of a given directory
 dirNames = dir: lib.attrNames (lib.filterAttrs (name: type: type == "directory") (builtins.readDir dir));
 
@@ -87,11 +90,13 @@ rec {
 b.readFile hashFile;
 
 # builder to create a shell script that has it's own PATH
-writePureShellScript = availablePrograms: script: writeScriptBin "run" ''
+writePureShellScript = availablePrograms: script: writeScript "script.sh" ''
 #!${bash}/bin/bash
 set -Eeuo pipefail
 
 export PATH="${lib.makeBinPath availablePrograms}"
+export NIX_PATH=nixpkgs=${pkgs.path}
 
 tmpdir=$(${coreutils}/bin/mktemp -d)
 cd $tmpdir
 
@@ -118,7 +123,7 @@ rec {
 ''
 + old.postFetch;
 });
 
 sanitizeDerivationName = name:
 lib.replaceStrings [ "@" "/" ] [ "__at__" "__slash__" ] name;
 
@@ -165,7 +170,7 @@ rec {
 
 satisfiesSemver = poetry2nixSemver.satisfiesSemver;
 
 # like nixpkgs recursiveUpdateUntil, but the depth of the
 recursiveUpdateUntilDepth = depth: lhs: rhs:
 lib.recursiveUpdateUntil (path: l: r: (b.length path) > depth) lhs rhs;
 
@@ -9,7 +9,7 @@ let
 
 b = builtins;
 
 readDreamLock =
 {
 dreamLock,
 }@args:
@@ -49,7 +49,7 @@ let
 b.filter
 (dep: ! b.elem dep cyclicDependencies."${pname}"."${version}" or [])
 dependencyGraph."${pname}"."${version}" or [];
 
 getCyclicDependencies = pname: version:
 cyclicDependencies."${pname}"."${version}" or [];
 
@@ -100,7 +100,7 @@ let
 getSubDreamLock = dreamLock: name: version:
 let
 lock = (readDreamLock { inherit dreamLock; }).lock;
 
 in
 lock // {
 _generic = lock._generic // {
tests/impure/default.nix (Normal file, 34 lines)
@@ -0,0 +1,34 @@
+{
+lib,
+
+# dream2nix
+callPackageDream,
+utils,
+...
+}:
+let
+
+l = lib // builtins;
+
+allTestFiles =
+l.attrNames
+(l.filterAttrs
+(name: type: type == "regular" && l.hasPrefix "test_" name)
+(l.readDir ./.));
+
+allTests =
+l.map
+(file: callPackageDream ("${./.}/${file}") {})
+allTestFiles;
+
+executeAll = utils.writePureShellScript
+[]
+''
+for test in ${toString allTests}; do
+$test
+done
+'';
+
+
+in
+executeAll
tests/impure/test_go.nix (Normal file, 23 lines)
@@ -0,0 +1,23 @@
+{
+lib,
+
+# dream2nix
+apps,
+utils,
+...
+}:
+let
+
+l = lib // builtins;
+
+cli = apps.cli.program;
+
+in
+utils.writePureShellScript
+[]
+''
+${cli} add github:tweag/gomod2nix/67f22dd738d092c6ba88e420350ada0ed4992ae8 \
+--no-default-nix \
+--translator gomod2nix \
+--attribute-name gomod2nix
+''
tests/impure/test_package-lock.nix (Normal file, 26 lines)
@@ -0,0 +1,26 @@
+{
+lib,
+
+# dream2nix
+apps,
+utils,
+...
+}:
+let
+
+l = lib // builtins;
+
+cli = apps.cli.program;
+
+in
+utils.writePureShellScript
+[]
+''
+${cli} add github:mattermost/mattermost-webapp/v6.1.0 \
+--no-default-nix \
+--translator package-lock \
+--attribute-name mattermost-webapp \
+--arg name="{automatic}" \
+--arg noDev=false \
+--arg nodejs=14
+''
tests/impure/test_yarn-lock-prettier.nix (Normal file, 27 lines)
@@ -0,0 +1,27 @@
+{
+lib,
+
+# dream2nix
+apps,
+utils,
+...
+}:
+let
+
+l = lib // builtins;
+
+cli = apps.cli.program;
+
+in
+utils.writePureShellScript
+[]
+''
+${cli} add github:prettier/prettier/2.4.1 \
+--no-default-nix \
+--translator yarn-lock \
+--attribute-name prettier \
+--arg name="{automatic}" \
+--arg noDev=false \
+--arg nodejs=14 \
+--arg peer=false
+''
@@ -28,9 +28,15 @@ let
 url = "https://github.com/prettier/prettier/tarball/2.4.1";
 sha256 = "19b37qakhlsnr2n5bgv83aih5npgzbad1d2p2rs3zbq5syqbxdyi";
 };
-cmds = outputs: [
-"${outputs.defaultPackage}/bin/prettier --version | grep -q 2.4.1 && mkdir $out"
-];
+cmds = outputs:
+let
+prettier = outputs.defaultPackage.overrideAttrs (old: {
+dontBuild = true;
+});
+in
+[
+"${prettier}/bin/prettier --version | grep -q 2.4.1 && mkdir $out"
+];
 };
 };
 
@@ -38,6 +44,6 @@ let
 lib.mapAttrs
 (name: args: makeTest (args // { inherit name; }))
 projects;
 
 in
 allTests