add: update cli app for automatic package update

DavHau 2021-10-19 00:22:37 +07:00
parent a6f52f96a9
commit bf4b807e34
17 changed files with 695 additions and 490 deletions

.gitignore

@ -1,2 +1,3 @@
.*/
result
interpreter

src/apps/cli/cli.py

@ -1,471 +1,11 @@
import json
from cleo import Application
from jsonschema import validate
import os
import re
import subprocess as sp
import sys
import tempfile
from cleo import Application, Command
from cleo.helpers import option
import networkx as nx
dream2nix_src = os.environ.get("dream2nixSrc")
class PackageCommand(Command):
description = (
"Package a software project using nix"
)
name = "package"
options = [
option(
"source",
None,
"source of the package, can be a path or flake-like spec",
flag=False,
multiple=False
),
option("translator", None, "which translator to use", flag=False),
option("output", None, "output file/directory for the dream.lock", flag=False),
option(
"combined",
None,
"store only one hash for all sources combined"
" (smaller lock file but larger FOD)",
flag=False
),
option(
"extra-arg",
None,
"extra arguments for selected translator",
flag=False,
multiple=True
),
option("force", None, "override existing files", flag=True),
option("default-nix", None, "create default.nix", flag=True),
]
def handle(self):
if self.io.is_interactive():
self.line(f"\n{self.description}\n")
# parse extra args
specified_extra_args = {
arg[0]: arg[1] for arg in map(
lambda e: e.split('='),
self.option("extra-arg"),
)
}
# ensure output directory
output = self.option("output")
if not output:
output = './.'
if not os.path.isdir(output):
os.mkdir(output)
filesToCreate = ['dream.lock']
if self.option('default-nix'):
filesToCreate.append('default.nix')
if self.option('force'):
for f in filesToCreate:
if os.path.isfile(f):
os.remove(f)
else:
existingFiles = set(os.listdir(output))
if any(f in existingFiles for f in filesToCreate):
print(
f"output directory {output} already contains a 'default.nix' "
"or 'dream.lock'. Delete first, or user '--force'.",
file=sys.stderr,
)
exit(1)
output = os.path.realpath(output)
outputDreamLock = f"{output}/dream.lock"
outputDefaultNix = f"{output}/default.nix"
# verify source
source = self.option("source")
if not source:
source = os.path.realpath('./.')
print(
f"Source not specified. Defaulting to current directory: {source}",
file=sys.stderr,
)
# check if source is valid fetcher spec
sourceSpec = {}
if source.partition(':')[0] in os.environ.get("fetcherNames", None).split():
print(f"fetching source for '{source}'")
sourceSpec =\
callNixFunction("fetchers.translateShortcut", shortcut=source)
source =\
buildNixFunction("fetchers.fetchShortcut", shortcut=source)
else:
# check if source path exists
if not os.path.exists(source):
print(f"Input source '{source}' does not exist", file=sys.stdout)
exit(1)
source = os.path.realpath(source)
# select translator
translatorsSorted = sorted(
list_translators_for_source(source),
key=lambda t: (
not t['compatible'],
['pure', 'ifd', 'impure'].index(t['type'])
)
)
translator = self.option("translator")
if not translator:
chosen = self.choice(
'Select translator',
list(map(
lambda t: f"{t['subsystem']}.{t['type']}.{t['name']}{' (compatible)' if t['compatible'] else ''}",
translatorsSorted
)),
0
)
translator = chosen
translator = list(filter(
lambda t: [t['subsystem'], t['type'], t['name']] == translator.split(' (')[0].split('.'),
translatorsSorted,
))[0]
else:
translator = translator.split('.')
try:
if len(translator) == 3:
translator = list(filter(
lambda t: [t['subsystem'], t['type'], t['name']] == translator,
translatorsSorted,
))[0]
elif len(translator) == 1:
translator = list(filter(
lambda t: [t['name']] == translator,
translatorsSorted,
))[0]
except IndexError:
print(f"Could not find translator '{'.'.join(translator)}'", file=sys.stderr)
exit(1)
# raise error if any specified extra arg is unknown
unknown_extra_args = set(specified_extra_args.keys()) - set(translator['specialArgs'].keys())
if unknown_extra_args:
print(
f"Invalid extra args for translator '{translator['name']}': "
f" {', '.join(unknown_extra_args)}"
"\nPlease remove these parameters",
file=sys.stderr
)
exit(1)
# transform flags to bool
for argName, argVal in specified_extra_args.copy().items():
if translator['specialArgs'][argName]['type'] == 'flag':
if argVal.lower() in ('yes', 'y', 'true'):
specified_extra_args[argName] = True
elif argVal.lower() in ('no', 'n', 'false'):
specified_extra_args[argName] = False
else:
print(
f"Invalid value {argVal} for argument {argName}",
file=sys.stderr
)
specified_extra_args =\
{k: (bool(v) if translator['specialArgs'][k]['type'] == 'flag' else v ) \
for k, v in specified_extra_args.items()}
# on non-interactive session, assume defaults for unspecified extra args
if not self.io.is_interactive():
specified_extra_args.update(
{n: (True if v['type'] == 'flag' else v['default']) \
for n, v in translator['specialArgs'].items() \
if n not in specified_extra_args and 'default' in v}
)
unspecified_extra_args = \
{n: v for n, v in translator['specialArgs'].items() \
if n not in specified_extra_args}
# raise error if any extra arg unspecified in non-interactive session
if unspecified_extra_args:
if not self.io.is_interactive():
print(
f"Please specify the following extra arguments required by translator '{translator['name']}' :\n" \
', '.join(unspecified_extra_args.keys()),
file=sys.stderr
)
exit(1)
# interactively retrieve answers for unspecified extra arguments
else:
print(f"\nThe translator '{translator['name']}' requires additional options")
for arg_name, arg in unspecified_extra_args.items():
print('')
if arg['type'] == 'flag':
print(f"Please specify '{arg_name}': {arg['description']}")
specified_extra_args[arg_name] = self.confirm(f"{arg['description']}:", False)
else:
print(f"Please specify '{arg_name}': {arg['description']}")
print(f"Example values: " + ', '.join(arg['examples']))
specified_extra_args[arg_name] = self.ask(f"{arg_name}:", arg.get('default'))
# arguments for calling the translator nix module
translator_input = dict(
inputFiles=[],
inputDirectories=[source],
outputFile=outputDreamLock,
)
translator_input.update(specified_extra_args)
# build the translator bin
t = translator
translator_path = buildNixAttribute(
f"translators.translators.{t['subsystem']}.{t['type']}.{t['name']}.translateBin"
)
# dump translator arguments to json file and execute translator
print("\nTranslating upstream metadata")
with tempfile.NamedTemporaryFile("w") as input_json_file:
json.dump(translator_input, input_json_file, indent=2)
input_json_file.seek(0) # flushes write cache
# execute translator
sp.run(
[f"{translator_path}/bin/translate", input_json_file.name]
)
# raise error if output wasn't produced
if not os.path.isfile(outputDreamLock):
raise Exception(f"Translator failed to create dream.lock")
# read produced lock file
with open(outputDreamLock) as f:
lock = json.load(f)
# write translator information to lock file
lock['generic']['translatedBy'] = f"{t['subsystem']}.{t['type']}.{t['name']}"
lock['generic']['translatorParams'] = " ".join(sys.argv[2:])
# add main package source
mainPackage = lock['generic']['mainPackage']
if mainPackage:
mainSource = sourceSpec.copy()
if not mainSource:
mainSource = dict(
type="unknown",
version="unknown",
)
else:
for field in ('versionField',):
if field in mainSource:
del mainSource[field]
mainSource['version'] = sourceSpec[sourceSpec['versionField']]
lock['sources'][mainPackage] = mainSource
# clean up dependency graph
# remove empty entries
depGraph = lock['generic']['dependencyGraph']
if 'dependencyGraph' in lock['generic']:
for pname, deps in depGraph.copy().items():
if not deps:
del depGraph[pname]
# remove cyclic dependencies
edges = set()
for pname, deps in depGraph.items():
for dep in deps:
edges.add((pname, dep))
G = nx.DiGraph(sorted(list(edges)))
cycle_count = 0
removed_edges = []
for pname in list(depGraph.keys()):
try:
while True:
cycle = nx.find_cycle(G, pname)
cycle_count += 1
# remove_dependecy(indexed_pkgs, G, cycle[-1][0], cycle[-1][1])
node_from, node_to = cycle[-1][0], cycle[-1][1]
G.remove_edge(node_from, node_to)
depGraph[node_from].remove(node_to)
removed_edges.append((node_from, node_to))
except nx.NetworkXNoCycle:
continue
lock['generic']['dependenciesRemoved'] = {}
if removed_edges:
lock['generic']['dependenciesRemoved'] = {}
removed_cycles_text = 'Removed Cyclic dependencies:'
for node, removed_node in removed_edges:
removed_cycles_text += f"\n {node} -> {removed_node}"
if node not in lock['generic']['dependenciesRemoved']:
lock['generic']['dependenciesRemoved'][node] = []
lock['generic']['dependenciesRemoved'][node].append(removed_node)
print(removed_cycles_text)
# calculate combined hash if --combined was specified
if self.option('combined'):
print("Building FOD of combined sources to retrieve output hash")
# remove hashes from lock file and init sourcesCombinedHash with empty string
strip_hashes_from_lock(lock)
lock['generic']['sourcesCombinedHash'] = ""
with open(outputDreamLock, 'w') as f:
json.dump(lock, f, indent=2)
# compute FOD hash of combined sources
proc = sp.run(
[
"nix", "build", "--impure", "-L", "--expr",
f"(import {dream2nix_src} {{}}).fetchSources {{ dreamLock = {outputDreamLock}; }}"
],
capture_output=True,
)
# read the output hash from the failed build log
match = re.search(r"FOD_PATH=(.*=)", proc.stderr.decode())
if not match:
print(proc.stderr.decode())
print(proc.stdout.decode())
raise Exception("Could not find FOD hash in FOD log")
hash = match.groups()[0]
print(f"Computed FOD hash: {hash}")
# store the hash in the lock
lock['generic']['sourcesCombinedHash'] = hash
# re-write dream.lock
checkLockJSON(order_dict(lock))
with open(outputDreamLock, 'w') as f:
json.dump(order_dict(lock), f, indent=2)
# create default.nix
template = callNixFunction(
'apps.apps.cli.templateDefaultNix',
dream2nixLocationRelative=os.path.relpath(dream2nix_src, output),
dreamLock = lock,
sourcePathRelative = os.path.relpath(source, os.path.dirname(outputDefaultNix))
)
# with open(f"{dream2nix_src}/apps/cli2/templateDefault.nix") as template:
if self.option('default-nix'):
with open(outputDefaultNix, 'w') as defaultNix:
defaultNix.write(template)
print(f"Created {output}/default.nix")
print(f"Created {output}/dream.lock")
def checkLockJSON(lock):
try:
lock_schema_raw=open(dream2nix_src+"/specifications/dream-lock-schema.json").read()
lock_schema=json.loads(lock_schema_raw)
except Exception as e:
print(e)
try:
validate(lock,schema=lock_schema)
except Exception as e1:
print(e1)
def callNixFunction(function_path, **kwargs):
with tempfile.NamedTemporaryFile("w") as input_json_file:
json.dump(dict(**kwargs), input_json_file, indent=2)
input_json_file.seek(0) # flushes write cache
env = os.environ.copy()
env.update(dict(
FUNC_ARGS=input_json_file.name
))
proc = sp.run(
[
"nix", "eval", "--impure", "--raw", "--expr",
f'''
let
d2n = (import {dream2nix_src} {{}});
in
builtins.toJSON (
(d2n.utils.makeCallableViaEnv d2n.{function_path}) {{}}
)
''',
],
capture_output=True,
env=env
)
if proc.returncode:
print(f"Failed calling nix function '{function_path}'", file=sys.stderr)
print(proc.stderr.decode(), file=sys.stderr)
exit(1)
# parse result data
return json.loads(proc.stdout)
def buildNixFunction(function_path, **kwargs):
with tempfile.NamedTemporaryFile("w") as input_json_file:
json.dump(dict(**kwargs), input_json_file, indent=2)
input_json_file.seek(0) # flushes write cache
env = os.environ.copy()
env.update(dict(
FUNC_ARGS=input_json_file.name
))
proc = sp.run(
[
"nix", "build", "--impure", "-o", "tmp-result", "--expr",
f'''
let
d2n = (import {dream2nix_src} {{}});
in
(d2n.utils.makeCallableViaEnv d2n.{function_path}) {{}}
''',
],
capture_output=True,
env=env
)
if proc.returncode:
print(f"Failed calling nix function '{function_path}'", file=sys.stderr)
print(proc.stderr.decode(), file=sys.stderr)
exit(1)
# return store path of result
result = os.path.realpath("tmp-result")
os.remove("tmp-result")
return result
def buildNixAttribute(attribute_path):
proc = sp.run(
[
"nix", "build", "--impure", "-o", "tmp-result", "--expr",
f"(import {dream2nix_src} {{}}).{attribute_path}",
],
capture_output=True,
)
if proc.returncode:
print(f"Failed to build '{attribute_path}'", file=sys.stderr)
print(proc.stderr.decode(), file=sys.stderr)
exit(1)
result = os.path.realpath("tmp-result")
os.remove("tmp-result")
return result
def list_translators_for_source(sourcePath):
translatorsList = callNixFunction(
"translators.translatorsForInput",
inputDirectories=[sourcePath],
inputFiles=[],
)
return list(sorted(translatorsList, key=lambda t: t['compatible']))
def order_dict(d):
return {k: order_dict(v) if isinstance(v, dict) else v
for k, v in sorted(d.items())}
from commands.package import PackageCommand
from commands.update import UpdateCommand
application = Application("package")
application.add(PackageCommand())
application.add(UpdateCommand())
if __name__ == '__main__':
    application.run()

src/apps/cli/commands/package.py (new file)

@ -0,0 +1,370 @@
import json
import os
import re
import subprocess as sp
import sys
import tempfile
import networkx as nx
from cleo import Command
from cleo.helpers import option
from utils import dream2nix_src, checkLockJSON, callNixFunction, buildNixFunction, buildNixAttribute, \
list_translators_for_source, order_dict, strip_hashes_from_lock
class PackageCommand(Command):
description = (
"Package a software project using nix"
)
name = "package"
options = [
option(
"source",
None,
"source of the package, can be a path or flake-like spec",
flag=False,
multiple=False
),
option("translator", None, "which translator to use", flag=False),
option("output", None, "output file/directory for the dream.lock", flag=False),
option(
"combined",
None,
"store only one hash for all sources combined"
" (smaller lock file but larger FOD)",
flag=False
),
option(
"extra-arg",
None,
"extra arguments for selected translator",
flag=False,
multiple=True
),
option("force", None, "override existing files", flag=True),
option("default-nix", None, "create default.nix", flag=True),
]
def handle(self):
if self.io.is_interactive():
self.line(f"\n{self.description}\n")
# parse extra args
specified_extra_args = {
arg[0]: arg[1] for arg in map(
lambda e: e.split('='),
self.option("extra-arg"),
)
}
# ensure output directory
output = self.option("output")
if not output:
output = './.'
if not os.path.isdir(output):
os.mkdir(output)
filesToCreate = ['dream.lock']
if self.option('default-nix'):
filesToCreate.append('default.nix')
if self.option('force'):
for f in filesToCreate:
if os.path.isfile(f):
os.remove(f)
else:
existingFiles = set(os.listdir(output))
if any(f in existingFiles for f in filesToCreate):
print(
f"output directory {output} already contains a 'default.nix' "
"or 'dream.lock'. Delete first, or user '--force'.",
file=sys.stderr,
)
exit(1)
output = os.path.realpath(output)
outputDreamLock = f"{output}/dream.lock"
outputDefaultNix = f"{output}/default.nix"
# verify source
source = self.option("source")
if not source:
source = os.path.realpath('./.')
print(
f"Source not specified. Defaulting to current directory: {source}",
file=sys.stderr,
)
# check if source is valid fetcher spec
sourceSpec = {}
# handle source shortcuts
if source.partition(':')[0].split('+')[0] in os.environ.get("fetcherNames", None).split():
print(f"fetching source for '{source}'")
sourceSpec =\
callNixFunction("fetchers.translateShortcut", shortcut=source)
source =\
buildNixFunction("fetchers.fetchShortcut", shortcut=source)
# handle source paths
else:
# check if source path exists
if not os.path.exists(source):
print(f"Input source '{source}' does not exist", file=sys.stdout)
exit(1)
source = os.path.realpath(source)
# handle source from dream.lock
if source.endswith('dream.lock'):
print(f"fetching source defined via existing dream.lock")
with open(source) as f:
sourceDreamLock = json.load(f)
sourceSpec =\
sourceDreamLock['sources'][sourceDreamLock['generic']['mainPackage']]
source = \
buildNixFunction("fetchers.fetchSource", source=sourceSpec)
# select translator
translatorsSorted = sorted(
list_translators_for_source(source),
key=lambda t: (
not t['compatible'],
['pure', 'ifd', 'impure'].index(t['type'])
)
)
translator = self.option("translator")
if not translator:
chosen = self.choice(
'Select translator',
list(map(
lambda t: f"{t['subsystem']}.{t['type']}.{t['name']}{' (compatible)' if t['compatible'] else ''}",
translatorsSorted
)),
0
)
translator = chosen
translator = list(filter(
lambda t: [t['subsystem'], t['type'], t['name']] == translator.split(' (')[0].split('.'),
translatorsSorted,
))[0]
else:
translator = translator.split('.')
try:
if len(translator) == 3:
translator = list(filter(
lambda t: [t['subsystem'], t['type'], t['name']] == translator,
translatorsSorted,
))[0]
elif len(translator) == 1:
translator = list(filter(
lambda t: [t['name']] == translator,
translatorsSorted,
))[0]
except IndexError:
print(f"Could not find translator '{'.'.join(translator)}'", file=sys.stderr)
exit(1)
# raise error if any specified extra arg is unknown
unknown_extra_args = set(specified_extra_args.keys()) - set(translator['specialArgs'].keys())
if unknown_extra_args:
print(
f"Invalid extra args for translator '{translator['name']}': "
f" {', '.join(unknown_extra_args)}"
"\nPlease remove these parameters",
file=sys.stderr
)
exit(1)
# transform flags to bool
for argName, argVal in specified_extra_args.copy().items():
if translator['specialArgs'][argName]['type'] == 'flag':
if argVal.lower() in ('yes', 'y', 'true'):
specified_extra_args[argName] = True
elif argVal.lower() in ('no', 'n', 'false'):
specified_extra_args[argName] = False
else:
print(
f"Invalid value {argVal} for argument {argName}",
file=sys.stderr
)
specified_extra_args =\
{k: (bool(v) if translator['specialArgs'][k]['type'] == 'flag' else v ) \
for k, v in specified_extra_args.items()}
# on non-interactive session, assume defaults for unspecified extra args
if not self.io.is_interactive():
specified_extra_args.update(
{n: (True if v['type'] == 'flag' else v['default']) \
for n, v in translator['specialArgs'].items() \
if n not in specified_extra_args and 'default' in v}
)
unspecified_extra_args = \
{n: v for n, v in translator['specialArgs'].items() \
if n not in specified_extra_args}
# raise error if any extra arg unspecified in non-interactive session
if unspecified_extra_args:
if not self.io.is_interactive():
print(
f"Please specify the following extra arguments required by translator '{translator['name']}' :\n" \
', '.join(unspecified_extra_args.keys()),
file=sys.stderr
)
exit(1)
# interactively retrieve answers for unspecified extra arguments
else:
print(f"\nThe translator '{translator['name']}' requires additional options")
for arg_name, arg in unspecified_extra_args.items():
print('')
if arg['type'] == 'flag':
print(f"Please specify '{arg_name}': {arg['description']}")
specified_extra_args[arg_name] = self.confirm(f"{arg['description']}:", False)
else:
print(
f"Please specify '{arg_name}': {arg['description']}"
f"\nLeave emtpy for default ({arg['default']})")
print(f"Example values: " + ', '.join(arg['examples']))
specified_extra_args[arg_name] = self.ask(f"{arg_name}:", arg.get('default'))
# arguments for calling the translator nix module
translator_input = dict(
inputFiles=[],
inputDirectories=[source],
outputFile=outputDreamLock,
)
translator_input.update(specified_extra_args)
# build the translator bin
t = translator
translator_path = buildNixAttribute(
f"translators.translators.{t['subsystem']}.{t['type']}.{t['name']}.translateBin"
)
# dump translator arguments to json file and execute translator
print("\nTranslating upstream metadata")
with tempfile.NamedTemporaryFile("w") as input_json_file:
json.dump(translator_input, input_json_file, indent=2)
input_json_file.seek(0) # flushes write cache
# execute translator
sp.run(
[f"{translator_path}/bin/translate", input_json_file.name]
)
# raise error if output wasn't produced
if not os.path.isfile(outputDreamLock):
raise Exception(f"Translator failed to create dream.lock")
# read produced lock file
with open(outputDreamLock) as f:
lock = json.load(f)
# write translator information to lock file
combined = self.option('combined')
lock['generic']['translatedBy'] = f"{t['subsystem']}.{t['type']}.{t['name']}"
lock['generic']['translatorParams'] = " ".join([
'--translator',
f"{translator['subsystem']}.{translator['type']}.{translator['name']}",
] + (
["--combined", combined] if combined else []
) + [
f"--extra-arg {n}={v}" for n, v in specified_extra_args.items()
])
# add main package source
mainPackage = lock['generic']['mainPackage']
if mainPackage:
mainSource = sourceSpec.copy()
if not mainSource:
mainSource = dict(
type="unknown",
version="unknown",
)
lock['sources'][mainPackage] = mainSource
# clean up dependency graph
# remove empty entries
depGraph = lock['generic']['dependencyGraph']
if 'dependencyGraph' in lock['generic']:
for pname, deps in depGraph.copy().items():
if not deps:
del depGraph[pname]
# remove cyclic dependencies
edges = set()
for pname, deps in depGraph.items():
for dep in deps:
edges.add((pname, dep))
G = nx.DiGraph(sorted(list(edges)))
cycle_count = 0
removed_edges = []
for pname in list(depGraph.keys()):
try:
while True:
cycle = nx.find_cycle(G, pname)
cycle_count += 1
# remove_dependecy(indexed_pkgs, G, cycle[-1][0], cycle[-1][1])
node_from, node_to = cycle[-1][0], cycle[-1][1]
G.remove_edge(node_from, node_to)
depGraph[node_from].remove(node_to)
removed_edges.append((node_from, node_to))
except nx.NetworkXNoCycle:
continue
lock['generic']['dependenciesRemoved'] = {}
if removed_edges:
lock['generic']['dependenciesRemoved'] = {}
removed_cycles_text = 'Removed Cyclic dependencies:'
for node, removed_node in removed_edges:
removed_cycles_text += f"\n {node} -> {removed_node}"
if node not in lock['generic']['dependenciesRemoved']:
lock['generic']['dependenciesRemoved'][node] = []
lock['generic']['dependenciesRemoved'][node].append(removed_node)
print(removed_cycles_text)
# calculate combined hash if --combined was specified
if combined:
print("Building FOD of combined sources to retrieve output hash")
# remove hashes from lock file and init sourcesCombinedHash with empty string
strip_hashes_from_lock(lock)
lock['generic']['sourcesCombinedHash'] = ""
with open(outputDreamLock, 'w') as f:
json.dump(lock, f, indent=2)
# compute FOD hash of combined sources
proc = sp.run(
[
"nix", "build", "--impure", "-L", "--expr",
f"(import {dream2nix_src} {{}}).fetchSources {{ dreamLock = {outputDreamLock}; }}"
],
capture_output=True,
)
# read the output hash from the failed build log
match = re.search(r"FOD_PATH=(.*=)", proc.stderr.decode())
if not match:
print(proc.stderr.decode())
print(proc.stdout.decode())
raise Exception("Could not find FOD hash in FOD log")
hash = match.groups()[0]
print(f"Computed FOD hash: {hash}")
# store the hash in the lock
lock['generic']['sourcesCombinedHash'] = hash
# re-write dream.lock
checkLockJSON(order_dict(lock))
with open(outputDreamLock, 'w') as f:
json.dump(order_dict(lock), f, indent=2)
# create default.nix
template = callNixFunction(
'apps.apps.cli.templateDefaultNix',
dream2nixLocationRelative=os.path.relpath(dream2nix_src, output),
dreamLock = lock,
sourcePathRelative = os.path.relpath(source, os.path.dirname(outputDefaultNix))
)
# with open(f"{dream2nix_src}/apps/cli2/templateDefault.nix") as template:
if self.option('default-nix'):
with open(outputDefaultNix, 'w') as defaultNix:
defaultNix.write(template)
print(f"Created {output}/default.nix")
print(f"Created {output}/dream.lock")

src/apps/cli/commands/update.py (new file)

@ -0,0 +1,78 @@
import json
import os
import subprocess as sp
import sys
import tempfile
from cleo import Command
from cleo.helpers import option
from utils import buildNixFunction, callNixFunction
class UpdateCommand(Command):
description = (
"Update an existing dream2nix based package"
)
name = "update"
options = [
option("dream-lock", None, "dream.lock file or its parent directory", flag=False, value_required=True),
option("updater", None, "name of the updater module to use", flag=False),
option("new-version", None, "new package version", flag=False),
]
def handle(self):
if self.io.is_interactive():
self.line(f"\n{self.description}\n")
dreamLockFile = os.path.abspath(self.option("dream-lock"))
if not dreamLockFile.endswith('dream.lock'):
dreamLockFile = os.path.abspath(dreamLockFile + "/dream.lock")
# parse dream lock
with open(dreamLockFile) as f:
lock = json.load(f)
# find right updater
updater = self.option('updater')
if not updater:
updater = callNixFunction("updaters.getUpdaterName", dreamLock=dreamLockFile)
if updater is None:
print(
f"Could not find updater for this package. Specify manually via --updater",
file=sys.stderr,
)
exit(1)
# find new version
version = self.option('new-version')
if not version:
update_script = buildNixFunction(
"updaters.makeUpdateScript",
dreamLock=dreamLockFile,
updater=updater,
)
update_proc = sp.run([f"{update_script}/bin/run"], capture_output=True)
version = update_proc.stdout.decode().strip()
print(f"\nUpdating to version {version}")
cli_py = os.path.abspath(f"{__file__}/../../cli.py")
# update the main package source spec (the old hash is dropped and re-computed)
mainPackageSource = lock['sources'][lock['generic']['mainPackage']]
updatedSourceSpec = callNixFunction(
"fetchers.updateSource",
source=mainPackageSource,
newVersion=version,
)
lock['sources'][lock['generic']['mainPackage']] = updatedSourceSpec
with tempfile.NamedTemporaryFile("w", suffix="dream.lock") as tmpDreamLock:
json.dump(lock, tmpDreamLock, indent=2)
tmpDreamLock.seek(0) # flushes write cache
sp.run(
[
sys.executable, f"{cli_py}", "package", "--force", "--source", tmpDreamLock.name,
"--output", os.path.abspath(os.path.dirname(dreamLockFile))
]
+ lock['generic']['translatorParams'].split()
)
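To make the intended flow concrete, a hedged invocation sketch for the new update command follows; the lock path and version are placeholders and `cli` again stands for the wrapper's entry point:

  cli update --dream-lock ./dream.lock --new-version 2.0.0
  cli update --dream-lock ./dream.lock    # let the updater (e.g. githubNewestReleaseTag) discover the newest version

Either way the command rewrites the main package's source via fetchers.updateSource and then re-runs the package command with the translatorParams stored in the lock, so no interactive answers are required.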

src/apps/cli/default.nix

@ -23,7 +23,7 @@ in
program = writeScript "cli" '' program = writeScript "cli" ''
dream2nixSrc=${dream2nixWithExternals} \ dream2nixSrc=${dream2nixWithExternals} \
fetcherNames="${b.toString (lib.attrNames fetchers.fetchers)}" \ fetcherNames="${b.toString (lib.attrNames fetchers.fetchers)}" \
${cliPython}/bin/python ${./cli.py} "$@" ${cliPython}/bin/python ${./.}/cli.py "$@"
''; '';
templateDefaultNix = templateDefaultNix =

src/apps/cli/utils.py (new file)

@ -0,0 +1,122 @@
import json
import os
import subprocess as sp
import sys
import tempfile
from jsonschema import validate
dream2nix_src = os.environ.get("dream2nixSrc")
def checkLockJSON(lock):
try:
lock_schema_raw=open(dream2nix_src+"/specifications/dream-lock-schema.json").read()
lock_schema=json.loads(lock_schema_raw)
except Exception as e:
print(e)
try:
validate(lock, schema=lock_schema)
except Exception as e1:
print(e1)
def callNixFunction(function_path, **kwargs):
with tempfile.NamedTemporaryFile("w") as input_json_file:
json.dump(dict(**kwargs), input_json_file, indent=2)
input_json_file.seek(0) # flushes write cache
env = os.environ.copy()
env.update(dict(
FUNC_ARGS=input_json_file.name
))
proc = sp.run(
[
"nix", "eval", "--show-trace", "--impure", "--raw", "--expr",
f'''
let
d2n = (import {dream2nix_src} {{}});
in
builtins.toJSON (
(d2n.utils.makeCallableViaEnv d2n.{function_path}) {{}}
)
''',
],
capture_output=True,
env=env
)
if proc.returncode:
print(f"Failed calling nix function '{function_path}'", file=sys.stderr)
print(proc.stderr.decode(), file=sys.stderr)
exit(1)
# parse result data
return json.loads(proc.stdout)
def buildNixFunction(function_path, **kwargs):
with tempfile.NamedTemporaryFile("w") as input_json_file:
json.dump(dict(**kwargs), input_json_file, indent=2)
input_json_file.seek(0) # flushes write cache
env = os.environ.copy()
env.update(dict(
FUNC_ARGS=input_json_file.name
))
proc = sp.run(
[
"nix", "build", "--show-trace", "--impure", "-o", "tmp-result", "--expr",
f'''
let
d2n = (import {dream2nix_src} {{}});
in
(d2n.utils.makeCallableViaEnv d2n.{function_path}) {{}}
''',
],
capture_output=True,
env=env
)
if proc.returncode:
print(f"Failed calling nix function '{function_path}'", file=sys.stderr)
print(proc.stderr.decode(), file=sys.stderr)
exit(1)
# return store path of result
result = os.path.realpath("tmp-result")
os.remove("tmp-result")
return result
def buildNixAttribute(attribute_path):
proc = sp.run(
[
"nix", "build", "--show-trace", "--impure", "-o", "tmp-result", "--expr",
f"(import {dream2nix_src} {{}}).{attribute_path}",
],
capture_output=True,
)
if proc.returncode:
print(f"Failed to build '{attribute_path}'", file=sys.stderr)
print(proc.stderr.decode(), file=sys.stderr)
exit(1)
result = os.path.realpath("tmp-result")
os.remove("tmp-result")
return result
def list_translators_for_source(sourcePath):
translatorsList = callNixFunction(
"translators.translatorsForInput",
inputDirectories=[sourcePath],
inputFiles=[],
)
return list(sorted(translatorsList, key=lambda t: t['compatible']))
def order_dict(d):
return {k: order_dict(v) if isinstance(v, dict) else v
for k, v in sorted(d.items())}
def strip_hashes_from_lock(lock):
for source in lock['sources'].values():
if 'hash' in source:
del source['hash']

src/default.nix

@ -50,7 +50,7 @@ let
};
# updater modules to find newest package versions
finders = callPackageDream ./finders {};
updaters = callPackageDream ./updaters {};
# the translator modules and utils for all subsystems
translators = callPackageDream ./translators {};
@ -72,7 +72,7 @@ in
rec {
inherit apps builders fetchers finders dream2nixWithExternals translators utils;
inherit apps builders dream2nixWithExternals fetchers translators updaters utils;
# automatically find a suitable builder for a given generic lock
findBuilder = dreamLock:

src/fetchers/default.nix

@ -1,9 +1,9 @@
{
callPackageDream,
lib,
# dream2nix
callPackageDream,
utils,
# config
allowBuiltinFetchers,
...
}:
@ -25,6 +25,42 @@ rec {
combinedFetcher = callPackageDream ./combined-fetcher.nix { inherit defaultFetcher; };
constructSource =
{
type,
...
}@args:
let
fetcher = fetchers."${type}";
namesKeep = fetcher.inputs ++ [ "version" "type" "hash" ];
argsKeep = lib.filterAttrs (n: v: b.elem n namesKeep) args;
fetcherOutputs = fetcher.outputs args;
in
argsKeep
# if version was not provided, use the default version field
// (lib.optionalAttrs (! args ? version) {
version = args."${fetcher.versionField}";
})
# if the hash was not provided, calculate hash on the fly (impure)
// (lib.optionalAttrs (! args ? hash) {
hash = fetcherOutputs.calcHash "sha256";
});
updateSource =
{
source,
newVersion,
...
}:
let
fetcher = fetchers."${source.type}";
argsKeep = b.removeAttrs source [ "hash" ];
in
constructSource (argsKeep // {
version = newVersion;
"${fetcher.versionField}" = newVersion;
});
fetchSource = { source, }:
let
fetcher = fetchers."${source.type}";
@ -56,11 +92,10 @@ rec {
fetcher = fetchers.fetchurl;
fetcherOutputs = fetchers.http.outputs { url = shortcut; };
in
{
constructSource {
type = "fetchurl";
hash = fetcherOutputs.calcHash "sha256";
url = shortcut;
versionField = fetcher.versionField;
};
translateGitShortcut = translateGitShortcut =
@ -80,11 +115,10 @@ rec {
args = params // { inherit url; };
fetcherOutputs = fetcher.outputs (checkArgs "git" args);
in
{
constructSource {
type = "git";
hash = fetcherOutputs.calcHash "sha256";
inherit url;
versionField = fetcher.versionField;
}; };
translateRegularShortcut = translateRegularShortcut =
@ -111,11 +145,10 @@ rec {
Should be ${fetcherName}:${lib.concatStringsSep "/" fetcher.inputs}
''
else
args // {
constructSource (args // {
type = fetcherName;
hash = fetcherOutputs.calcHash "sha256";
versionField = fetcher.versionField;
});
};
in
if lib.hasPrefix "git+" (lib.head (lib.splitString ":" shortcut)) then
translateGitShortcut
@ -123,4 +156,5 @@ rec {
translateHttpUrl
else
translateRegularShortcut;
}

(git fetcher)

@ -19,9 +19,9 @@ in
versionField = "rev"; versionField = "rev";
outputs = { url, rev }@inp: outputs = { url, rev, ... }@inp:
if b.match "refs/(heads|tags)/.*" rev == null && builtins.match "[a-f0-9]*" rev == null then if b.match "refs/(heads|tags)/.*" rev == null && builtins.match "[a-f0-9]*" rev == null then
throw ''rev must either be a sha1 hash or "refs/heads/branch-name" or "refs/tags/tag-name"'' throw ''rev must either be a sha1 revision or "refs/heads/branch-name" or "refs/tags/tag-name"''
else else
let let

(github fetcher)

@ -19,6 +19,8 @@
versionField = "rev"; versionField = "rev";
defaultUpdater = "githubNewestReleaseTag";
outputs = { owner, repo, rev, ... }@inp: outputs = { owner, repo, rev, ... }@inp:
let let
b = builtins; b = builtins;
@ -60,15 +62,17 @@
fetched = hash:
if hash == null then
if allowBuiltinFetchers then
b.trace "using fetchGit"
(if allowBuiltinFetchers then
builtins.fetchGit {
inherit rev;
allRefs = true;
url = "https://github.com/${owner}/${repo}";
}
else
throw githubMissingHashErrorText (inp.pname or repo)
throw githubMissingHashErrorText (inp.pname or repo))
else
b.trace "using fetchFromGithub"
fetchFromGitHub {
inherit owner repo rev hash;
};

(pypi fetcher)

@ -15,6 +15,8 @@
versionField = "version"; versionField = "version";
defaultUpdater = "pypiNewestReleaseVersion";
outputs = { pname, version, extension ? "tar.gz", ... }@inp: outputs = { pname, version, extension ? "tar.gz", ... }@inp:
let let
b = builtins; b = builtins;

(nodejs translator)

@ -148,7 +148,7 @@
};
nodejs = {
description = "specify nodejs version";
description = "nodejs version to use for building";
default = lib.elemAt (lib.splitString "." nodejs.version) 0;
examples = [
"14"

src/updaters/default.nix (new file)

@ -0,0 +1,49 @@
{
curl,
gnugrep,
jq,
lib,
python3,
writeText,
# dream2nix inputs
callPackageDream,
fetchers,
utils,
...
}:
let
lockUtils = utils.dreamLock;
updaters = callPackageDream ./updaters.nix {};
getUpdaterName =
{
dreamLock,
}:
let
lock = utils.readDreamLock { inherit dreamLock; };
source = lockUtils.getMainPackageSource lock;
in
lock.updater
or fetchers.fetchers."${source.type}".defaultUpdater
or null;
makeUpdateScript =
{
dreamLock,
updater ? getUpdaterName { inherit dreamLock; },
}:
let
lock = utils.readDreamLock { inherit dreamLock; };
source = lockUtils.getMainPackageSource lock;
updater' = updaters."${updater}";
in
updater' source;
in
{
inherit getUpdaterName makeUpdateScript updaters;
}
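Since the updaters attribute set is exported from the top-level package set (see the inherit change above), getUpdaterName can also be probed directly from the command line. A hedged sketch, where <dream2nix> is a placeholder for however the framework sources are imported:

  nix eval --impure --expr '(import <dream2nix> {}).updaters.getUpdaterName { dreamLock = ./dream.lock; }'

This returns the updater name stored in the lock, falls back to the main source's defaultUpdater (e.g. githubNewestReleaseTag or pypiNewestReleaseVersion), or yields null if neither is defined.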

src/updaters/updaters.nix

@ -10,7 +10,6 @@
utils,
...
}:
{
githubNewestReleaseTag =
{

(utils module)

@ -1,4 +1,5 @@
{
bash,
coreutils,
lib,
nix,
@ -21,15 +22,15 @@ let
in
dreamLockUtils
//
overrideUtils
//
translatorUtils
//
rec {
dreamLock = dreamLockUtils;
inherit (dreamLockUtils) readDreamLock;
readTextFile = file: lib.replaceStrings [ "\r\n" ] [ "\n" ] (b.readFile file);
@ -69,6 +70,8 @@ rec {
# builder to create a shell script that has its own PATH
writePureShellScript = availablePrograms: script: writeScriptBin "run" ''
#!${bash}/bin/bash
export PATH="${lib.makeBinPath availablePrograms}"
tmpdir=$(${coreutils}/bin/mktemp -d)
cd $tmpdir

(dream-lock utils)

@ -12,7 +12,7 @@ let
}@args:
let
lock =
if b.isPath dreamLock then
if b.isPath dreamLock || b.isString dreamLock then
b.fromJSON (b.readFile dreamLock)
else
dreamLock;
@ -21,8 +21,11 @@ let
in
lock;
getMainPackageSource = dreamLock:
dreamLock.sources."${dreamLock.generic.mainPackage}";
in
{
inherit readDreamLock;
inherit getMainPackageSource readDreamLock;
}