nixpkgs/pkgs/development/compilers/graalvm/001_mx.py.patch

diff --git a/mx.py b/mx.py
index a0b9315..b7d67a0 100755
--- a/mx.py
+++ b/mx.py
@@ -238,21 +238,7 @@ def _check_file_with_sha1(path, sha1, sha1path, mustExist=True, newFile=False, l
f.write(value or sha1OfFile(path))
if exists(path):
- if sha1Check and sha1:
- if not _sha1CachedValid() or (newFile and sha1 != _sha1Cached()):
- logv('Create/update SHA1 cache file ' + sha1path)
- _writeSha1Cached()
-
- if sha1 != _sha1Cached():
- computedSha1 = sha1OfFile(path)
- if sha1 == computedSha1:
- warn('Fixing corrupt SHA1 cache file ' + sha1path)
- _writeSha1Cached(computedSha1)
- return True
- if logErrors:
- size = os.path.getsize(path)
- log_error('SHA1 of {} [size: {}] ({}) does not match expected value ({})'.format(TimeStampFile(path), size, computedSha1, sha1))
- return False
+ return True
elif mustExist:
if logErrors:
log_error("'{}' does not exist".format(path))
@@ -1057,46 +1043,8 @@ class SuiteImport:
version = import_dict.get("version")
suite_dir = None
version_from = import_dict.get("versionFrom")
- if version_from and version:
- abort("In import for '{}': 'version' and 'versionFrom' can not be both set".format(name), context=context)
- if version is None and version_from is None:
- if not (in_subdir and (importer.vc_dir != importer.dir or isinstance(importer, BinarySuite))):
- abort("In import for '{}': No version given and not a 'subdir' suite of the same repository".format(name), context=context)
- if importer.isSourceSuite():
- suite_dir = join(importer.vc_dir, name)
- version = importer.version()
- if urls is None:
- if not in_subdir:
- if import_dict.get("subdir") is None and importer.vc_dir != importer.dir:
- warn("In import for '{}': No urls given but 'subdir' is not set, assuming 'subdir=True'".format(name), context)
- in_subdir = True
- else:
- abort("In import for '{}': No urls given and not a 'subdir' suite".format(name), context=context)
- return SuiteImport(name, version, None, None, dynamicImport=dynamicImport, in_subdir=in_subdir, version_from=version_from, suite_dir=suite_dir)
- # urls a list of alternatives defined as dicts
- if not isinstance(urls, list):
- abort('suite import urls must be a list', context=context)
- urlinfos = []
- mainKind = None
- for urlinfo in urls:
- if isinstance(urlinfo, dict) and urlinfo.get('url') and urlinfo.get('kind'):
- kind = urlinfo.get('kind')
- if not VC.is_valid_kind(kind):
- abort('suite import kind ' + kind + ' illegal', context=context)
- else:
- abort('suite import url must be a dict with {"url", kind", attributes', context=context)
- vc = vc_system(kind)
- if kind != 'binary':
- assert not mainKind or mainKind == kind, "Only expecting one non-binary kind"
- mainKind = kind
- url = mx_urlrewrites.rewriteurl(urlinfo.get('url'))
- urlinfos.append(SuiteImportURLInfo(url, kind, vc))
- vc_kind = None
- if mainKind:
- vc_kind = mainKind
- elif urlinfos:
- vc_kind = 'binary'
- return SuiteImport(name, version, urlinfos, vc_kind, dynamicImport=dynamicImport, in_subdir=in_subdir, version_from=version_from, suite_dir=suite_dir)
+ suite_dir = join(get_env('MX_GIT_CACHE_DIR'), name)
+ return SuiteImport(name, version, [], None, True, in_subdir=in_subdir, version_from=version_from, suite_dir=suite_dir)
@staticmethod
def get_source_urls(source, kind=None):
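SuiteImport.parse no longer consults version pins or clone URLs at all; every import resolves to a directory under the MX_GIT_CACHE_DIR environment variable, which the Nix builder populates with pre-fetched checkouts, and the following hunks point each suite's vc_dir at the same cache. A hedged sketch of the resulting lookup (plain os.environ stands in for mx's get_env helper):

    import os
    from os.path import join

    def resolve_suite_dir(name):
        # Every suite is expected to sit, already checked out, under the
        # cache directory exported by the Nix build environment.
        return join(os.environ['MX_GIT_CACHE_DIR'], name)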
@@ -1467,8 +1415,6 @@ class Suite(object):
:type dists: list[Distribution]
"""
def __init__(self, mxDir, primary, internal, importing_suite, load, vc, vc_dir, dynamicallyImported=False):
- if primary is True and vc_dir is None:
- abort("The primary suite must be in a vcs repository")
self.imported_by = [] if primary else [importing_suite]
self.mxDir = mxDir
self.dir = dirname(mxDir)
@@ -1496,7 +1442,7 @@ class Suite(object):
self._outputRoot = None
self._preloaded_suite_dict = None
self.vc = vc
- self.vc_dir = vc_dir
+ self.vc_dir = get_env('MX_GIT_CACHE_DIR')
self._preload_suite_dict()
self._init_imports()
if load:
@@ -2405,7 +2351,9 @@ class Repository(SuiteConstituent):
class SourceSuite(Suite):
"""A source suite"""
def __init__(self, mxDir, primary=False, load=True, internal=False, importing_suite=None, dynamicallyImported=False):
- vc, vc_dir = VC.get_vc_root(dirname(mxDir), abortOnError=False)
+ vc, vc_dir_test = VC.get_vc_root(dirname(mxDir), abortOnError=False)
+ vc_dir = get_env('MX_GIT_CACHE_DIR')
+ warn("LOOKING FOR: " + mxDir)
Suite.__init__(self, mxDir, primary, internal, importing_suite, load, vc, vc_dir, dynamicallyImported=dynamicallyImported)
logvv("SourceSuite.__init__({}), got vc={}, vc_dir={}".format(mxDir, self.vc, self.vc_dir))
self.projects = []
@@ -2454,17 +2402,7 @@ class SourceSuite(Suite):
"""
Gets the release tag from VC or create a time based once if VC is unavailable
"""
- if snapshotSuffix not in self._releaseVersion:
- _version = self._get_early_suite_dict_property('version')
- if _version and self.getMxCompatibility().addVersionSuffixToExplicitVersion():
- if not self.is_release():
- _version = _version + '-' + snapshotSuffix
- if not _version:
- _version = self.vc.release_version_from_tags(self.vc_dir, self.name, snapshotSuffix=snapshotSuffix)
- if not _version:
- _version = 'unknown-{0}-{1}'.format(platform.node(), time.strftime('%Y-%m-%d_%H-%M-%S_%Z'))
- self._releaseVersion[snapshotSuffix] = _version
- return self._releaseVersion[snapshotSuffix]
+ return get_env('version')
def scm_metadata(self, abortOnError=False):
scm = self.scm
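Release versioning from VCS tags is likewise replaced by an environment read: the Nix derivation exports a `version` variable and every suite reports it verbatim. A sketch of the contract this creates (the assertion is an illustrative assumption; mx's get_env would simply return None if the variable were missing):

    import os

    def release_version():
        # The Nix derivation must export $version (e.g. version=19.1.0);
        # there is no VCS fallback left after this patch.
        v = os.environ.get('version')
        assert v is not None, 'the builder must export $version'
        return v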
@@ -2993,12 +2931,35 @@ def _find_suite_import(importing_suite, suite_import, fatalIfMissing=True, load=
Attempts to locate an existing suite in the local context
Returns the path to the mx.name dir if found else None
"""
- if mode == 'binary':
- # binary suites are always stored relative to the importing suite in mx-private directory
- return importing_suite._find_binary_suite_dir(suite_import.name)
+ warn("FAKE CLONE: " + str(suite_import))
+ if (suite_import.name == "truffle"):
+ return join(get_env('TMP'), "source", "truffle", "mx.truffle")
+ if (suite_import.name == "graal-nodejs"):
+ return join(get_env('MX_GIT_CACHE_DIR'), "graaljs", "graal-nodejs", "mx.graal-nodejs")
+ if (suite_import.name == "truffleruby"):
+ return join(get_env('MX_GIT_CACHE_DIR'), "truffleruby", "mx.truffleruby")
+ if (suite_import.name == "graalpython"):
+ return join(get_env('MX_GIT_CACHE_DIR'), "graalpython", "mx.graalpython")
+ if (suite_import.name == "vm"):
+ return join(get_env('TMP'), "source", "vm", "mx.vm")
+ if (suite_import.name == "fastr"):
+ return join(get_env('MX_GIT_CACHE_DIR'), "fastr", "mx.fastr")
+ if (suite_import.name == "sdk"):
+ return join(get_env('TMP'), "source", "sdk", "mx.sdk")
+ if (suite_import.name == "graal-js"):
+ return join(get_env('MX_GIT_CACHE_DIR'), "graaljs", "graal-js", "mx.graal-js")
+ if (suite_import.name == "regex"):
+ return join(get_env('TMP'), "source", "regex", "mx.regex")
+ if (suite_import.name == "substratevm"):
+ return join(get_env('TMP'), "source", "substratevm", "mx.substratevm")
+ if (suite_import.name == "tools"):
+ return join(get_env('TMP'), "source", "tools", "mx.tools")
+ if (suite_import.name == "sulong"):
+ return join(get_env('TMP'), "source", "sulong", "mx.sulong")
+ if (suite_import.name == "compiler"):
+ return join(get_env('TMP'), "source", "compiler", "mx.compiler")
else:
- # use the SuiteModel to locate a local source copy of the suite
- return _suitemodel.find_suite_dir(suite_import)
+ return join(get_env('MX_GIT_CACHE_DIR'), suite_import.name)
def _get_import_dir(url, mode):
"""Return directory where the suite will be cloned to"""
@@ -3816,7 +3777,7 @@ def getmtime(name):
"""
Wrapper for builtin open function that handles long path names on Windows.
"""
- return os.path.getmtime(_safe_path(name))
+ return 315532800
def stat(name):
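The constant 315532800 is 1980-01-01T00:00:00 UTC: the earliest timestamp the ZIP format can represent and a conventional fixed epoch for reproducible builds. Pinning getmtime to it makes every staleness comparison, and every timestamp derived from it, deterministic. A quick arithmetic check:

    import datetime

    # 1970..1980 spans 3652 days (leap days in 1972 and 1976);
    # 3652 * 86400 = 315532800 seconds.
    print(datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=315532800))
    # -> 1980-01-01 00:00:00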
@@ -4062,57 +4023,8 @@ def _attempt_download(url, path, jarEntryName=None):
return False
def download(path, urls, verbose=False, abortOnError=True, verifyOnly=False):
- """
- Attempts to downloads content for each URL in a list, stopping after the first successful download.
- If the content cannot be retrieved from any URL, the program is aborted, unless abortOnError=False.
- The downloaded content is written to the file indicated by `path`.
- """
- if not verifyOnly:
- ensure_dirname_exists(path)
- assert not path.endswith(os.sep)
-
- # https://docs.oracle.com/javase/7/docs/api/java/net/JarURLConnection.html
- jarURLPattern = re.compile('jar:(.*)!/(.*)')
- verify_errors = {}
- for url in urls:
- if not verifyOnly or verbose:
- log('Downloading ' + url + ' to ' + path)
- m = jarURLPattern.match(url)
- jarEntryName = None
- if m:
- url = m.group(1)
- jarEntryName = m.group(2)
-
- if not _opts.trust_http and (url.lower().startswith('http://') or url.lower().startswith('ftp://')):
- warn('Downloading from non-https URL {}. Use --trust-http mx option to suppress this warning.'.format(url))
-
- if verifyOnly:
- try:
- conn = _urlopen(url, timeout=10)
- conn.close()
- except (IOError, socket.timeout) as e:
- _suggest_tlsv1_error(e)
- verify_errors[url] = e
- else:
- for i in range(4):
- if i != 0:
- time.sleep(1)
- warn('Retry {} to download from {}'.format(i, url))
- if _attempt_download(url, path, jarEntryName):
- return True # Download was successful
-
- if verifyOnly and len(verify_errors) < len(urls): # verify-mode at least one success -> success
- return True
- else: # Either verification error or no download was successful
- msg = 'Could not download to ' + path + ' from any of the following URLs: ' + ', '.join(urls)
- if verifyOnly: # verify-mode -> print error details
- for url, e in verify_errors.items():
- msg += '\n ' + url + ': ' + str(e)
- if abortOnError:
- abort(msg)
- else:
- warn(msg)
- return False
+ print("FAKE download(path={} urls={} verbose={} abortOnError={} verifyOnly={})".format(path, urls, verbose, abortOnError, verifyOnly))
+ return True
def update_file(path, content, showDiff=False):
"""
@@ -7887,30 +7799,6 @@ class PackedResourceLibrary(ResourceLibrary):
def get_path(self, resolve):
extract_path = _make_absolute(self.extract_path, self.suite.dir)
- download_path = super(PackedResourceLibrary, self).get_path(resolve)
- if resolve and self._check_extract_needed(extract_path, download_path):
- extract_path_tmp = tempfile.mkdtemp(suffix=basename(extract_path), dir=dirname(extract_path))
- try:
- # extract archive
- Extractor.create(download_path).extract(extract_path_tmp)
- # ensure modification time is up to date
- os.utime(extract_path_tmp, None)
- logv("Moving temporary directory {} to {}".format(extract_path_tmp, extract_path))
- try:
- # attempt atomic overwrite
- os.rename(extract_path_tmp, extract_path)
- except OSError:
- # clean destination & re-try for cases where atomic overwrite doesn't work
- rmtree(extract_path, ignore_errors=True)
- os.rename(extract_path_tmp, extract_path)
- except OSError as ose:
- # Rename failed. Race with other process?
- if self._check_extract_needed(extract_path, download_path):
- # ok something really went wrong
- abort("Extracting {} failed!".format(download_path), context=ose)
- finally:
- rmtree(extract_path_tmp, ignore_errors=True)
-
return extract_path
def _check_download_needed(self):
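PackedResourceLibrary.get_path loses its download-and-extract machinery entirely; it now returns the expected extraction directory and nothing else, so the Nix build must unpack such resources in place before mx asks for them. The implied contract, as a sketch:

    import os

    def get_path(extract_path):
        # No extraction happens here any more; the directory must have
        # been prepared by the Nix builder (an assumption of this patch).
        assert os.path.isdir(extract_path), 'resource must be pre-extracted'
        return extract_path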
@@ -8430,7 +8318,7 @@ class VC(_with_metaclass(ABCMeta, object)):
:param str branch: a branch name
:param bool abortOnError: if True abort on error
"""
- abort(self.kind + " update_to_branch is not implemented")
+ self.run(['hg', vcdir] + cmd)
def is_release_from_tags(self, vcdir, prefix):
"""
@@ -8831,7 +8719,7 @@ class HgConfig(VC):
return None
def parent_info(self, vcdir, abortOnError=True):
- out = self.hg_command(vcdir, ["log", "-r", ".", "--template", "{author}|||{date|hgdate}"], abortOnError=abortOnError)
+ out = _check_output_str(["hg", '-R', vcdir, "log", "-r", ".", "--template", "{author}|||{date|hgdate}"])
author, date = out.split("|||")
ts, _ = date.split(" ")
return self._sanitize_parent_info({
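parent_info switches from mx's hg_command wrapper to a direct _check_output_str call, i.e. a plain subprocess invocation with no abortOnError handling. Roughly equivalent, as a sketch using only the standard library (the returned keys are illustrative; the real method feeds them through _sanitize_parent_info):

    import subprocess

    def parent_info(vcdir):
        out = subprocess.check_output(
            ['hg', '-R', vcdir, 'log', '-r', '.',
             '--template', '{author}|||{date|hgdate}']).decode()
        author, date = out.split('|||')
        ts, _ = date.split(' ')
        return {'author': author, 'author-ts': ts}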
@@ -14069,6 +13957,7 @@ class Archiver(SafeFileCreation):
def _add_zip(self, filename, archive_name, provenance):
self._add_provenance(archive_name, provenance)
+ os.utime(filename, (315532800, 315532800))
self.zf.write(filename, archive_name)
def _add_str_zip(self, data, archive_name, provenance):
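Finally, Archiver._add_zip clamps each file's mtime to the same 1980 epoch before zipfile records it, so archives come out bit-identical across builds; this pairs with the getmtime override above. A standalone sketch of the same normalization (file names here are illustrative):

    import os
    import zipfile

    ZIP_EPOCH = 315532800  # 1980-01-01, the earliest time a zip entry can carry

    def add_deterministic(zf, filename, archive_name):
        # Clamp atime and mtime so the entry's stored timestamp never varies.
        os.utime(filename, (ZIP_EPOCH, ZIP_EPOCH))
        zf.write(filename, archive_name)

    with zipfile.ZipFile('out.zip', 'w') as zf:
        add_deterministic(zf, 'mx.py', 'mx.py')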