mirror of https://github.com/ilyakooo0/nixpkgs.git (synced 2024-11-11 04:02:55 +03:00)

Commit e0950ae9ad: Merge master into staging-next
@@ -1,361 +1,3 @@
#! /usr/bin/env nix-shell
#! nix-shell -i python3 -p "python3.withPackages(ps: with ps; [ packaging requests toolz ])" -p git
#!/bin/sh
exec nix-shell -p "python3.withPackages(ps: with ps; [ packaging requests toolz ])" -p git --run pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py

"""
Update a Python package expression by passing in the `.nix` file, or the directory containing it.
You can pass in multiple files or paths.

You'll likely want to use
``
$ ./update-python-libraries ../../pkgs/development/python-modules/*
``
to update all libraries in that folder.
"""

import argparse
import logging
import os
import re
import requests
import toolz
from concurrent.futures import ThreadPoolExecutor as Pool
from packaging.version import Version as _Version
from packaging.version import InvalidVersion
from packaging.specifiers import SpecifierSet
import collections
import subprocess

INDEX = "https://pypi.io/pypi"
"""url of PyPI"""

EXTENSIONS = ['tar.gz', 'tar.bz2', 'tar', 'zip', '.whl']
"""Permitted file extensions. These are evaluated from left to right and the first occurance is returned."""

PRERELEASES = False

import logging
logging.basicConfig(level=logging.INFO)


class Version(_Version, collections.abc.Sequence):

    def __init__(self, version):
        super().__init__(version)
        # We cannot use `str(Version(0.04.21))` because that becomes `0.4.21`
        # https://github.com/avian2/unidecode/issues/13#issuecomment-354538882
        self.raw_version = version

    def __getitem__(self, i):
        return self._version.release[i]

    def __len__(self):
        return len(self._version.release)

    def __iter__(self):
        yield from self._version.release

def _get_values(attribute, text):
    """Match attribute in text and return all matches.

    :returns: List of matches.
    """
    regex = '{}\s+=\s+"(.*)";'.format(attribute)
    regex = re.compile(regex)
    values = regex.findall(text)
    return values

def _get_unique_value(attribute, text):
    """Match attribute in text and return unique match.

    :returns: Single match.
    """
    values = _get_values(attribute, text)
    n = len(values)
    if n > 1:
        raise ValueError("found too many values for {}".format(attribute))
    elif n == 1:
        return values[0]
    else:
        raise ValueError("no value found for {}".format(attribute))

def _get_line_and_value(attribute, text):
    """Match attribute in text. Return the line and the value of the attribute."""
    regex = '({}\s+=\s+"(.*)";)'.format(attribute)
    regex = re.compile(regex)
    value = regex.findall(text)
    n = len(value)
    if n > 1:
        raise ValueError("found too many values for {}".format(attribute))
    elif n == 1:
        return value[0]
    else:
        raise ValueError("no value found for {}".format(attribute))


def _replace_value(attribute, value, text):
    """Search and replace value of attribute in text."""
    old_line, old_value = _get_line_and_value(attribute, text)
    new_line = old_line.replace(old_value, value)
    new_text = text.replace(old_line, new_line)
    return new_text

def _fetch_page(url):
    r = requests.get(url)
    if r.status_code == requests.codes.ok:
        return r.json()
    else:
        raise ValueError("request for {} failed".format(url))


SEMVER = {
    'major' : 0,
    'minor' : 1,
    'patch' : 2,
}

def _determine_latest_version(current_version, target, versions):
    """Determine latest version, given `target`.
    """
    current_version = Version(current_version)

    def _parse_versions(versions):
        for v in versions:
            try:
                yield Version(v)
            except InvalidVersion:
                pass

    versions = _parse_versions(versions)

    index = SEMVER[target]

    ceiling = list(current_version[0:index])
    if len(ceiling) == 0:
        ceiling = None
    else:
        ceiling[-1]+=1
        ceiling = Version(".".join(map(str, ceiling)))

    # We do not want prereleases
    versions = SpecifierSet(prereleases=PRERELEASES).filter(versions)

    if ceiling is not None:
        versions = SpecifierSet(f"<{ceiling}").filter(versions)

    return (max(sorted(versions))).raw_version


def _get_latest_version_pypi(package, extension, current_version, target):
    """Get latest version and hash from PyPI."""
    url = "{}/{}/json".format(INDEX, package)
    json = _fetch_page(url)

    versions = json['releases'].keys()
    version = _determine_latest_version(current_version, target, versions)

    try:
        releases = json['releases'][version]
    except KeyError as e:
        raise KeyError('Could not find version {} for {}'.format(version, package)) from e
    for release in releases:
        if release['filename'].endswith(extension):
            # TODO: In case of wheel we need to do further checks!
            sha256 = release['digests']['sha256']
            break
    else:
        sha256 = None
    return version, sha256


def _get_latest_version_github(package, extension, current_version, target):
    raise ValueError("updating from GitHub is not yet supported.")


FETCHERS = {
    'fetchFromGitHub' : _get_latest_version_github,
    'fetchPypi' : _get_latest_version_pypi,
    'fetchurl' : _get_latest_version_pypi,
}


DEFAULT_SETUPTOOLS_EXTENSION = 'tar.gz'


FORMATS = {
    'setuptools' : DEFAULT_SETUPTOOLS_EXTENSION,
    'wheel' : 'whl'
}

def _determine_fetcher(text):
    # Count occurences of fetchers.
    nfetchers = sum(text.count('src = {}'.format(fetcher)) for fetcher in FETCHERS.keys())
    if nfetchers == 0:
        raise ValueError("no fetcher.")
    elif nfetchers > 1:
        raise ValueError("multiple fetchers.")
    else:
        # Then we check which fetcher to use.
        for fetcher in FETCHERS.keys():
            if 'src = {}'.format(fetcher) in text:
                return fetcher

def _determine_extension(text, fetcher):
    """Determine what extension is used in the expression.

    If we use:
    - fetchPypi, we check if format is specified.
    - fetchurl, we determine the extension from the url.
    - fetchFromGitHub we simply use `.tar.gz`.
    """
    if fetcher == 'fetchPypi':
        try:
            src_format = _get_unique_value('format', text)
        except ValueError as e:
            src_format = None # format was not given

        try:
            extension = _get_unique_value('extension', text)
        except ValueError as e:
            extension = None # extension was not given

        if extension is None:
            if src_format is None:
                src_format = 'setuptools'
            elif src_format == 'flit':
                raise ValueError("Don't know how to update a Flit package.")
            extension = FORMATS[src_format]

    elif fetcher == 'fetchurl':
        url = _get_unique_value('url', text)
        extension = os.path.splitext(url)[1]
        if 'pypi' not in url:
            raise ValueError('url does not point to PyPI.')

    elif fetcher == 'fetchFromGitHub':
        raise ValueError('updating from GitHub is not yet implemented.')

    return extension


def _update_package(path, target):

    # Read the expression
    with open(path, 'r') as f:
        text = f.read()

    # Determine pname.
    pname = _get_unique_value('pname', text)

    # Determine version.
    version = _get_unique_value('version', text)

    # First we check how many fetchers are mentioned.
    fetcher = _determine_fetcher(text)

    extension = _determine_extension(text, fetcher)

    new_version, new_sha256 = FETCHERS[fetcher](pname, extension, version, target)

    if new_version == version:
        logging.info("Path {}: no update available for {}.".format(path, pname))
        return False
    elif Version(new_version) <= Version(version):
        raise ValueError("downgrade for {}.".format(pname))
    if not new_sha256:
        raise ValueError("no file available for {}.".format(pname))

    text = _replace_value('version', new_version, text)
    text = _replace_value('sha256', new_sha256, text)

    with open(path, 'w') as f:
        f.write(text)

    logging.info("Path {}: updated {} from {} to {}".format(path, pname, version, new_version))

    result = {
        'path' : path,
        'target': target,
        'pname': pname,
        'old_version' : version,
        'new_version' : new_version,
        #'fetcher' : fetcher,
    }

    return result

def _update(path, target):

    # We need to read and modify a Nix expression.
    if os.path.isdir(path):
        path = os.path.join(path, 'default.nix')

    # If a default.nix does not exist, we quit.
    if not os.path.isfile(path):
        logging.info("Path {}: does not exist.".format(path))
        return False

    # If file is not a Nix expression, we quit.
    if not path.endswith(".nix"):
        logging.info("Path {}: does not end with `.nix`.".format(path))
        return False

    try:
        return _update_package(path, target)
    except ValueError as e:
        logging.warning("Path {}: {}".format(path, e))
        return False


def _commit(path, pname, old_version, new_version, **kwargs):
    """Commit result.
    """

    msg = f'python: {pname}: {old_version} -> {new_version}'

    try:
        subprocess.check_call(['git', 'add', path])
        subprocess.check_call(['git', 'commit', '-m', msg])
    except subprocess.CalledProcessError as e:
        subprocess.check_call(['git', 'checkout', path])
        raise subprocess.CalledProcessError(f'Could not commit {path}') from e

    return True


def main():

    parser = argparse.ArgumentParser()
    parser.add_argument('package', type=str, nargs='+')
    parser.add_argument('--target', type=str, choices=SEMVER.keys(), default='major')
    parser.add_argument('--commit', action='store_true', help='Create a commit for each package update')

    args = parser.parse_args()
    target = args.target

    packages = list(map(os.path.abspath, args.package))

    logging.info("Updating packages...")

    # Use threads to update packages concurrently
    with Pool() as p:
        results = list(p.map(lambda pkg: _update(pkg, target), packages))

    logging.info("Finished updating packages.")

    # Commits are created sequentially.
    if args.commit:
        logging.info("Committing updates...")
        list(map(lambda x: _commit(**x), filter(bool, results)))
        logging.info("Finished committing updates")

    count = sum(map(bool, results))
    logging.info("{} package(s) updated".format(count))


if __name__ == '__main__':
    main()
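
Aside: a minimal, self-contained sketch (not part of the commit; the version numbers are made-up examples) of how the script's `--target` flag turns the current version into an exclusive ceiling, mirroring `_determine_latest_version` and `SEMVER` above:

# Sketch only: mirrors the ceiling logic of _determine_latest_version above.
from packaging.version import Version

SEMVER = {'major': 0, 'minor': 1, 'patch': 2}

def ceiling_for(current, target):
    # Take the first SEMVER[target] release components and bump the last one;
    # an empty prefix (target == 'major') means there is no ceiling at all.
    parts = list(Version(current).release[:SEMVER[target]])
    if not parts:
        return None
    parts[-1] += 1
    return Version(".".join(map(str, parts)))

print(ceiling_for("1.2.3", "major"))  # None -> any newer release is a candidate
print(ceiling_for("1.2.3", "minor"))  # 2    -> only versions < 2 are considered
print(ceiling_for("1.2.3", "patch"))  # 1.3  -> only versions < 1.3 are considered
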
@@ -29,21 +29,6 @@ let
  installOptions =
    "${mysqldAndInstallOptions} ${lib.optionalString isMysqlAtLeast57 "--insecure"}";

  myCnf = pkgs.writeText "my.cnf"
    ''
      [mysqld]
      port = ${toString cfg.port}
      datadir = ${cfg.dataDir}
      ${optionalString (cfg.bind != null) "bind-address = ${cfg.bind}" }
      ${optionalString (cfg.replication.role == "master" || cfg.replication.role == "slave") "log-bin=mysql-bin"}
      ${optionalString (cfg.replication.role == "master" || cfg.replication.role == "slave") "server-id = ${toString cfg.replication.serverId}"}
      ${optionalString (cfg.ensureUsers != [])
      ''
        plugin-load-add = auth_socket.so
      ''}
      ${cfg.extraOptions}
    '';

in

{
@@ -242,6 +227,21 @@ in

    environment.systemPackages = [mysql];

    environment.etc."my.cnf".text =
      ''
        [mysqld]
        port = ${toString cfg.port}
        datadir = ${cfg.dataDir}
        ${optionalString (cfg.bind != null) "bind-address = ${cfg.bind}" }
        ${optionalString (cfg.replication.role == "master" || cfg.replication.role == "slave") "log-bin=mysql-bin"}
        ${optionalString (cfg.replication.role == "master" || cfg.replication.role == "slave") "server-id = ${toString cfg.replication.serverId}"}
        ${optionalString (cfg.ensureUsers != [])
        ''
          plugin-load-add = auth_socket.so
        ''}
        ${cfg.extraOptions}
      '';

    systemd.services.mysql = let
      hasNotify = (cfg.package == pkgs.mariadb);
    in {
@@ -263,7 +263,7 @@ in
        if ! test -e ${cfg.dataDir}/mysql; then
          mkdir -m 0700 -p ${cfg.dataDir}
          chown -R ${cfg.user} ${cfg.dataDir}
          ${mysql}/bin/mysql_install_db ${installOptions}
          ${mysql}/bin/mysql_install_db --defaults-file=/etc/my.cnf ${installOptions}
          touch /tmp/mysql_init
        fi

@@ -274,7 +274,7 @@ in
      serviceConfig = {
        Type = if hasNotify then "notify" else "simple";
        RuntimeDirectory = "mysqld";
        ExecStart = "${mysql}/bin/mysqld --defaults-extra-file=${myCnf} ${mysqldOptions}";
        ExecStart = "${mysql}/bin/mysqld --defaults-file=/etc/my.cnf ${mysqldOptions}";
      };

      postStart = ''
@@ -60,7 +60,7 @@ stdenv.mkDerivation rec {
    name = "monero-wallet-gui";
    exec = "monero-wallet-gui";
    icon = "monero";
    desktopName = "Monero Wallet";
    desktopName = "Monero";
    genericName = "Wallet";
    categories = "Application;Network;Utility;";
  };

@@ -1,14 +1,14 @@
{stdenv, fetchurl, libogg, libao, pkgconfig, libopus, flac}:
{stdenv, fetchurl, libogg, libao, pkgconfig, flac, opusfile, libopusenc}:

stdenv.mkDerivation rec {
  name = "opus-tools-0.1.10";
  name = "opus-tools-0.2";
  src = fetchurl {
    url = "http://downloads.xiph.org/releases/opus/${name}.tar.gz";
    sha256 = "135jfb9ny3xvd27idsxj7j5ns90lslbyrq70cq3bfwcls4r7add2";
    sha256 = "11pzl27s4vcz4m18ch72nivbhww2zmzn56wspb7rll1y1nq6rrdl";
  };

  nativeBuildInputs = [ pkgconfig ];
  buildInputs = [ libogg libao libopus flac ];
  buildInputs = [ libogg libao flac opusfile libopusenc ];

  meta = {
    description = "Tools to work with opus encoded audio streams";
@@ -12,7 +12,7 @@

buildGoPackage rec {
  name = "aminal-${version}";
  version = "0.7.8";
  version = "0.7.12";

  goPackagePath = "github.com/liamg/aminal";

@@ -36,7 +36,7 @@ buildGoPackage rec {
    owner = "liamg";
    repo = "aminal";
    rev = "v${version}";
    sha256 = "02gamvvs56w4zwqdvalbsgb2gbh0398gl7qk36pgyqkcgj3bcwv8";
    sha256 = "1ak5g2i4ggi00b4q7qigfwsrwb5rvswjjbr2hp9kyxd45nycb0g4";
  };

  preBuild = ''
@@ -1,4 +1,4 @@
{ stdenv, fetchFromGitHub, go }:
{ stdenv, fetchFromGitHub, go, removeReferencesTo }:

stdenv.mkDerivation rec {
  name = "cni-${version}";
@@ -11,7 +11,9 @@ stdenv.mkDerivation rec {
    sha256 = "00ajs2r5r2z3l0vqwxrcwhjfc9px12qbcv5vnvs2mdipvvls1y2y";
  };

  buildInputs = [ go ];
  buildInputs = [ removeReferencesTo go ];

  GOCACHE = "off";

  buildPhase = ''
    patchShebangs build.sh
@@ -23,6 +25,10 @@ stdenv.mkDerivation rec {
    mv bin/cnitool $out/bin
  '';

  preFixup = ''
    find $out/bin -type f -exec remove-references-to -t ${go} '{}' +
  '';

  meta = with stdenv.lib; {
    description = "Container Network Interface - networking for Linux containers";
    license = licenses.asl20;
@@ -1,4 +1,4 @@
{ stdenv, lib, fetchFromGitHub, go }:
{ stdenv, lib, fetchFromGitHub, go, removeReferencesTo }:

stdenv.mkDerivation rec {
  name = "cni-plugins-${version}";
@@ -11,7 +11,9 @@ stdenv.mkDerivation rec {
    sha256 = "1sywllwnr6lc812sgkqjdd3y10r82shl88dlnwgnbgzs738q2vp2";
  };

  buildInputs = [ go ];
  buildInputs = [ removeReferencesTo go ];

  GOCACHE = "off";

  buildPhase = ''
    patchShebangs build.sh
@@ -23,6 +25,10 @@ stdenv.mkDerivation rec {
    mv bin/* $out/bin
  '';

  preFixup = ''
    find $out/bin -type f -exec remove-references-to -t ${go} '{}' +
  '';

  meta = with lib; {
    description = "Some standard networking plugins, maintained by the CNI team";
    homepage = https://github.com/containernetworking/plugins;
@@ -3,13 +3,13 @@
with stdenv.lib;
stdenv.mkDerivation rec {
  name = "bitlbee-discord-${version}";
  version = "0.4.1";
  version = "0.4.2";

  src = fetchFromGitHub {
    rev = version;
    owner = "sm00th";
    repo = "bitlbee-discord";
    sha256 = "1n3xw5mcmg7224r09gbm39bd6h2158dwl6jx21290636b4345f4c";
    sha256 = "02pigk2vbz0jdz11f96sygdvp1j762yjn62h124fkcsc070g7a2f";
  };

  nativeBuildInputs = [ autoreconfHook pkgconfig ];
@@ -1,15 +1,15 @@
{ stdenv, fetchurl, gd, ncurses }:
{ stdenv, fetchurl, gd, ncurses, sqlite }:

stdenv.mkDerivation rec {
  name = "vnstat-${version}";
  version = "1.18";
  version = "2.1";

  src = fetchurl {
    sha256 = "1mc7qqvrnl0zyhgh8n7wx1g1cbwq74xpvbz8rfjmyi77p693a6fp";
    sha256 = "0yk0x6bg9f36dsslhayyyi8fg04yvzjzqkjmlrcsrv6nnggchb6i";
    url = "https://humdi.net/vnstat/${name}.tar.gz";
  };

  buildInputs = [ gd ncurses ];
  buildInputs = [ gd ncurses sqlite ];

  postPatch = ''
    substituteInPlace src/cfg.c --replace /usr/local $out
@@ -5,12 +5,12 @@ let
in
buildPythonApplication rec {
  pname = "todoman";
  version = "3.4.1";
  version = "3.5.0";
  name = "${pname}-${version}";

  src = fetchPypi {
    inherit pname version;
    sha256 = "1rvid1rklvgvsf6xmxd91j2fi46v4fzn5z6zbs5yn0wpb0k605r5";
    sha256 = "051qjdpwif06x7qspnb4pfwdhb8nnmz99yqcp4kla5hv0n3jh0w9";
  };

  LOCALE_ARCHIVE = stdenv.lib.optionalString stdenv.isLinux
@@ -29,13 +29,6 @@ buildPythonApplication rec {
  makeWrapperArgs = [ "--set LOCALE_ARCHIVE ${glibcLocales}/lib/locale/locale-archive"
                      "--set CHARSET en_us.UTF-8" ];

  patches = [
    (fetchpatch {
      url = "https://github.com/pimutils/todoman/commit/3e191111b72df9ec91a773befefa291799374422.patch";
      sha256 = "12mskbp0d8p2lllkxm3m9wyy2hsbnz2qs297civsc3ly2l5bcrag";
    })
  ];

  preCheck = ''
    # Remove one failing test that only checks whether the command line works
    rm tests/test_main.py
@@ -7,8 +7,8 @@ let
in

stdenv.mkDerivation rec {
  srcVersion = "nov18a";
  version = "20181101_a";
  srcVersion = "dec18a";
  version = "20181201_a";
  name = "gildas-${version}";

  src = fetchurl {
@@ -16,7 +16,7 @@ stdenv.mkDerivation rec {
    # source code of the previous release to a different directory
    urls = [ "http://www.iram.fr/~gildas/dist/gildas-src-${srcVersion}.tar.gz"
             "http://www.iram.fr/~gildas/dist/archive/gildas/gildas-src-${srcVersion}.tar.gz" ];
    sha256 = "1dl2v8y6vrwaxm3b7nf6dv3ipzybhlhy2kxwnwgc7gqz5704251v";
    sha256 = "f295b5b7f999c0d746a52b307af7b7bdbed0d9b3d87100a6a102e0cc64f3a9bd";
  };

  enableParallelBuilding = true;
@@ -3,7 +3,7 @@
# Contact: stdenv.lib.maintainers.fuuzetsu

{ stdenv, Agda, glibcLocales
, writeScriptBin
, writeShellScriptBin
, extension ? (self: super: {})
}:

@@ -77,8 +77,8 @@ let
  buildInputs = let
    # Makes a wrapper available to the user. Very useful in
    # nix-shell where all dependencies are -i'd.
    agdaWrapper = writeScriptBin "agda" ''
      ${self.agdaWithArgs} "$@"
    agdaWrapper = writeShellScriptBin "agda" ''
      exec ${self.agdaWithArgs} "$@"
    '';
  in [agdaWrapper] ++ self.buildDepends;
};
@@ -2,17 +2,22 @@

stdenv.mkDerivation rec {
  name = "elementary-icon-theme-${version}";
  version = "5.0";
  version = "5.0.1";

  src = fetchFromGitHub {
    owner = "elementary";
    repo = "icons";
    rev = version;
    sha256 = "146s26q4bb5sag35iv42hrnbdciam2ajl7s5s5jayli5vp8bw08w";
    sha256 = "1rw924b3ixfdff368dpv4vgsykwncmrvj9a6yfss0cf236xnvr9b";
  };

  nativeBuildInputs = [ meson ninja python3 gtk3 ];

  # Disable installing gimp and inkscape palette files
  mesonFlags = [
    "-Dpalettes=false"
  ];

  postPatch = ''
    chmod +x meson/symlink.py
    patchShebangs .
pkgs/development/compilers/ghc/8.6.3.nix (new file, 232 lines)
@@ -0,0 +1,232 @@
{ stdenv, targetPackages

# build-tools
, bootPkgs
, autoconf, automake, coreutils, fetchurl, fetchpatch, perl, python3, m4, sphinx

, libiconv ? null, ncurses

, useLLVM ? !stdenv.targetPlatform.isx86 || (stdenv.targetPlatform.isMusl && stdenv.hostPlatform != stdenv.targetPlatform)
, # LLVM is conceptually a run-time-only depedendency, but for
  # non-x86, we need LLVM to bootstrap later stages, so it becomes a
  # build-time dependency too.
  buildLlvmPackages, llvmPackages

, # If enabled, GHC will be built with the GPL-free but slower integer-simple
  # library instead of the faster but GPLed integer-gmp library.
  enableIntegerSimple ? !(stdenv.lib.any (stdenv.lib.meta.platformMatch stdenv.hostPlatform) gmp.meta.platforms), gmp

, # If enabled, use -fPIC when compiling static libs.
  enableRelocatedStaticLibs ? stdenv.targetPlatform != stdenv.hostPlatform

, # Whether to build dynamic libs for the standard library (on the target
  # platform). Static libs are always built.
  enableShared ? !stdenv.targetPlatform.isWindows && !stdenv.targetPlatform.useiOSPrebuilt

, # Whetherto build terminfo.
  enableTerminfo ? !stdenv.targetPlatform.isWindows

, # What flavour to build. An empty string indicates no
  # specific flavour and falls back to ghc default values.
  ghcFlavour ? stdenv.lib.optionalString (stdenv.targetPlatform != stdenv.hostPlatform) "perf-cross"
}:

assert !enableIntegerSimple -> gmp != null;

let
  inherit (stdenv) buildPlatform hostPlatform targetPlatform;

  inherit (bootPkgs) ghc;

  # TODO(@Ericson2314) Make unconditional
  targetPrefix = stdenv.lib.optionalString
    (targetPlatform != hostPlatform)
    "${targetPlatform.config}-";

  buildMK = ''
    BuildFlavour = ${ghcFlavour}
    ifneq \"\$(BuildFlavour)\" \"\"
    include mk/flavours/\$(BuildFlavour).mk
    endif
    DYNAMIC_GHC_PROGRAMS = ${if enableShared then "YES" else "NO"}
    INTEGER_LIBRARY = ${if enableIntegerSimple then "integer-simple" else "integer-gmp"}
  '' + stdenv.lib.optionalString (targetPlatform != hostPlatform) ''
    Stage1Only = ${if targetPlatform.system == hostPlatform.system then "NO" else "YES"}
    CrossCompilePrefix = ${targetPrefix}
    HADDOCK_DOCS = NO
    BUILD_SPHINX_HTML = NO
    BUILD_SPHINX_PDF = NO
  '' + stdenv.lib.optionalString enableRelocatedStaticLibs ''
    GhcLibHcOpts += -fPIC
    GhcRtsHcOpts += -fPIC
  '' + stdenv.lib.optionalString targetPlatform.useAndroidPrebuilt ''
    EXTRA_CC_OPTS += -std=gnu99
  '';

  # Splicer will pull out correct variations
  libDeps = platform: stdenv.lib.optional enableTerminfo [ ncurses ]
    ++ stdenv.lib.optional (!enableIntegerSimple) gmp
    ++ stdenv.lib.optional (platform.libc != "glibc" && !targetPlatform.isWindows) libiconv;

  toolsForTarget =
    if hostPlatform == buildPlatform then
      [ targetPackages.stdenv.cc ] ++ stdenv.lib.optional useLLVM llvmPackages.llvm
    else assert targetPlatform == hostPlatform; # build != host == target
      [ stdenv.cc ] ++ stdenv.lib.optional useLLVM buildLlvmPackages.llvm;

  targetCC = builtins.head toolsForTarget;

in
stdenv.mkDerivation (rec {
  version = "8.6.3";
  name = "${targetPrefix}ghc-${version}";

  src = fetchurl {
    url = "https://downloads.haskell.org/~ghc/${version}/ghc-${version}-src.tar.xz";
    sha256 = "08vzq0dpg4a39bs61j6rq4z0n7jby5mc69h4m25xhd8rjyvkg7lz";
  };

  enableParallelBuilding = true;

  outputs = [ "out" "doc" ];

  patches = [(fetchpatch rec { # https://phabricator.haskell.org/D5123
    url = "http://tarballs.nixos.org/sha256/${sha256}";
    name = "D5123.diff";
    sha256 = "0nhqwdamf2y4gbwqxcgjxs0kqx23w9gv5kj0zv6450dq19rji82n";
  })];

  postPatch = "patchShebangs .";

  # GHC is a bit confused on its cross terminology.
  preConfigure = ''
    for env in $(env | grep '^TARGET_' | sed -E 's|\+?=.*||'); do
      export "''${env#TARGET_}=''${!env}"
    done
    # GHC is a bit confused on its cross terminology, as these would normally be
    # the *host* tools.
    export CC="${targetCC}/bin/${targetCC.targetPrefix}cc"
    export CXX="${targetCC}/bin/${targetCC.targetPrefix}cxx"
    # Use gold to work around https://sourceware.org/bugzilla/show_bug.cgi?id=16177
    export LD="${targetCC.bintools}/bin/${targetCC.bintools.targetPrefix}ld${stdenv.lib.optionalString targetPlatform.isAarch32 ".gold"}"
    export AS="${targetCC.bintools.bintools}/bin/${targetCC.bintools.targetPrefix}as"
    export AR="${targetCC.bintools.bintools}/bin/${targetCC.bintools.targetPrefix}ar"
    export NM="${targetCC.bintools.bintools}/bin/${targetCC.bintools.targetPrefix}nm"
    export RANLIB="${targetCC.bintools.bintools}/bin/${targetCC.bintools.targetPrefix}ranlib"
    export READELF="${targetCC.bintools.bintools}/bin/${targetCC.bintools.targetPrefix}readelf"
    export STRIP="${targetCC.bintools.bintools}/bin/${targetCC.bintools.targetPrefix}strip"

    echo -n "${buildMK}" > mk/build.mk
    sed -i -e 's|-isysroot /Developer/SDKs/MacOSX10.5.sdk||' configure
  '' + stdenv.lib.optionalString (!stdenv.isDarwin) ''
    export NIX_LDFLAGS+=" -rpath $out/lib/ghc-${version}"
  '' + stdenv.lib.optionalString stdenv.isDarwin ''
    export NIX_LDFLAGS+=" -no_dtrace_dof"
  '' + stdenv.lib.optionalString targetPlatform.useAndroidPrebuilt ''
    sed -i -e '5i ,("armv7a-unknown-linux-androideabi", ("e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64", "cortex-a8", ""))' llvm-targets
  '' + stdenv.lib.optionalString targetPlatform.isMusl ''
    echo "patching llvm-targets for musl targets..."
    echo "Cloning these existing '*-linux-gnu*' targets:"
    grep linux-gnu llvm-targets | sed 's/^/ /'
    echo "(go go gadget sed)"
    sed -i 's,\(^.*linux-\)gnu\(.*\)$,\0\n\1musl\2,' llvm-targets
    echo "llvm-targets now contains these '*-linux-musl*' targets:"
    grep linux-musl llvm-targets | sed 's/^/ /'

    echo "And now patching to preserve '-musleabi' as done with '-gnueabi'"
    # (aclocal.m4 is actual source, but patch configure as well since we don't re-gen)
    for x in configure aclocal.m4; do
      substituteInPlace $x \
        --replace '*-android*|*-gnueabi*)' \
                  '*-android*|*-gnueabi*|*-musleabi*)'
    done
  '';
  # TODO(@Ericson2314): Always pass "--target" and always prefix.
  configurePlatforms = [ "build" "host" ]
    ++ stdenv.lib.optional (targetPlatform != hostPlatform) "target";
  # `--with` flags for libraries needed for RTS linker
  configureFlags = [
    "--datadir=$doc/share/doc/ghc"
    "--with-curses-includes=${ncurses.dev}/include" "--with-curses-libraries=${ncurses.out}/lib"
  ] ++ stdenv.lib.optional (targetPlatform == hostPlatform && !enableIntegerSimple) [
    "--with-gmp-includes=${targetPackages.gmp.dev}/include" "--with-gmp-libraries=${targetPackages.gmp.out}/lib"
  ] ++ stdenv.lib.optional (targetPlatform == hostPlatform && hostPlatform.libc != "glibc" && !targetPlatform.isWindows) [
    "--with-iconv-includes=${libiconv}/include" "--with-iconv-libraries=${libiconv}/lib"
  ] ++ stdenv.lib.optionals (targetPlatform != hostPlatform) [
    "--enable-bootstrap-with-devel-snapshot"
  ] ++ stdenv.lib.optionals (targetPlatform.isAarch32) [
    "CFLAGS=-fuse-ld=gold"
    "CONF_GCC_LINKER_OPTS_STAGE1=-fuse-ld=gold"
    "CONF_GCC_LINKER_OPTS_STAGE2=-fuse-ld=gold"
  ] ++ stdenv.lib.optionals (targetPlatform.isDarwin && targetPlatform.isAarch64) [
    # fix for iOS: https://www.reddit.com/r/haskell/comments/4ttdz1/building_an_osxi386_to_iosarm64_cross_compiler/d5qvd67/
    "--disable-large-address-space"
  ];

  # Make sure we never relax`$PATH` and hooks support for compatability.
  strictDeps = true;

  nativeBuildInputs = [
    perl autoconf automake m4 python3 sphinx
    ghc bootPkgs.alex bootPkgs.happy bootPkgs.hscolour
  ];

  # For building runtime libs
  depsBuildTarget = toolsForTarget;

  buildInputs = [ perl ] ++ (libDeps hostPlatform);

  propagatedBuildInputs = [ targetPackages.stdenv.cc ]
    ++ stdenv.lib.optional useLLVM llvmPackages.llvm;

  depsTargetTarget = map stdenv.lib.getDev (libDeps targetPlatform);
  depsTargetTargetPropagated = map (stdenv.lib.getOutput "out") (libDeps targetPlatform);

  # required, because otherwise all symbols from HSffi.o are stripped, and
  # that in turn causes GHCi to abort
  stripDebugFlags = [ "-S" ] ++ stdenv.lib.optional (!targetPlatform.isDarwin) "--keep-file-symbols";

  checkTarget = "test";

  hardeningDisable = [ "format" ] ++ stdenv.lib.optional stdenv.targetPlatform.isMusl "pie";

  postInstall = ''
    for bin in "$out"/lib/${name}/bin/*; do
      isELF "$bin" || continue
      paxmark m "$bin"
    done

    # Install the bash completion file.
    install -D -m 444 utils/completion/ghc.bash $out/share/bash-completion/completions/${targetPrefix}ghc

    # Patch scripts to include "readelf" and "cat" in $PATH.
    for i in "$out/bin/"*; do
      test ! -h $i || continue
      egrep --quiet '^#!' <(head -n 1 $i) || continue
      sed -i -e '2i export PATH="$PATH:${stdenv.lib.makeBinPath [ targetPackages.stdenv.cc.bintools coreutils ]}"' $i
    done
  '';

  passthru = {
    inherit bootPkgs targetPrefix;

    inherit llvmPackages;
    inherit enableShared;

    # Our Cabal compiler name
    haskellCompilerName = "ghc-8.6.3";
  };

  meta = {
    homepage = http://haskell.org/ghc;
    description = "The Glasgow Haskell Compiler";
    maintainers = with stdenv.lib.maintainers; [ marcweber andres peti ];
    inherit (ghc.meta) license platforms;
  };

} // stdenv.lib.optionalAttrs targetPlatform.useAndroidPrebuilt {
  dontStrip = true;
  dontPatchELF = true;
  noAuditTmpdir = true;
})
@@ -12,6 +12,8 @@
, namePrefix
, bootstrapped-pip
, flit
, writeScript
, update-python-libraries
}:

let
@@ -20,7 +22,8 @@ let
  wheel-specific = import ./build-python-package-wheel.nix { };
  common = import ./build-python-package-common.nix { inherit python bootstrapped-pip; };
  mkPythonDerivation = import ./mk-python-derivation.nix {
    inherit lib config python wrapPython setuptools unzip ensureNewerSourcesForZipFilesHook toPythonModule namePrefix;
    inherit lib config python wrapPython setuptools unzip ensureNewerSourcesForZipFilesHook;
    inherit toPythonModule namePrefix writeScript update-python-libraries;
  };
in

@@ -10,6 +10,8 @@
# Whether the derivation provides a Python module or not.
, toPythonModule
, namePrefix
, writeScript
, update-python-libraries
}:

{ name ? "${attrs.pname}-${attrs.version}"
@@ -64,7 +66,7 @@ if disabled
then throw "${name} not supported for interpreter ${python.executable}"
else

toPythonModule (python.stdenv.mkDerivation (builtins.removeAttrs attrs [
let self = toPythonModule (python.stdenv.mkDerivation (builtins.removeAttrs attrs [
  "disabled" "checkInputs" "doCheck" "doInstallCheck" "dontWrapPythonPrograms" "catchConflicts"
] // {

@@ -106,4 +108,14 @@ toPythonModule (python.stdenv.mkDerivation (builtins.removeAttrs attrs [
    platforms = python.meta.platforms;
    isBuildPythonPackage = python.meta.platforms;
  } // meta;
}))
}));

passthru = {
  updateScript = let
    filename = builtins.head (lib.splitString ":" self.meta.position);
  in writeScript "update-python" ''
    #!${python.stdenv.shell}
    ${update-python-libraries} ${filename}
  '';
};

in lib.extendDerivation true passthru self

@@ -0,0 +1,12 @@
{ python3, runCommand, git }:

runCommand "update-python-libraries" {
  buildInputs = [
    (python3.withPackages(ps: with ps; [ packaging requests toolz ]))
    git
  ];
} ''
  cp ${./update-python-libraries.py} $out
  patchShebangs $out
  substituteInPlace $out --replace 'GIT = "git"' 'GIT = "${git}/bin/git"'
''

@@ -0,0 +1,362 @@
#!/usr/bin/env python3

"""
Update a Python package expression by passing in the `.nix` file, or the directory containing it.
You can pass in multiple files or paths.

You'll likely want to use
``
$ ./update-python-libraries ../../pkgs/development/python-modules/*
``
to update all libraries in that folder.
"""

import argparse
import logging
import os
import re
import requests
import toolz
from concurrent.futures import ThreadPoolExecutor as Pool
from packaging.version import Version as _Version
from packaging.version import InvalidVersion
from packaging.specifiers import SpecifierSet
import collections
import subprocess

INDEX = "https://pypi.io/pypi"
"""url of PyPI"""

EXTENSIONS = ['tar.gz', 'tar.bz2', 'tar', 'zip', '.whl']
"""Permitted file extensions. These are evaluated from left to right and the first occurance is returned."""

PRERELEASES = False

GIT = "git"

import logging
logging.basicConfig(level=logging.INFO)


class Version(_Version, collections.abc.Sequence):

    def __init__(self, version):
        super().__init__(version)
        # We cannot use `str(Version(0.04.21))` because that becomes `0.4.21`
        # https://github.com/avian2/unidecode/issues/13#issuecomment-354538882
        self.raw_version = version

    def __getitem__(self, i):
        return self._version.release[i]

    def __len__(self):
        return len(self._version.release)

    def __iter__(self):
        yield from self._version.release

def _get_values(attribute, text):
    """Match attribute in text and return all matches.

    :returns: List of matches.
    """
    regex = '{}\s+=\s+"(.*)";'.format(attribute)
    regex = re.compile(regex)
    values = regex.findall(text)
    return values

def _get_unique_value(attribute, text):
    """Match attribute in text and return unique match.

    :returns: Single match.
    """
    values = _get_values(attribute, text)
    n = len(values)
    if n > 1:
        raise ValueError("found too many values for {}".format(attribute))
    elif n == 1:
        return values[0]
    else:
        raise ValueError("no value found for {}".format(attribute))

def _get_line_and_value(attribute, text):
    """Match attribute in text. Return the line and the value of the attribute."""
    regex = '({}\s+=\s+"(.*)";)'.format(attribute)
    regex = re.compile(regex)
    value = regex.findall(text)
    n = len(value)
    if n > 1:
        raise ValueError("found too many values for {}".format(attribute))
    elif n == 1:
        return value[0]
    else:
        raise ValueError("no value found for {}".format(attribute))


def _replace_value(attribute, value, text):
    """Search and replace value of attribute in text."""
    old_line, old_value = _get_line_and_value(attribute, text)
    new_line = old_line.replace(old_value, value)
    new_text = text.replace(old_line, new_line)
    return new_text

def _fetch_page(url):
    r = requests.get(url)
    if r.status_code == requests.codes.ok:
        return r.json()
    else:
        raise ValueError("request for {} failed".format(url))


SEMVER = {
    'major' : 0,
    'minor' : 1,
    'patch' : 2,
}

def _determine_latest_version(current_version, target, versions):
    """Determine latest version, given `target`.
    """
    current_version = Version(current_version)

    def _parse_versions(versions):
        for v in versions:
            try:
                yield Version(v)
            except InvalidVersion:
                pass

    versions = _parse_versions(versions)

    index = SEMVER[target]

    ceiling = list(current_version[0:index])
    if len(ceiling) == 0:
        ceiling = None
    else:
        ceiling[-1]+=1
        ceiling = Version(".".join(map(str, ceiling)))

    # We do not want prereleases
    versions = SpecifierSet(prereleases=PRERELEASES).filter(versions)

    if ceiling is not None:
        versions = SpecifierSet(f"<{ceiling}").filter(versions)

    return (max(sorted(versions))).raw_version


def _get_latest_version_pypi(package, extension, current_version, target):
    """Get latest version and hash from PyPI."""
    url = "{}/{}/json".format(INDEX, package)
    json = _fetch_page(url)

    versions = json['releases'].keys()
    version = _determine_latest_version(current_version, target, versions)

    try:
        releases = json['releases'][version]
    except KeyError as e:
        raise KeyError('Could not find version {} for {}'.format(version, package)) from e
    for release in releases:
        if release['filename'].endswith(extension):
            # TODO: In case of wheel we need to do further checks!
            sha256 = release['digests']['sha256']
            break
    else:
        sha256 = None
    return version, sha256


def _get_latest_version_github(package, extension, current_version, target):
    raise ValueError("updating from GitHub is not yet supported.")


FETCHERS = {
    'fetchFromGitHub' : _get_latest_version_github,
    'fetchPypi' : _get_latest_version_pypi,
    'fetchurl' : _get_latest_version_pypi,
}


DEFAULT_SETUPTOOLS_EXTENSION = 'tar.gz'


FORMATS = {
    'setuptools' : DEFAULT_SETUPTOOLS_EXTENSION,
    'wheel' : 'whl'
}

def _determine_fetcher(text):
    # Count occurences of fetchers.
    nfetchers = sum(text.count('src = {}'.format(fetcher)) for fetcher in FETCHERS.keys())
    if nfetchers == 0:
        raise ValueError("no fetcher.")
    elif nfetchers > 1:
        raise ValueError("multiple fetchers.")
    else:
        # Then we check which fetcher to use.
        for fetcher in FETCHERS.keys():
            if 'src = {}'.format(fetcher) in text:
                return fetcher

def _determine_extension(text, fetcher):
    """Determine what extension is used in the expression.

    If we use:
    - fetchPypi, we check if format is specified.
    - fetchurl, we determine the extension from the url.
    - fetchFromGitHub we simply use `.tar.gz`.
    """
    if fetcher == 'fetchPypi':
        try:
            src_format = _get_unique_value('format', text)
        except ValueError as e:
            src_format = None # format was not given

        try:
            extension = _get_unique_value('extension', text)
        except ValueError as e:
            extension = None # extension was not given

        if extension is None:
            if src_format is None:
                src_format = 'setuptools'
            elif src_format == 'flit':
                raise ValueError("Don't know how to update a Flit package.")
            extension = FORMATS[src_format]

    elif fetcher == 'fetchurl':
        url = _get_unique_value('url', text)
        extension = os.path.splitext(url)[1]
        if 'pypi' not in url:
            raise ValueError('url does not point to PyPI.')

    elif fetcher == 'fetchFromGitHub':
        raise ValueError('updating from GitHub is not yet implemented.')

    return extension


def _update_package(path, target):

    # Read the expression
    with open(path, 'r') as f:
        text = f.read()

    # Determine pname.
    pname = _get_unique_value('pname', text)

    # Determine version.
    version = _get_unique_value('version', text)

    # First we check how many fetchers are mentioned.
    fetcher = _determine_fetcher(text)

    extension = _determine_extension(text, fetcher)

    new_version, new_sha256 = FETCHERS[fetcher](pname, extension, version, target)

    if new_version == version:
        logging.info("Path {}: no update available for {}.".format(path, pname))
        return False
    elif Version(new_version) <= Version(version):
        raise ValueError("downgrade for {}.".format(pname))
    if not new_sha256:
        raise ValueError("no file available for {}.".format(pname))

    text = _replace_value('version', new_version, text)
    text = _replace_value('sha256', new_sha256, text)

    with open(path, 'w') as f:
        f.write(text)

    logging.info("Path {}: updated {} from {} to {}".format(path, pname, version, new_version))

    result = {
        'path' : path,
        'target': target,
        'pname': pname,
        'old_version' : version,
        'new_version' : new_version,
        #'fetcher' : fetcher,
    }

    return result

def _update(path, target):

    # We need to read and modify a Nix expression.
    if os.path.isdir(path):
        path = os.path.join(path, 'default.nix')

    # If a default.nix does not exist, we quit.
    if not os.path.isfile(path):
        logging.info("Path {}: does not exist.".format(path))
        return False

    # If file is not a Nix expression, we quit.
    if not path.endswith(".nix"):
        logging.info("Path {}: does not end with `.nix`.".format(path))
        return False

    try:
        return _update_package(path, target)
    except ValueError as e:
        logging.warning("Path {}: {}".format(path, e))
        return False


def _commit(path, pname, old_version, new_version, **kwargs):
    """Commit result.
    """

    msg = f'python: {pname}: {old_version} -> {new_version}'

    try:
        subprocess.check_call([GIT, 'add', path])
        subprocess.check_call([GIT, 'commit', '-m', msg])
    except subprocess.CalledProcessError as e:
        subprocess.check_call([GIT, 'checkout', path])
        raise subprocess.CalledProcessError(f'Could not commit {path}') from e

    return True


def main():

    parser = argparse.ArgumentParser()
    parser.add_argument('package', type=str, nargs='+')
    parser.add_argument('--target', type=str, choices=SEMVER.keys(), default='major')
    parser.add_argument('--commit', action='store_true', help='Create a commit for each package update')

    args = parser.parse_args()
    target = args.target

    packages = list(map(os.path.abspath, args.package))

    logging.info("Updating packages...")

    # Use threads to update packages concurrently
    with Pool() as p:
        results = list(p.map(lambda pkg: _update(pkg, target), packages))

    logging.info("Finished updating packages.")

    # Commits are created sequentially.
    if args.commit:
        logging.info("Committing updates...")
        list(map(lambda x: _commit(**x), filter(bool, results)))
        logging.info("Finished committing updates")

    count = sum(map(bool, results))
    logging.info("{} package(s) updated".format(count))


if __name__ == '__main__':
    main()
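
Aside: a small self-contained sketch (not part of the commit; the expression, version, and hashes below are fabricated placeholders) of what the regex-based `_replace_value` helper above does to a package expression:

# Sketch only: same idea as _get_line_and_value/_replace_value above.
import re

def replace_value(attribute, new_value, text):
    # Find `attribute = "...";`, then swap the quoted value on that line.
    line, old_value = re.findall(r'({}\s+=\s+"(.*)";)'.format(attribute), text)[0]
    return text.replace(line, line.replace(old_value, new_value))

expr = '''
  pname = "example";
  version = "1.0.0";
  sha256 = "0000000000000000000000000000000000000000000000000000";
'''
expr = replace_value('version', '1.1.0', expr)
expr = replace_value('sha256', '1111111111111111111111111111111111111111111111111111', expr)
print(expr)  # version and sha256 now carry the new (placeholder) values
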
@@ -6,13 +6,13 @@ buildRubyGem rec {

  name = "${gemName}-${version}";
  gemName = "bundix";
  version = "2.4.0";
  version = "2.4.1";

  src = fetchFromGitHub {
    owner = "manveru";
    repo = "bundix";
    rev = version;
    sha256 = "1lq8nday6031mj7ivnk2wd47v2smz6frnb8xh2yhyhpld045v1rz";
    sha256 = "175qmv7dj7v50v71b78dzn5pb4a35ml6p15asks9q1rrlkz0n4gn";
  };

  buildInputs = [ ruby bundler ];
@@ -2,14 +2,14 @@

buildGoPackage rec {
  name = "elfinfo-${version}";
  version = "0.7.4";
  version = "0.7.5";

  goPackagePath = "github.com/xyproto/elfinfo";
  src = fetchFromGitHub {
    rev = version;
    owner = "xyproto";
    repo = "elfinfo";
    sha256 = "12n86psri9077v7s6b4j7djg5kijf9gybd80f9sfs0xmgkbly3gv";
    sha256 = "0b6zyfq0yhpbf03h52q2lgf6ff086gcsbnhm6chx18h0q1g17m96";
  };

  meta = with stdenv.lib; {
@@ -2,9 +2,9 @@

buildGoPackage rec {
  name = "skaffold-${version}";
  version = "0.18.0";
  # rev is the 0.18.0 commit, mainly for skaffold version command output
  rev = "34651689be78b2c6bcfbace5072b00b93661f895";
  version = "0.19.0";
  # rev is the 0.19.0 commit, mainly for skaffold version command output
  rev = "9eb0dfc1bf634b97462c66b4dfb80e4cea378ade";

  goPackagePath = "github.com/GoogleContainerTools/skaffold";
  subPackages = ["cmd/skaffold"];
@@ -20,7 +20,7 @@ buildGoPackage rec {
    owner = "GoogleContainerTools";
    repo = "skaffold";
    rev = "v${version}";
    sha256 = "0an3g4jqch7a6ckh8yhia7lykpvb5lvz4kd5kqfmw9479kygv9sa";
    sha256 = "0s7dyfdmgslwnmbkzyqvf2622gj5d7vx9igwz3bf6dpaz382mk6h";
  };

  meta = {
@@ -45,6 +45,7 @@ stdenv.mkDerivation ((lib.optionalAttrs (! isNull buildScript) {
  ++ lib.optional xineramaSupport pkgs.xorg.libXinerama
  ++ lib.optional udevSupport pkgs.udev
  ++ lib.optional vulkanSupport pkgs.vulkan-loader
  ++ lib.optional sdlSupport pkgs.SDL2
  ++ lib.optionals gstreamerSupport (with pkgs.gst_all_1; [ gstreamer gst-plugins-base gst-plugins-good gst-plugins-bad gst-plugins-ugly gst-libav ])
  ++ lib.optionals gtkSupport [ pkgs.gtk3 pkgs.glib ]
  ++ lib.optionals openclSupport [ pkgs.opencl-headers pkgs.ocl-icd ]
@@ -42,6 +42,7 @@
  xineramaSupport ? false,
  xmlSupport ? false,
  vulkanSupport ? false,
  sdlSupport ? false,
}:

let wine-build = build: release:
@@ -53,7 +54,7 @@ let wine-build = build: release:
      netapiSupport cursesSupport vaSupport pcapSupport v4lSupport saneSupport
      gsmSupport gphoto2Support ldapSupport fontconfigSupport alsaSupport
      pulseaudioSupport xineramaSupport gtkSupport openclSupport xmlSupport tlsSupport
      openglSupport gstreamerSupport udevSupport vulkanSupport;
      openglSupport gstreamerSupport udevSupport vulkanSupport sdlSupport;
  };
});

@@ -89,7 +89,7 @@ stdenv.mkDerivation rec {
    ${lib.concatMapStringsSep "\n" (x: x.install or "") needed}
  '';

  NIX_CFLAGS_LINK = [ "-lsystemd" ] ++ lib.concatMap (x: x.NIX_CFLAGS_LINK or []) needed;
  NIX_CFLAGS_LINK = lib.optional withSystemd "-lsystemd" ++ lib.concatMap (x: x.NIX_CFLAGS_LINK or []) needed;

  meta = with stdenv.lib; {
    homepage = https://uwsgi-docs.readthedocs.org/en/latest/;
@@ -1,16 +1,16 @@
{ stdenv, fetchFromGitHub, buildGoPackage }:

buildGoPackage rec {
  version = "0.3.0";
  version = "0.5.1";
  name = "grobi-${version}";

  goPackagePath = "github.com/fd0/grobi";

  src = fetchFromGitHub {
    rev = "78a0639ffad765933a5233a1c94d2626e24277b8";
    rev = "5ddc167b9e4f84755a515828360abda15c54b7de";
    owner = "fd0";
    repo = "grobi";
    sha256 = "16q7vnhb1p6ds561832sfdszvlafww67bjn3lc0d18v7lyak2l3i";
    sha256 = "0iyxidq60pf6ki52f8fffplf10nl8w9jx1b7igg98csnc6iqxh89";
  };

  meta = with stdenv.lib; {
@@ -7912,6 +7912,8 @@ in
  python37Packages = recurseIntoAttrs python37.pkgs;
  pypyPackages = pypy.pkgs;

  update-python-libraries = callPackage ../development/interpreters/python/update-python-libraries { };

  # Should eventually be moved inside Python interpreters.
  python-setup-hook = callPackage ../development/interpreters/python/setup-hook.nix { };

@@ -68,6 +68,12 @@ in {
    buildLlvmPackages = buildPackages.llvmPackages_6;
    llvmPackages = pkgs.llvmPackages_6;
  };
  ghc863 = callPackage ../development/compilers/ghc/8.6.3.nix {
    bootPkgs = packages.ghc822;
    inherit (buildPackages.python3Packages) sphinx;
    buildLlvmPackages = buildPackages.llvmPackages_6;
    llvmPackages = pkgs.llvmPackages_6;
  };
  ghcHEAD = callPackage ../development/compilers/ghc/head.nix {
    bootPkgs = packages.ghc822Binary;
    inherit (buildPackages.python3Packages) sphinx;
@@ -130,6 +136,11 @@ in {
    ghc = bh.compiler.ghc862;
    compilerConfig = callPackage ../development/haskell-modules/configuration-ghc-8.6.x.nix { };
  };
  ghc863 = callPackage ../development/haskell-modules {
    buildHaskellPackages = bh.packages.ghc863;
    ghc = bh.compiler.ghc863;
    compilerConfig = callPackage ../development/haskell-modules/configuration-ghc-8.6.x.nix { };
  };
  ghcHEAD = callPackage ../development/haskell-modules {
    buildHaskellPackages = bh.packages.ghcHEAD;
    ghc = bh.compiler.ghcHEAD;
@@ -25,6 +25,7 @@ rec {
    udevSupport = true;
    xineramaSupport = true;
    xmlSupport = true;
    sdlSupport = true;
  };

  full = base.override {