Merge master into haskell-updates

github-actions[bot] 2023-08-25 00:11:18 +00:00 committed by GitHub
commit 11cf09d386
183 changed files with 6181 additions and 21145 deletions

View File

@ -3926,6 +3926,12 @@
githubId = 11422515;
name = "David Sánchez";
};
dawidd6 = {
email = "dawidd0811@gmail.com";
github = "dawidd6";
githubId = 9713907;
name = "Dawid Dziurla";
};
dawidsowa = {
email = "dawid_sowa@posteo.net";
github = "dawidsowa";
@ -6107,6 +6113,12 @@
githubId = 313929;
name = "Gabriel Ebner";
};
geluk = {
email = "johan+nix@geluk.io";
github = "geluk";
githubId = 1516985;
name = "Johan Geluk";
};
genericnerdyusername = {
name = "GenericNerdyUsername";
email = "genericnerdyusername@proton.me";
@ -10887,13 +10899,15 @@
name = "Matthias C. M. Troffaes";
};
McSinyx = {
email = "mcsinyx@disroot.org";
email = "cnx@loang.net";
github = "McSinyx";
githubId = 13689192;
matrix = "@cnx:loang.net";
name = "Nguyn Gia Phong";
keys = [{
fingerprint = "E90E 11B8 0493 343B 6132 E394 2714 8B2C 06A2 224B";
}];
keys = [
{ fingerprint = "E90E 11B8 0493 343B 6132 E394 2714 8B2C 06A2 224B"; }
{ fingerprint = "838A FE0D 55DC 074E 360F 943A 84B6 9CE6 F3F6 B767"; }
];
};
mcwitt = {
email = "mcwitt@gmail.com";

View File

@ -203,6 +203,8 @@ The module update takes care of the new config syntax and the data itself (user
- `programs.gnupg.agent.pinentryFlavor` is now set in `/etc/gnupg/gpg-agent.conf`, and will no longer take precedence over a `pinentry-program` set in `~/.gnupg/gpg-agent.conf`.
- `services.influxdb2` now supports doing an automatic initial setup and provisioning of users, organizations, buckets and authentication tokens, see [#249502](https://github.com/NixOS/nixpkgs/pull/249502) for more details.
- `wrapHelm` now exposes `passthru.pluginsDir` which can be passed to `helmfile`. For convenience, a top-level package `helmfile-wrapped` has been added, which inherits `passthru.pluginsDir` from `kubernetes-helm-wrapped`. See [#217768](https://github.com/NixOS/nixpkgs/issues/217768) for details.
- `boot.initrd.network.udhcp.enable` allows control over dhcp during stage 1 regardless of what `networking.useDHCP` is set to.
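
The `services.influxdb2` note above introduces declarative provisioning; a minimal sketch of what such a configuration could look like, based on the module added later in this commit (organization, bucket and secret paths are placeholders):

{
  services.influxdb2 = {
    enable = true;
    provision = {
      enable = true;
      initialSetup = {
        organization = "default";
        bucket = "default";
        # Secrets should live outside the Nix store.
        passwordFile = "/run/secrets/influxdb-admin-password";
        tokenFile = "/run/secrets/influxdb-admin-token";
      };
      # Further organizations, buckets, API tokens and users can be declared
      # under `provision.organizations` and `provision.users`.
      organizations.myorg.buckets.mybucket = { };
      users.myuser.passwordFile = "/run/secrets/myuser-password";
    };
  };
}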

View File

@ -105,36 +105,25 @@ in
}
];
services.zram-generator.enable = true;
system.requiredKernelConfig = with config.lib.kernelConfig; [
(isModule "ZRAM")
];
# Disabling this for the moment, as it would create and mkswap devices twice,
# once in stage 2 boot, and again when the zram-reloader service starts.
# boot.kernelModules = [ "zram" ];
systemd.packages = [ pkgs.zram-generator ];
systemd.services."systemd-zram-setup@".path = [ pkgs.util-linux ]; # for mkswap
environment.etc."systemd/zram-generator.conf".source =
(pkgs.formats.ini { }).generate "zram-generator.conf" (lib.listToAttrs
(builtins.map
(dev: {
name = dev;
value =
let
size = "${toString cfg.memoryPercent} / 100 * ram";
in
{
zram-size = if cfg.memoryMax != null then "min(${size}, ${toString cfg.memoryMax} / 1024 / 1024)" else size;
compression-algorithm = cfg.algorithm;
swap-priority = cfg.priority;
} // lib.optionalAttrs (cfg.writebackDevice != null) {
writeback-device = cfg.writebackDevice;
};
})
devices));
services.zram-generator.settings = lib.listToAttrs
(builtins.map
(dev: {
name = dev;
value =
let
size = "${toString cfg.memoryPercent} / 100 * ram";
in
{
zram-size = if cfg.memoryMax != null then "min(${size}, ${toString cfg.memoryMax} / 1024 / 1024)" else size;
compression-algorithm = cfg.algorithm;
swap-priority = cfg.priority;
} // lib.optionalAttrs (cfg.writebackDevice != null) {
writeback-device = cfg.writebackDevice;
};
})
devices);
};

View File

@ -1172,6 +1172,7 @@
./services/system/self-deploy.nix
./services/system/systembus-notify.nix
./services/system/uptimed.nix
./services/system/zram-generator.nix
./services/torrent/deluge.nix
./services/torrent/flexget.nix
./services/torrent/magnetico.nix

View File

@ -159,6 +159,14 @@ in
type = types.bool;
};
enableLsColors = mkOption {
default = true;
description = lib.mdDoc ''
Enable extra colors in directory listings (used by `ls` and `tree`).
'';
type = types.bool;
};
};
};
@ -263,6 +271,11 @@ in
${cfg.interactiveShellInit}
${optionalString cfg.enableLsColors ''
# Extra colors for directory listings.
eval "$(${pkgs.coreutils}/bin/dircolors -b)"
''}
# Setup aliases.
${zshAliases}

View File

@ -5,8 +5,8 @@ let
parentWrapperDir = dirOf wrapperDir;
securityWrapper = sourceProg : pkgs.callPackage ./wrapper.nix {
inherit sourceProg;
securityWrapper = pkgs.callPackage ./wrapper.nix {
inherit parentWrapperDir;
};
fileModeType =
@ -91,7 +91,8 @@ let
, ...
}:
''
cp ${securityWrapper source}/bin/security-wrapper "$wrapperDir/${program}"
cp ${securityWrapper}/bin/security-wrapper "$wrapperDir/${program}"
echo -n "${source}" > "$wrapperDir/${program}.real"
# Prevent races
chmod 0000 "$wrapperDir/${program}"
@ -118,7 +119,8 @@ let
, ...
}:
''
cp ${securityWrapper source}/bin/security-wrapper "$wrapperDir/${program}"
cp ${securityWrapper}/bin/security-wrapper "$wrapperDir/${program}"
echo -n "${source}" > "$wrapperDir/${program}.real"
# Prevent races
chmod 0000 "$wrapperDir/${program}"

View File

@ -17,10 +17,6 @@
#include <syscall.h>
#include <byteswap.h>
#ifndef SOURCE_PROG
#error SOURCE_PROG should be defined via preprocessor commandline
#endif
// aborts when false, printing the failed expression
#define ASSERT(expr) ((expr) ? (void) 0 : assert_failure(#expr))
// aborts when returns non-zero, printing the failed expression and errno
@ -28,6 +24,10 @@
extern char **environ;
// The WRAPPER_DIR macro is supplied at compile time so that it cannot
// be changed at runtime
static char *wrapper_dir = WRAPPER_DIR;
// Wrapper debug variable name
static char *wrapper_debug = "WRAPPER_DEBUG";
@ -151,20 +151,115 @@ static int make_caps_ambient(const char *self_path) {
return 0;
}
int readlink_malloc(const char *p, char **ret) {
size_t l = FILENAME_MAX+1;
int r;
for (;;) {
char *c = calloc(l, sizeof(char));
if (!c) {
return -ENOMEM;
}
ssize_t n = readlink(p, c, l-1);
if (n < 0) {
r = -errno;
free(c);
return r;
}
if ((size_t) n < l-1) {
c[n] = 0;
*ret = c;
return 0;
}
free(c);
l *= 2;
}
}
int main(int argc, char **argv) {
ASSERT(argc >= 1);
char *self_path = NULL;
int self_path_size = readlink_malloc("/proc/self/exe", &self_path);
if (self_path_size < 0) {
fprintf(stderr, "cannot readlink /proc/self/exe: %s", strerror(-self_path_size));
}
unsigned int ruid, euid, suid, rgid, egid, sgid;
MUSTSUCCEED(getresuid(&ruid, &euid, &suid));
MUSTSUCCEED(getresgid(&rgid, &egid, &sgid));
// If true, then we did not benefit from setuid privilege escalation,
// where the original uid is still in ruid and different from euid == suid.
int didnt_suid = (ruid == euid) && (euid == suid);
// If true, then we did not benefit from setgid privilege escalation
int didnt_sgid = (rgid == egid) && (egid == sgid);
// Make sure that we are being executed from the right location,
// i.e., `wrapper_dir'. This is to prevent someone from creating a
// hard link `X' from some other location, along with a false
// `X.real' file, to allow arbitrary programs to be executed
// with elevated capabilities.
int len = strlen(wrapper_dir);
if (len > 0 && '/' == wrapper_dir[len - 1])
--len;
ASSERT(!strncmp(self_path, wrapper_dir, len));
ASSERT('/' == wrapper_dir[0]);
ASSERT('/' == self_path[len]);
// If we got privileges with the fs set[ug]id bit, check that the privilege we
// got matches the one we expected, i.e. that our effective uid/gid
// matches the uid/gid of `self_path`. This ensures that we were executed as
// `self_path', and not, say, as some other setuid program.
// We don't check that if we did not benefit from the set[ug]id bit, as
// can be the case in nosuid mounts or user namespaces.
struct stat st;
ASSERT(lstat(self_path, &st) != -1);
// if the wrapper gained privilege with suid, check that we got the uid of the file owner
ASSERT(!((st.st_mode & S_ISUID) && !didnt_suid) || (st.st_uid == euid));
// if the wrapper gained privilege with sgid, check that we got the gid of the file group
ASSERT(!((st.st_mode & S_ISGID) && !didnt_sgid) || (st.st_gid == egid));
// same, but with suid instead of euid
ASSERT(!((st.st_mode & S_ISUID) && !didnt_suid) || (st.st_uid == suid));
ASSERT(!((st.st_mode & S_ISGID) && !didnt_sgid) || (st.st_gid == sgid));
// And, of course, we shouldn't be writable.
ASSERT(!(st.st_mode & (S_IWGRP | S_IWOTH)));
// Read the path of the real (wrapped) program from <self>.real.
char real_fn[PATH_MAX + 10];
int real_fn_size = snprintf(real_fn, sizeof(real_fn), "%s.real", self_path);
ASSERT(real_fn_size < sizeof(real_fn));
int fd_self = open(real_fn, O_RDONLY);
ASSERT(fd_self != -1);
char source_prog[PATH_MAX];
len = read(fd_self, source_prog, PATH_MAX);
ASSERT(len != -1);
ASSERT(len < sizeof(source_prog));
ASSERT(len > 0);
source_prog[len] = 0;
close(fd_self);
// Read the capabilities set on the wrapper and raise them in to
// the ambient set so the program we're wrapping receives the
// capabilities too!
if (make_caps_ambient("/proc/self/exe") != 0) {
if (make_caps_ambient(self_path) != 0) {
free(self_path);
return 1;
}
free(self_path);
execve(SOURCE_PROG, argv, environ);
execve(source_prog, argv, environ);
fprintf(stderr, "%s: cannot run `%s': %s\n",
argv[0], SOURCE_PROG, strerror(errno));
argv[0], source_prog, strerror(errno));
return 1;
}

View File

@ -1,4 +1,4 @@
{ stdenv, linuxHeaders, sourceProg, debug ? false }:
{ stdenv, linuxHeaders, parentWrapperDir, debug ? false }:
# For testing:
# $ nix-build -E 'with import <nixpkgs> {}; pkgs.callPackage ./wrapper.nix { parentWrapperDir = "/run/wrappers"; debug = true; }'
stdenv.mkDerivation {
@ -7,7 +7,7 @@ stdenv.mkDerivation {
dontUnpack = true;
hardeningEnable = [ "pie" ];
CFLAGS = [
''-DSOURCE_PROG="${sourceProg}"''
''-DWRAPPER_DIR="${parentWrapperDir}"''
] ++ (if debug then [
"-Werror" "-Og" "-g"
] else [

View File

@ -3,34 +3,291 @@
let
inherit
(lib)
any
attrNames
attrValues
count
escapeShellArg
filterAttrs
flatten
flip
getExe
hasAttr
hasInfix
listToAttrs
literalExpression
mapAttrsToList
mdDoc
mkEnableOption
mkIf
mkOption
nameValuePair
optional
subtractLists
types
unique
;
format = pkgs.formats.json { };
cfg = config.services.influxdb2;
configFile = format.generate "config.json" cfg.settings;
validPermissions = [
"authorizations"
"buckets"
"dashboards"
"orgs"
"tasks"
"telegrafs"
"users"
"variables"
"secrets"
"labels"
"views"
"documents"
"notificationRules"
"notificationEndpoints"
"checks"
"dbrp"
"annotations"
"sources"
"scrapers"
"notebooks"
"remotes"
"replications"
];
# Determines whether at least one active api token is defined
anyAuthDefined =
flip any (attrValues cfg.provision.organizations)
(o: o.present && flip any (attrValues o.auths)
(a: a.present && a.tokenFile != null));
provisionState = pkgs.writeText "provision_state.json" (builtins.toJSON {
inherit (cfg.provision) organizations users;
});
provisioningScript = pkgs.writeShellScript "post-start-provision" ''
set -euo pipefail
export INFLUX_HOST="http://"${escapeShellArg (
if ! hasAttr "http-bind-address" cfg.settings
|| hasInfix "0.0.0.0" cfg.settings.http-bind-address
then "localhost:8086"
else cfg.settings.http-bind-address
)}
# Wait for the influxdb server to come online
count=0
while ! influx ping &>/dev/null; do
if [ "$count" -eq 300 ]; then
echo "Tried for 30 seconds, giving up..."
exit 1
fi
if ! kill -0 "$MAINPID"; then
echo "Main server died, giving up..."
exit 1
fi
sleep 0.1
count=$((count + 1))
done
# Do the initial database setup. Pass /dev/null as configs-path to
# avoid saving the token as the active config.
if test -e "$STATE_DIRECTORY/.first_startup"; then
influx setup \
--configs-path /dev/null \
--org ${escapeShellArg cfg.provision.initialSetup.organization} \
--bucket ${escapeShellArg cfg.provision.initialSetup.bucket} \
--username ${escapeShellArg cfg.provision.initialSetup.username} \
--password "$(< "$CREDENTIALS_DIRECTORY/admin-password")" \
--token "$(< "$CREDENTIALS_DIRECTORY/admin-token")" \
--retention ${toString cfg.provision.initialSetup.retention}s \
--force >/dev/null
rm -f "$STATE_DIRECTORY/.first_startup"
fi
provision_result=$(${getExe pkgs.influxdb2-provision} ${provisionState} "$INFLUX_HOST" "$(< "$CREDENTIALS_DIRECTORY/admin-token")")
if [[ "$(jq '[.auths[] | select(.action == "created")] | length' <<< "$provision_result")" -gt 0 ]]; then
echo "Created at least one new token, queueing service restart so we can manipulate secrets"
touch "$STATE_DIRECTORY/.needs_restart"
fi
'';
restarterScript = pkgs.writeShellScript "post-start-restarter" ''
set -euo pipefail
if test -e "$STATE_DIRECTORY/.needs_restart"; then
rm -f "$STATE_DIRECTORY/.needs_restart"
/run/current-system/systemd/bin/systemctl restart influxdb2
fi
'';
organizationSubmodule = types.submodule (organizationSubmod: let
org = organizationSubmod.config._module.args.name;
in {
options = {
present = mkOption {
description = mdDoc "Whether to ensure that this organization is present or absent.";
type = types.bool;
default = true;
};
description = mkOption {
description = mdDoc "Optional description for the organization.";
default = null;
type = types.nullOr types.str;
};
buckets = mkOption {
description = mdDoc "Buckets to provision in this organization.";
default = {};
type = types.attrsOf (types.submodule (bucketSubmod: let
bucket = bucketSubmod.config._module.args.name;
in {
options = {
present = mkOption {
description = mdDoc "Whether to ensure that this bucket is present or absent.";
type = types.bool;
default = true;
};
description = mkOption {
description = mdDoc "Optional description for the bucket.";
default = null;
type = types.nullOr types.str;
};
retention = mkOption {
type = types.ints.unsigned;
default = 0;
description = mdDoc "The duration in seconds for which the bucket will retain data (0 is infinite).";
};
};
}));
};
auths = mkOption {
description = mdDoc "API tokens to provision for the user in this organization.";
default = {};
type = types.attrsOf (types.submodule (authSubmod: let
auth = authSubmod.config._module.args.name;
in {
options = {
id = mkOption {
description = mdDoc "A unique identifier for this authentication token. Since influx doesn't store names for tokens, this will be hashed and appended to the description to identify the token.";
readOnly = true;
default = builtins.substring 0 32 (builtins.hashString "sha256" "${org}:${auth}");
defaultText = "<a hash derived from org and name>";
type = types.str;
};
present = mkOption {
description = mdDoc "Whether to ensure that this user is present or absent.";
type = types.bool;
default = true;
};
description = mkOption {
description = ''
Optional description for the API token.
Note that the actual token will always be created with a description regardless
of whether this is given or not. The name plus a unique suffix is always added
so that the token can later be identified and tracked as already created.
'';
default = null;
type = types.nullOr types.str;
};
tokenFile = mkOption {
type = types.nullOr types.path;
default = null;
description = mdDoc "The token value. If not given, influx will automatically generate one.";
};
operator = mkOption {
description = mdDoc "Grants all permissions in all organizations.";
default = false;
type = types.bool;
};
allAccess = mkOption {
description = mdDoc "Grants all permissions in the associated organization.";
default = false;
type = types.bool;
};
readPermissions = mkOption {
description = mdDoc ''
The read permissions to include for this token. Access is usually granted only
for resources in the associated organization.
Available permissions are `authorizations`, `buckets`, `dashboards`,
`orgs`, `tasks`, `telegrafs`, `users`, `variables`, `secrets`, `labels`, `views`,
`documents`, `notificationRules`, `notificationEndpoints`, `checks`, `dbrp`,
`annotations`, `sources`, `scrapers`, `notebooks`, `remotes`, `replications`.
Refer to `influx auth create --help` for a full list with descriptions.
`buckets` grants read access to all associated buckets. Use `readBuckets` to define
more granular access permissions.
'';
default = [];
type = types.listOf (types.enum validPermissions);
};
writePermissions = mkOption {
description = mdDoc ''
The write permissions to include for this token. Access is usually granted only
for resources in the associated organization.
Available permissions are `authorizations`, `buckets`, `dashboards`,
`orgs`, `tasks`, `telegrafs`, `users`, `variables`, `secrets`, `labels`, `views`,
`documents`, `notificationRules`, `notificationEndpoints`, `checks`, `dbrp`,
`annotations`, `sources`, `scrapers`, `notebooks`, `remotes`, `replications`.
Refer to `influx auth create --help` for a full list with descriptions.
`buckets` grants write access to all associated buckets. Use `writeBuckets` to define
more granular access permissions.
'';
default = [];
type = types.listOf (types.enum validPermissions);
};
readBuckets = mkOption {
description = mdDoc "The organization's buckets which should be allowed to be read";
default = [];
type = types.listOf types.str;
};
writeBuckets = mkOption {
description = mdDoc "The organization's buckets which should be allowed to be written";
default = [];
type = types.listOf types.str;
};
};
}));
};
};
});
in
{
options = {
services.influxdb2 = {
enable = mkEnableOption (lib.mdDoc "the influxdb2 server");
enable = mkEnableOption (mdDoc "the influxdb2 server");
package = mkOption {
default = pkgs.influxdb2-server;
defaultText = literalExpression "pkgs.influxdb2";
description = lib.mdDoc "influxdb2 derivation to use.";
description = mdDoc "influxdb2 derivation to use.";
type = types.package;
};
settings = mkOption {
default = { };
description = lib.mdDoc ''configuration options for influxdb2, see <https://docs.influxdata.com/influxdb/v2.0/reference/config-options> for details.'';
description = mdDoc ''configuration options for influxdb2, see <https://docs.influxdata.com/influxdb/v2.0/reference/config-options> for details.'';
type = format.type;
};
@ -41,52 +298,135 @@ in
organization = mkOption {
type = types.str;
example = "main";
description = "Primary organization name";
description = mdDoc "Primary organization name";
};
bucket = mkOption {
type = types.str;
example = "example";
description = "Primary bucket name";
description = mdDoc "Primary bucket name";
};
username = mkOption {
type = types.str;
default = "admin";
description = "Primary username";
description = mdDoc "Primary username";
};
retention = mkOption {
type = types.str;
default = "0";
description = ''
The duration for which the bucket will retain data (0 is infinite).
Accepted units are `ns` (nanoseconds), `us` or `µs` (microseconds), `ms` (milliseconds),
`s` (seconds), `m` (minutes), `h` (hours), `d` (days) and `w` (weeks).
'';
type = types.ints.unsigned;
default = 0;
description = mdDoc "The duration in seconds for which the bucket will retain data (0 is infinite).";
};
passwordFile = mkOption {
type = types.path;
description = "Password for primary user. Don't use a file from the nix store!";
description = mdDoc "Password for primary user. Don't use a file from the nix store!";
};
tokenFile = mkOption {
type = types.path;
description = "API Token to set for the admin user. Don't use a file from the nix store!";
description = mdDoc "API Token to set for the admin user. Don't use a file from the nix store!";
};
};
organizations = mkOption {
description = mdDoc "Organizations to provision.";
example = literalExpression ''
{
myorg = {
description = "My organization";
buckets.mybucket = {
description = "My bucket";
retention = 31536000; # 1 year
};
auths.mytoken = {
readBuckets = ["mybucket"];
tokenFile = "/run/secrets/mytoken";
};
};
}
'';
default = {};
type = types.attrsOf organizationSubmodule;
};
users = mkOption {
description = mdDoc "Users to provision.";
default = {};
example = literalExpression ''
{
# admin = {}; /* The initialSetup.username will automatically be added. */
myuser.passwordFile = "/run/secrets/myuser_password";
}
'';
type = types.attrsOf (types.submodule (userSubmod: let
user = userSubmod.config._module.args.name;
org = userSubmod.config.org;
in {
options = {
present = mkOption {
description = mdDoc "Whether to ensure that this user is present or absent.";
type = types.bool;
default = true;
};
passwordFile = mkOption {
description = mdDoc "Password for the user. If unset, the user will not be able to log in until a password is set by an operator! Don't use a file from the nix store!";
default = null;
type = types.nullOr types.path;
};
};
}));
};
};
};
};
config = mkIf cfg.enable {
assertions = [
{
assertion = !(hasAttr "bolt-path" cfg.settings) && !(hasAttr "engine-path" cfg.settings);
message = "services.influxdb2.config: bolt-path and engine-path should not be set as they are managed by systemd";
}
];
assertions =
[
{
assertion = !(hasAttr "bolt-path" cfg.settings) && !(hasAttr "engine-path" cfg.settings);
message = "services.influxdb2.config: bolt-path and engine-path should not be set as they are managed by systemd";
}
]
++ flatten (flip mapAttrsToList cfg.provision.organizations (orgName: org:
flip mapAttrsToList org.auths (authName: auth:
[
{
assertion = 1 == count (x: x) [
auth.operator
auth.allAccess
(auth.readPermissions != []
|| auth.writePermissions != []
|| auth.readBuckets != []
|| auth.writeBuckets != [])
];
message = "influxdb2: provision.organizations.${orgName}.auths.${authName}: The `operator` and `allAccess` options are mutually exclusive with each other and the granular permission settings.";
}
(let unknownBuckets = subtractLists (attrNames org.buckets) auth.readBuckets; in {
assertion = unknownBuckets == [];
message = "influxdb2: provision.organizations.${orgName}.auths.${authName}: Refers to invalid buckets in readBuckets: ${toString unknownBuckets}";
})
(let unknownBuckets = subtractLists (attrNames org.buckets) auth.writeBuckets; in {
assertion = unknownBuckets == [];
message = "influxdb2: provision.organizations.${orgName}.auths.${authName}: Refers to invalid buckets in writeBuckets: ${toString unknownBuckets}";
})
]
)
));
services.influxdb2.provision = mkIf cfg.provision.enable {
organizations.${cfg.provision.initialSetup.organization} = {
buckets.${cfg.provision.initialSetup.bucket} = {
inherit (cfg.provision.initialSetup) retention;
};
};
users.${cfg.provision.initialSetup.username} = {
inherit (cfg.provision.initialSetup) passwordFile;
};
};
systemd.services.influxdb2 = {
description = "InfluxDB is an open-source, distributed, time series database";
@ -111,58 +451,38 @@ in
"admin-password:${cfg.provision.initialSetup.passwordFile}"
"admin-token:${cfg.provision.initialSetup.tokenFile}"
];
ExecStartPost = mkIf cfg.provision.enable (
[provisioningScript] ++
# Only the restarter runs with elevated privileges
optional anyAuthDefined "+${restarterScript}"
);
};
path = [pkgs.influxdb2-cli];
path = [
pkgs.influxdb2-cli
pkgs.jq
];
# Mark if this is the first startup so postStart can do the initial setup
preStart = mkIf cfg.provision.enable ''
# Mark if this is the first startup so postStart can do the initial setup.
# Also extract any token secret mappings and apply them if this isn't the first start.
preStart = let
tokenPaths = listToAttrs (flatten
# For all organizations
(flip mapAttrsToList cfg.provision.organizations
# For each contained token that has a token file
(_: org: flip mapAttrsToList (filterAttrs (_: x: x.tokenFile != null) org.auths)
# Collect id -> tokenFile for the mapping
(_: auth: nameValuePair auth.id auth.tokenFile))));
tokenMappings = pkgs.writeText "token_mappings.json" (builtins.toJSON tokenPaths);
in mkIf cfg.provision.enable ''
if ! test -e "$STATE_DIRECTORY/influxd.bolt"; then
touch "$STATE_DIRECTORY/.first_startup"
else
# Manipulate provisioned api tokens if necessary
${getExe pkgs.influxdb2-token-manipulator} "$STATE_DIRECTORY/influxd.bolt" ${tokenMappings}
fi
'';
postStart = let
initCfg = cfg.provision.initialSetup;
in mkIf cfg.provision.enable (
''
set -euo pipefail
export INFLUX_HOST="http://"${escapeShellArg (cfg.settings.http-bind-address or "localhost:8086")}
# Wait for the influxdb server to come online
count=0
while ! influx ping &>/dev/null; do
if [ "$count" -eq 300 ]; then
echo "Tried for 30 seconds, giving up..."
exit 1
fi
if ! kill -0 "$MAINPID"; then
echo "Main server died, giving up..."
exit 1
fi
sleep 0.1
count=$((count++))
done
# Do the initial database setup. Pass /dev/null as configs-path to
# avoid saving the token as the active config.
if test -e "$STATE_DIRECTORY/.first_startup"; then
influx setup \
--configs-path /dev/null \
--org ${escapeShellArg initCfg.organization} \
--bucket ${escapeShellArg initCfg.bucket} \
--username ${escapeShellArg initCfg.username} \
--password "$(< "$CREDENTIALS_DIRECTORY/admin-password")" \
--token "$(< "$CREDENTIALS_DIRECTORY/admin-token")" \
--retention ${escapeShellArg initCfg.retention} \
--force >/dev/null
rm -f "$STATE_DIRECTORY/.first_startup"
fi
''
);
};
users.extraUsers.influxdb2 = {

View File

@ -8,7 +8,7 @@ in
{
options = {
services.hddfancontrol.enable = lib.mkEnableOption "hddfancontrol daemon";
services.hddfancontrol.enable = lib.mkEnableOption (lib.mdDoc "hddfancontrol daemon");
services.hddfancontrol.disks = lib.mkOption {
type = with types; listOf path;
@ -58,7 +58,6 @@ in
systemd.packages = [pkgs.hddfancontrol];
systemd.services.hddfancontrol = {
enable = true;
wantedBy = [ "multi-user.target" ];
environment.HDDFANCONTROL_ARGS = lib.escapeShellArgs args;
};

View File

@ -0,0 +1,38 @@
{ config, lib, pkgs, ... }:
let
cfg = config.services.zram-generator;
settingsFormat = pkgs.formats.ini { };
in
{
meta = {
maintainers = with lib.maintainers; [ nickcao ];
};
options.services.zram-generator = {
enable = lib.mkEnableOption (lib.mdDoc "Systemd unit generator for zram devices");
package = lib.mkPackageOptionMD pkgs "zram-generator" { };
settings = lib.mkOption {
type = lib.types.submodule {
freeformType = settingsFormat.type;
};
default = { };
description = lib.mdDoc ''
Configuration for zram-generator,
see https://github.com/systemd/zram-generator for documentation.
'';
};
};
config = lib.mkIf cfg.enable {
system.requiredKernelConfig = with config.lib.kernelConfig; [
(isModule "ZRAM")
];
systemd.packages = [ cfg.package ];
systemd.services."systemd-zram-setup@".path = [ pkgs.util-linux ]; # for mkswap
environment.etc."systemd/zram-generator.conf".source = settingsFormat.generate "zram-generator.conf" cfg.settings;
};
}
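
The module above exposes zram-generator's INI configuration through the freeform `settings` option; a minimal sketch of a consumer configuration, assuming a single device named `zram0` (values are illustrative, keys follow the upstream zram-generator format):

{
  services.zram-generator = {
    enable = true;
    settings.zram0 = {
      # Size expressions may reference `ram`, as in the zram.nix module above.
      zram-size = "min(ram / 2, 4096)";
      compression-algorithm = "zstd";
      swap-priority = 100;
    };
  };
}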

View File

@ -45,6 +45,7 @@ in
{ name = "dwm";
start =
''
export _JAVA_AWT_WM_NONREPARENTING=1
dwm &
waitPID=$!
'';

View File

@ -137,14 +137,8 @@ let
magicOrExtension = ''\x00asm'';
mask = ''\xff\xff\xff\xff'';
};
x86_64-windows = {
magicOrExtension = "exe";
recognitionType = "extension";
};
i686-windows = {
magicOrExtension = "exe";
recognitionType = "extension";
};
x86_64-windows.magicOrExtension = "MZ";
i686-windows.magicOrExtension = "MZ";
};
in {

View File

@ -434,7 +434,7 @@ in {
lightdm = handleTest ./lightdm.nix {};
lighttpd = handleTest ./lighttpd.nix {};
limesurvey = handleTest ./limesurvey.nix {};
listmonk = handleTest ./listmonk.nix {};
listmonk = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./listmonk.nix {};
litestream = handleTest ./litestream.nix {};
lldap = handleTest ./lldap.nix {};
locate = handleTest ./locate.nix {};
@ -443,10 +443,8 @@ in {
loki = handleTest ./loki.nix {};
luks = handleTest ./luks.nix {};
lvm2 = handleTest ./lvm2 {};
lxd = handleTest ./lxd.nix {};
lxd-nftables = handleTest ./lxd-nftables.nix {};
lxd = handleTest ./lxd {};
lxd-image-server = handleTest ./lxd-image-server.nix {};
lxd-ui = handleTest ./lxd-ui.nix {};
#logstash = handleTest ./logstash.nix {};
lorri = handleTest ./lorri/default.nix {};
maddy = discoverTests (import ./maddy { inherit handleTest; });

View File

@ -1,24 +0,0 @@
storage_pools:
- name: default
driver: dir
config:
source: /var/lxd-pool
networks:
- name: lxdbr0
type: bridge
config:
ipv4.address: auto
ipv6.address: none
profiles:
- name: default
devices:
eth0:
name: eth0
network: lxdbr0
type: nic
root:
path: /
pool: default
type: disk

View File

@ -6,6 +6,9 @@ import ./make-test-python.nix ({ pkgs, ...} : {
nodes.machine = { lib, ... }: {
environment.systemPackages = [ pkgs.influxdb2-cli ];
# Make sure that the service is restarted immediately if tokens need to be rewritten
# without relying on any Restart=on-failure behavior
systemd.services.influxdb2.serviceConfig.RestartSec = 6000;
services.influxdb2.enable = true;
services.influxdb2.provision = {
enable = true;
@ -15,22 +18,208 @@ import ./make-test-python.nix ({ pkgs, ...} : {
passwordFile = pkgs.writeText "admin-pw" "ExAmPl3PA55W0rD";
tokenFile = pkgs.writeText "admin-token" "verysecureadmintoken";
};
organizations.someorg = {
buckets.somebucket = {};
auths.sometoken = {
description = "some auth token";
readBuckets = ["somebucket"];
writeBuckets = ["somebucket"];
};
};
users.someuser.passwordFile = pkgs.writeText "tmp-pw" "abcgoiuhaoga";
};
specialisation.withModifications.configuration = { ... }: {
services.influxdb2.provision = {
organizations.someorg.buckets.somebucket.present = false;
organizations.someorg.auths.sometoken.present = false;
users.someuser.present = false;
organizations.myorg = {
description = "Myorg description";
buckets.mybucket = {
description = "Mybucket description";
};
auths.mytoken = {
operator = true;
description = "operator token";
tokenFile = pkgs.writeText "tmp-tok" "someusertoken";
};
};
users.myuser.passwordFile = pkgs.writeText "tmp-pw" "abcgoiuhaoga";
};
};
specialisation.withParentDelete.configuration = { ... }: {
services.influxdb2.provision = {
organizations.someorg.present = false;
# Deleting the parent implies:
#organizations.someorg.buckets.somebucket.present = false;
#organizations.someorg.auths.sometoken.present = false;
};
};
specialisation.withNewTokens.configuration = { ... }: {
services.influxdb2.provision = {
organizations.default = {
auths.operator = {
operator = true;
description = "new optoken";
tokenFile = pkgs.writeText "tmp-tok" "newoptoken";
};
auths.allaccess = {
operator = true;
description = "new allaccess";
tokenFile = pkgs.writeText "tmp-tok" "newallaccess";
};
auths.specifics = {
description = "new specifics";
readPermissions = ["users" "tasks"];
writePermissions = ["tasks"];
tokenFile = pkgs.writeText "tmp-tok" "newspecificstoken";
};
};
};
};
};
testScript = { nodes, ... }:
let
specialisations = "${nodes.machine.system.build.toplevel}/specialisation";
tokenArg = "--token verysecureadmintoken";
in ''
def assert_contains(haystack, needle):
if needle not in haystack:
print("The haystack that will cause the following exception is:")
print("---")
print(haystack)
print("---")
raise Exception(f"Expected string '{needle}' was not found")
def assert_lacks(haystack, needle):
if needle in haystack:
print("The haystack that will cause the following exception is:")
print("---")
print(haystack, end="")
print("---")
raise Exception(f"Unexpected string '{needle}' was found")
machine.wait_for_unit("influxdb2.service")
machine.fail("curl --fail -X POST 'http://localhost:8086/api/v2/signin' -u admin:wrongpassword")
machine.succeed("curl --fail -X POST 'http://localhost:8086/api/v2/signin' -u admin:ExAmPl3PA55W0rD")
out = machine.succeed("influx org list ${tokenArg}")
assert "default" in out
assert_contains(out, "default")
assert_lacks(out, "myorg")
assert_contains(out, "someorg")
out = machine.succeed("influx bucket list ${tokenArg} --org default")
assert "default" in out
assert_contains(out, "default")
machine.fail("influx bucket list ${tokenArg} --org myorg")
out = machine.succeed("influx bucket list ${tokenArg} --org someorg")
assert_contains(out, "somebucket")
out = machine.succeed("influx user list ${tokenArg}")
assert_contains(out, "admin")
assert_lacks(out, "myuser")
assert_contains(out, "someuser")
out = machine.succeed("influx auth list ${tokenArg}")
assert_lacks(out, "operator token")
assert_contains(out, "some auth token")
with subtest("withModifications"):
machine.succeed('${specialisations}/withModifications/bin/switch-to-configuration test')
machine.wait_for_unit("influxdb2.service")
out = machine.succeed("influx org list ${tokenArg}")
assert_contains(out, "default")
assert_contains(out, "myorg")
assert_contains(out, "someorg")
out = machine.succeed("influx bucket list ${tokenArg} --org myorg")
assert_contains(out, "mybucket")
out = machine.succeed("influx bucket list ${tokenArg} --org someorg")
assert_lacks(out, "somebucket")
out = machine.succeed("influx user list ${tokenArg}")
assert_contains(out, "admin")
assert_contains(out, "myuser")
assert_lacks(out, "someuser")
out = machine.succeed("influx auth list ${tokenArg}")
assert_contains(out, "operator token")
assert_lacks(out, "some auth token")
# Make sure the user token is also usable
machine.succeed("influx auth list --token someusertoken")
with subtest("keepsUnrelated"):
machine.succeed('${nodes.machine.system.build.toplevel}/bin/switch-to-configuration test')
machine.wait_for_unit("influxdb2.service")
out = machine.succeed("influx org list ${tokenArg}")
assert_contains(out, "default")
assert_contains(out, "myorg")
assert_contains(out, "someorg")
out = machine.succeed("influx bucket list ${tokenArg} --org default")
assert_contains(out, "default")
out = machine.succeed("influx bucket list ${tokenArg} --org myorg")
assert_contains(out, "mybucket")
out = machine.succeed("influx bucket list ${tokenArg} --org someorg")
assert_contains(out, "somebucket")
out = machine.succeed("influx user list ${tokenArg}")
assert_contains(out, "admin")
assert_contains(out, "myuser")
assert_contains(out, "someuser")
out = machine.succeed("influx auth list ${tokenArg}")
assert_contains(out, "operator token")
assert_contains(out, "some auth token")
with subtest("withParentDelete"):
machine.succeed('${specialisations}/withParentDelete/bin/switch-to-configuration test')
machine.wait_for_unit("influxdb2.service")
out = machine.succeed("influx org list ${tokenArg}")
assert_contains(out, "default")
assert_contains(out, "myorg")
assert_lacks(out, "someorg")
out = machine.succeed("influx bucket list ${tokenArg} --org default")
assert_contains(out, "default")
out = machine.succeed("influx bucket list ${tokenArg} --org myorg")
assert_contains(out, "mybucket")
machine.fail("influx bucket list ${tokenArg} --org someorg")
out = machine.succeed("influx user list ${tokenArg}")
assert_contains(out, "admin")
assert_contains(out, "myuser")
assert_contains(out, "someuser")
out = machine.succeed("influx auth list ${tokenArg}")
assert_contains(out, "operator token")
assert_lacks(out, "some auth token")
with subtest("withNewTokens"):
machine.succeed('${specialisations}/withNewTokens/bin/switch-to-configuration test')
machine.wait_for_unit("influxdb2.service")
out = machine.succeed("influx auth list ${tokenArg}")
assert_contains(out, "operator token")
assert_contains(out, "some auth token")
assert_contains(out, "new optoken")
assert_contains(out, "new allaccess")
assert_contains(out, "new specifics")
'';
})

View File

@ -42,20 +42,27 @@ import ./make-test-python.nix ({ lib, ... }: {
machine.wait_for_open_port(9000)
machine.succeed("[[ -f /var/lib/listmonk/.db_settings_initialized ]]")
assert json.loads(machine.succeed(generate_listmonk_request("GET", 'health')))['data'], 'Health endpoint returned unexpected value'
# A sample subscriber is guaranteed to exist at install-time
# A sample transactional template is guaranteed to exist at install-time
subscribers = json.loads(machine.succeed(generate_listmonk_request('GET', "subscribers")))['data']['results']
templates = json.loads(machine.succeed(generate_listmonk_request('GET', "templates")))['data']
tx_template = templates[2]
# Test transactional endpoint
# subscriber_id=1 is guaranteed to exist at install-time
# template_id=2 is guaranteed to exist at install-time and is a transactional template (1 is a campaign template).
machine.succeed(
generate_listmonk_request('POST', 'tx', data={'subscriber_id': 1, 'template_id': 2})
)
assert 'Welcome John Doe' in machine.succeed(
print(machine.succeed(
generate_listmonk_request('POST', 'tx', data={'subscriber_id': subscribers[0]['id'], 'template_id': tx_template['id']})
))
assert 'Welcome Anon Doe' in machine.succeed(
"curl --fail http://localhost:8025/api/v2/messages"
)
), "Failed to find Welcome John Doe inside the messages API endpoint"
# Test campaign endpoint
# Based on https://github.com/knadh/listmonk/blob/174a48f252a146d7e69dab42724e3329dbe25ebe/cmd/campaigns.go#L549 as docs do not exist.
campaign_data = json.loads(machine.succeed(
generate_listmonk_request('POST', 'campaigns/1/test', data={'template_id': 1, 'subscribers': ['john@example.com'], 'name': 'Test', 'subject': 'NixOS is great', 'lists': [1], 'messenger': 'email'})
generate_listmonk_request('POST', 'campaigns/1/test', data={'template_id': templates[0]['id'], 'subscribers': ['john@example.com'], 'name': 'Test', 'subject': 'NixOS is great', 'lists': [1], 'messenger': 'email'})
))
assert campaign_data['data'] # This is a boolean asserting if the test was successful or not: https://github.com/knadh/listmonk/blob/174a48f252a146d7e69dab42724e3329dbe25ebe/cmd/campaigns.go#L626

View File

@ -61,14 +61,14 @@ in {
machine.wait_for_unit("lxd.service")
machine.wait_for_file("/var/lib/lxd/unix.socket")
# It takes additional second for lxd to settle
machine.sleep(1)
# Wait for lxd to settle
machine.succeed("lxd waitready")
# lxd expects the pool's directory to already exist
machine.succeed("mkdir /var/lxd-pool")
machine.succeed(
"cat ${./common/lxd/config.yaml} | lxd init --preseed"
"lxd init --minimal"
)
machine.succeed(

View File

@ -1,7 +1,7 @@
import ./make-test-python.nix ({ pkgs, lib, ... } :
import ../make-test-python.nix ({ pkgs, lib, ... } :
let
lxd-image = import ../release.nix {
lxd-image = import ../../release.nix {
configuration = {
# Building documentation makes the test unnecessarily take a longer time:
documentation.enable = lib.mkForce false;
@ -38,19 +38,18 @@ in {
};
testScript = ''
def instance_is_up(_) -> bool:
status, _ = machine.execute("lxc exec container --disable-stdin --force-interactive /run/current-system/sw/bin/true")
return status == 0
machine.wait_for_unit("sockets.target")
machine.wait_for_unit("lxd.service")
machine.wait_for_file("/var/lib/lxd/unix.socket")
# It takes additional second for lxd to settle
machine.sleep(1)
# Wait for lxd to settle
machine.succeed("lxd waitready")
# lxd expects the pool's directory to already exist
machine.succeed("mkdir /var/lxd-pool")
machine.succeed(
"cat ${./common/lxd/config.yaml} | lxd init --preseed"
)
machine.succeed("lxd init --minimal")
machine.succeed(
"lxc image import ${lxd-image-metadata}/*/*.tar.xz ${lxd-image-rootfs}/*/*.tar.xz --alias nixos"
@ -58,21 +57,23 @@ in {
with subtest("Container can be managed"):
machine.succeed("lxc launch nixos container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
machine.succeed("echo true | lxc exec container /run/current-system/sw/bin/bash -")
machine.succeed("lxc exec container true")
machine.succeed("lxc delete -f container")
with subtest("Container is mounted with lxcfs inside"):
machine.succeed("lxc launch nixos container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
## ---------- ##
## limits.cpu ##
machine.succeed("lxc config set container limits.cpu 1")
machine.succeed("lxc restart container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
assert (
"1"
@ -81,7 +82,8 @@ in {
machine.succeed("lxc config set container limits.cpu 2")
machine.succeed("lxc restart container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
assert (
"2"
@ -93,7 +95,8 @@ in {
machine.succeed("lxc config set container limits.memory 64MB")
machine.succeed("lxc restart container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
assert (
"MemTotal: 62500 kB"
@ -102,7 +105,8 @@ in {
machine.succeed("lxc config set container limits.memory 128MB")
machine.succeed("lxc restart container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
assert (
"MemTotal: 125000 kB"

View File

@ -0,0 +1,9 @@
{
system ? builtins.currentSystem,
config ? {},
pkgs ? import ../../.. {inherit system config;},
}: {
container = import ./container.nix {inherit system pkgs;};
nftables = import ./nftables.nix {inherit system pkgs;};
ui = import ./ui.nix {inherit system pkgs;};
}

View File

@ -5,7 +5,7 @@
# iptables to nftables requires a full reboot, which is a bit hard inside NixOS
# tests.
import ./make-test-python.nix ({ pkgs, ...} : {
import ../make-test-python.nix ({ pkgs, ...} : {
name = "lxd-nftables";
meta = with pkgs.lib.maintainers; {

View File

@ -1,4 +1,4 @@
import ./make-test-python.nix ({ pkgs, lib, ... }: {
import ../make-test-python.nix ({ pkgs, lib, ... }: {
name = "lxd-ui";
meta = with pkgs.lib.maintainers; {

View File

@ -84,17 +84,6 @@ in
test_as_regular_in_userns_mapped_as_root('/run/wrappers/bin/sgid_root_busybox id -g', '0')
test_as_regular_in_userns_mapped_as_root('/run/wrappers/bin/sgid_root_busybox id -rg', '0')
# Test that in nonewprivs environment the wrappers simply exec their target.
test_as_regular('${pkgs.util-linux}/bin/setpriv --no-new-privs /run/wrappers/bin/suid_root_busybox id -u', '${toString userUid}')
test_as_regular('${pkgs.util-linux}/bin/setpriv --no-new-privs /run/wrappers/bin/suid_root_busybox id -ru', '${toString userUid}')
test_as_regular('${pkgs.util-linux}/bin/setpriv --no-new-privs /run/wrappers/bin/suid_root_busybox id -g', '${toString usersGid}')
test_as_regular('${pkgs.util-linux}/bin/setpriv --no-new-privs /run/wrappers/bin/suid_root_busybox id -rg', '${toString usersGid}')
test_as_regular('${pkgs.util-linux}/bin/setpriv --no-new-privs /run/wrappers/bin/sgid_root_busybox id -u', '${toString userUid}')
test_as_regular('${pkgs.util-linux}/bin/setpriv --no-new-privs /run/wrappers/bin/sgid_root_busybox id -ru', '${toString userUid}')
test_as_regular('${pkgs.util-linux}/bin/setpriv --no-new-privs /run/wrappers/bin/sgid_root_busybox id -g', '${toString usersGid}')
test_as_regular('${pkgs.util-linux}/bin/setpriv --no-new-privs /run/wrappers/bin/sgid_root_busybox id -rg', '${toString usersGid}')
# We are only testing the permitted set, because it's easiest to look at with capsh.
machine.fail(cmd_as_regular('${pkgs.libcap}/bin/capsh --has-p=CAP_CHOWN'))
machine.fail(cmd_as_regular('${pkgs.libcap}/bin/capsh --has-p=CAP_SYS_ADMIN'))

View File

@ -23,7 +23,24 @@ See the [CONTRIBUTING.md](../CONTRIBUTING.md) document for more general informat
## Quick Start to Adding a Package
To add a package to Nixpkgs:
We welcome new contributors of new packages to Nixpkgs, arguably the greatest software database known. However, each new package comes with a cost for the maintainers, Continuous Integration, caching servers and users downloading Nixpkgs.
Before adding a new package, please consider the following questions:
* Is the package ready for general use? We don't want to include projects that are too immature or are going to be abandoned immediately. In case of doubt, check with upstream.
* Does the project have a clear license statement? Remember that software is unfree by default (all rights reserved), and merely providing access to the source code does not in itself permit redistribution. In case of doubt, ask upstream.
* How realistic is it that it will be used by other people? It's good that nixpkgs caters to various niches, but if it's a niche of 5 people it's probably too small.
* Are you willing to maintain the package? You should care enough about the package to be willing to keep it up and running for at least one complete Nixpkgs' release life-cycle.
If the answer to any of these questions is no, then you should probably not add the package.
This section describes a general framework of understanding, and exceptions might apply.
Luckily it's pretty easy to maintain your own package set with Nix, which can then be added to the [Nix User Repository](https://github.com/nix-community/nur) project.
---
Now that this is out of the way, to add a package to Nixpkgs:
1. Checkout the Nixpkgs source tree:
@ -67,7 +84,9 @@ To add a package to Nixpkgs:
Some notes:
- All [`meta`](https://nixos.org/manual/nixpkgs/stable/#chap-meta) attributes are optional, but its still a good idea to provide at least the `description`, `homepage` and [`license`](https://nixos.org/manual/nixpkgs/stable/#sec-meta-license).
- Add yourself as the maintainer of the package.
- All other [`meta`](https://nixos.org/manual/nixpkgs/stable/#chap-meta) attributes are optional, but it's still a good idea to provide at least the `description`, `homepage` and [`license`](https://nixos.org/manual/nixpkgs/stable/#sec-meta-license).
- You can use `nix-prefetch-url url` to get the SHA-256 hash of source distributions. Similar commands, such as `nix-prefetch-git` and `nix-prefetch-hg`, are available in the `nix-prefetch-scripts` package.
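
As a rough illustration of the notes above, a minimal package expression might look like the following sketch (pname, version, URL, hash and maintainer handle are placeholders to fill in):

{ lib, stdenv, fetchurl }:

stdenv.mkDerivation rec {
  pname = "libfoo";
  version = "1.2.3";

  src = fetchurl {
    url = "https://example.org/libfoo-${version}.tar.gz";
    # Obtain the hash with nix-prefetch-url; alternatively leave it empty and
    # copy the correct value from the resulting hash-mismatch error.
    hash = "";
  };

  meta = with lib; {
    description = "Example library, packaged for demonstration";
    homepage = "https://example.org";
    license = licenses.mit;
    maintainers = with maintainers; [ yourGithubHandle ];
  };
}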

View File

@ -52,13 +52,13 @@
mkDerivation rec {
pname = "mixxx";
version = "2.3.5";
version = "2.3.6";
src = fetchFromGitHub {
owner = "mixxxdj";
repo = "mixxx";
rev = version;
sha256 = "sha256-NAp7RoYSI6BRw7C0ejW4pCJJYx9BG8D+BGVCVTDrggQ=";
hash = "sha256-VdgCsd/7vMFUleOU0ESoZDQ8yhQSsLZADVi4XI76Ouw=";
};
nativeBuildInputs = [ cmake pkg-config ];

View File

@ -63,6 +63,22 @@ self: let
popd
'';
});
xeft = super.xeft.overrideAttrs (old: let
libExt = pkgs.stdenv.targetPlatform.extensions.sharedLibrary;
in {
dontUnpack = false;
buildInputs = (old.buildInputs or [ ]) ++ [ pkgs.xapian ];
buildPhase = (old.buildPhase or "") + ''
$CXX -shared -o xapian-lite${libExt} xapian-lite.cc $NIX_CFLAGS_COMPILE -lxapian
'';
postInstall = (old.postInstall or "") + "\n" + ''
outd=$out/share/emacs/site-lisp/elpa/xeft-*
install -m444 -t $outd xapian-lite${libExt}
rm $outd/xapian-lite.cc $outd/emacs-module.h $outd/emacs-module-prelude.h $outd/demo.gif $outd/Makefile
'';
});
};
elpaDevelPackages = super // overrides;

View File

@ -158,6 +158,23 @@ self: let
}
);
xeft = super.xeft.overrideAttrs (old: let
libExt = pkgs.stdenv.targetPlatform.extensions.sharedLibrary;
in {
dontUnpack = false;
buildInputs = (old.buildInputs or [ ]) ++ [ pkgs.xapian ];
buildPhase = (old.buildPhase or "") + ''
$CXX -shared -o xapian-lite${libExt} xapian-lite.cc $NIX_CFLAGS_COMPILE -lxapian
'';
postInstall = (old.postInstall or "") + "\n" + ''
outd=$out/share/emacs/site-lisp/elpa/xeft-*
install -m444 -t $outd xapian-lite${libExt}
rm $outd/xapian-lite.cc $outd/emacs-module.h $outd/emacs-module-prelude.h $outd/demo.gif $outd/Makefile
'';
});
};
elpaPackages = super // overrides;

View File

@ -2,12 +2,12 @@
stdenv.mkDerivation rec {
pname = "kakoune-unwrapped";
version = "2022.10.31";
version = "2023.08.05";
src = fetchFromGitHub {
repo = "kakoune";
owner = "mawww";
rev = "v${version}";
sha256 = "sha256-vmzGaGl0KSjseSD/s6DXxvMUTmAle+Iv/ZP9llaFnXk=";
sha256 = "sha256-RR3kw39vEjsg+6cIY6cK2i3ecGHlr1yzuBKaDtGlOGo=";
};
makeFlags = [ "debug=no" "PREFIX=${placeholder "out"}" ];

View File

@ -1635,7 +1635,6 @@ self: super: {
"coc-haxe"
"coc-highlight"
"coc-html"
"coc-imselect"
"coc-java"
"coc-jest"
"coc-json"

View File

@ -14,19 +14,19 @@
stdenv.mkDerivation rec {
pname = "drawio";
version = "21.6.1";
version = "21.6.8";
src = fetchFromGitHub {
owner = "jgraph";
repo = "drawio-desktop";
rev = "v${version}";
fetchSubmodules = true;
hash = "sha256-60fOecWDYGkn4rJzxmum14L4IAaHAG+uKyjNo9nkVHg=";
hash = "sha256-k16npV8N4zPIXjc8ZJcQHgv76h2VhbqtT2ZCzDqkF8U";
};
offlineCache = fetchYarnDeps {
yarnLock = src + "/yarn.lock";
hash = "sha256-Knk9ys8Kjk1QOl80vmIA2H6wP8Mj6iNcmb/bR4zMQgw=";
hash = "sha256-rJvwXhtO/HsfpbDyOh+jFc6E9wQ+sZMT8vnhJpGlkF8";
};
nativeBuildInputs = [

View File

@ -1,28 +1,26 @@
{ lib, mkDerivation, fetchpatch, fetchFromGitHub, cmake, qttools, qtwebkit }:
{ lib
, mkDerivation
, fetchFromGitHub
, cmake
, qttools
, qtwebkit
}:
mkDerivation rec {
pname = "fontmatrix";
version = "0.6.0-qt5";
version = "0.9.100";
src = fetchFromGitHub {
owner = "fcoiffie";
owner = "fontmatrix";
repo = "fontmatrix";
rev = "1ff8382d8c85c18d9962918f461341ff4fe21993";
sha256 = "0yx1gbsjj9ddq1kiqplif1w5x5saw250zbmhmd4phqmaqzr60w0h";
rev = "v${version}";
sha256 = "sha256-DtajGhx79DiecglXHja9q/TKVq8Jl2faQdA5Ib/yT88=";
};
# Add missing QAction include
patches = [ (fetchpatch {
url = "https://github.com/fcoiffie/fontmatrix/commit/dc6de8c414ae21516b72daead79c8db88309b102.patch";
sha256 = "092860fdyf5gq67jqfxnlgwzjgpizi6j0njjv3m62aiznrhig7c8";
})];
buildInputs = [ qttools qtwebkit ];
nativeBuildInputs = [ cmake ];
hardeningDisable = [ "format" ];
meta = with lib; {
description = "Fontmatrix is a free/libre font explorer for Linux, Windows and Mac";
homepage = "https://github.com/fontmatrix/fontmatrix";

View File

@ -27,8 +27,8 @@ mkDerivation rec {
src = fetchFromGitHub {
owner = "cnr-isti-vclab";
repo = "meshlab";
rev = "Meshlab-${version}";
sha256 = "sha256-MP+jkiV6yS1T1eWClxM56kZWLXwu0g4w/zBHy6CSL6Y=";
rev = "MeshLab-${version}";
sha256 = "sha256-jcc3PfsiIeYyipteZgzd0NwZgFFgR/mMBiaInzhOcDY=";
fetchSubmodules = true; # for vcglib
};

View File

@ -27,6 +27,11 @@
, freeglut
, libGLU
, xcbuild
# for passthru.tests
, cups-filters
, python3
, zathura
}:
let
@ -146,6 +151,11 @@ stdenv.mkDerivation rec {
enableParallelBuilding = true;
passthru.tests = {
inherit cups-filters zathura;
inherit (python3.pkgs) pikepdf pymupdf;
};
meta = with lib; {
homepage = "https://mupdf.com";
description = "Lightweight PDF, XPS, and E-book viewer and toolkit written in portable C";

View File

@ -100,7 +100,7 @@ let
filetype
flask
flask-babel
flask_assets
flask-assets
flask-login
flask-limiter
frozendict

View File

@ -24,7 +24,7 @@ let
alembic = super.alembic.overridePythonAttrs (lib.const {
doCheck = false;
});
flask_migrate = super.flask_migrate.overridePythonAttrs (oldAttrs: rec {
flask-migrate = super.flask-migrate.overridePythonAttrs (oldAttrs: rec {
version = "2.7.0";
src = fetchPypi {
pname = "Flask-Migrate";
@ -198,8 +198,8 @@ python3'.pkgs.buildPythonPackage rec {
];
propagatedBuildInputs = with python3'.pkgs; [
cryptography pyrad pymysql python-dateutil flask-versioned flask_script
defusedxml croniter flask_migrate pyjwt configobj sqlsoup pillow
cryptography pyrad pymysql python-dateutil flask-versioned flask-script
defusedxml croniter flask-migrate pyjwt configobj sqlsoup pillow
python-gnupg passlib pyopenssl beautifulsoup4 smpplib flask-babel
ldap3 huey pyyaml qrcode oauth2client requests lxml cbor2 psycopg2
pydash ecdsa google-auth importlib-metadata argon2-cffi bcrypt segno

View File

@ -0,0 +1,58 @@
{ lib
, stdenv
, python3
, fetchFromGitHub
, qt6
}:
python3.pkgs.buildPythonApplication {
pname = "retool";
version = "unstable-2023-08-24";
format = "pyproject";
disabled = python3.pkgs.pythonOlder "3.10";
src = fetchFromGitHub {
owner = "unexpectedpanda";
repo = "retool";
rev = "d8acdb960d35b5a6b01d7dc66b7e40b3ec451301";
hash = "sha256-6y/7RR7O2xYKXdxaFtkRfnSlwygp/LRDUozUJo6ue7s=";
};
patches = [
# pythonRelaxDepsHook will not work in this package until
# https://github.com/NixOS/nixpkgs/pull/248516 hits master
./relax_deps.patch
];
nativeBuildInputs = with python3.pkgs; [
poetry-core
qt6.wrapQtAppsHook
];
buildInputs = [
qt6.qtbase
] ++
lib.optionals (stdenv.isLinux) [
qt6.qtwayland
];
propagatedBuildInputs = with python3.pkgs; [
alive-progress
lxml
psutil
validators
pyside6
strictyaml
];
# Upstream has no tests
doCheck = false;
meta = with lib; {
description = "A better filter tool for Redump and No-Intro dats";
homepage = "https://github.com/unexpectedpanda/retool";
license = licenses.bsd3;
maintainers = with maintainers; [ thiagokokada ];
};
}

View File

@ -0,0 +1,25 @@
diff --git a/pyproject.toml b/pyproject.toml
index 6e62347..34600a0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,12 +13,14 @@ packages = [
[tool.poetry.dependencies]
python = ">=3.10,<3.12"
-strictyaml = "^1.6.2"
-lxml = "^4.9.2"
-PySide6 = "^6.4.2"
-validators = "^0.20.0"
-alive-progress = "^3.0.1"
-psutil = "^5.9.4"
+strictyaml = "*"
+lxml = "*"
+# ERROR: Could not find a version that satisfies the requirement PySide6 (from retool) (from versions: none)
+# ERROR: No matching distribution found for PySide6
+# PySide6 = "*"
+validators = "*"
+alive-progress = "*"
+psutil = "*"
[tool.poetry.scripts]
retool = "retool:main"

View File

@ -0,0 +1,41 @@
{ lib
, rustPlatform
, fetchFromGitHub
, stdenv
, darwin
, xorg
}:
rustPlatform.buildRustPackage rec {
pname = "safecloset";
version = "1.3.0";
src = fetchFromGitHub {
owner = "Canop";
repo = "safecloset";
rev = "v${version}";
hash = "sha256-HY8HaWGsqKUsqNLFpxrGJvAcVD68fqKX2v7xCiEKuDM=";
};
cargoHash = "sha256-geZoqfPgYUd4X903EM7+gq/VPvIClAmlC0nkqWriB0M=";
buildInputs = lib.optionals stdenv.isDarwin [
darwin.apple_sdk.frameworks.AppKit
] ++ lib.optionals stdenv.isLinux [
xorg.libxcb
];
checkFlags = [
# skip flaky test
"--skip=timer::timer_tests::test_timer_reset"
];
meta = with lib; {
description = "Cross-platform secure TUI secret locker";
homepage = "https://github.com/Canop/safecloset";
changelog = "https://github.com/Canop/safecloset/blob/${src.rev}/CHANGELOG.md";
license = licenses.agpl3Only;
maintainers = with maintainers; [ figsoda ];
mainProgram = "safecloset";
};
}

View File

@ -44,6 +44,7 @@
, cavaSupport ? true
, evdevSupport ? true
, experimentalPatches ? true
, hyprlandSupport ? true
, inputSupport ? true
, jackSupport ? true
@ -162,7 +163,7 @@ stdenv.mkDerivation (finalAttrs: {
"tests" = runTests;
"upower_glib" = upowerSupport;
"wireplumber" = wireplumberSupport;
});
}) ++ lib.optional experimentalPatches (lib.mesonBool "experimental" true);
preFixup = lib.optionalString withMediaPlayer ''
cp $src/resources/custom_modules/mediaplayer.py $out/bin/waybar-mediaplayer.py

View File

@ -7,13 +7,13 @@
buildGoModule rec {
pname = "cloudflared";
version = "2023.7.3";
version = "2023.8.0";
src = fetchFromGitHub {
owner = "cloudflare";
repo = "cloudflared";
rev = "refs/tags/${version}";
hash = "sha256-Sv6f12XXVHIi97Ows1hsqAeb4z+ZtVM5B0v0Xz/b5iY=";
hash = "sha256-2gBVq7qCOJtSPbE1Yiq7J3roAku1ax+vfZ0SZ8WVHz8=";
};
vendorHash = null;
@ -78,7 +78,7 @@ buildGoModule rec {
changelog = "https://github.com/cloudflare/cloudflared/releases/tag/${version}";
license = licenses.asl20;
platforms = platforms.unix ++ platforms.windows;
maintainers = with maintainers; [ bbigras enorris thoughtpolice piperswe ];
maintainers = with maintainers; [ bbigras enorris thoughtpolice piperswe qjoly ];
mainProgram = "cloudflared";
};
}

View File

@ -2,13 +2,13 @@
buildGoModule rec {
pname = "cilium-cli";
version = "0.14.3";
version = "0.15.6";
src = fetchFromGitHub {
owner = "cilium";
repo = pname;
rev = "v${version}";
sha256 = "sha256-vH+wQ3pPz70jc3QzD/0vbKMqQtYak8UGoQmEgeYbFbk=";
hash = "sha256-0y07EPF/1oq4FqiJFNZgnUeesJzU0+jxlQ4zg1M5Xzk=";
};
vendorHash = null;
@ -17,7 +17,7 @@ buildGoModule rec {
ldflags = [
"-s" "-w"
"-X github.com/cilium/cilium-cli/internal/cli/cmd.Version=${version}"
"-X github.com/cilium/cilium-cli/cli.Version=${version}"
];
# Required to workaround install check error:
@ -41,7 +41,7 @@ buildGoModule rec {
description = "CLI to install, manage & troubleshoot Kubernetes clusters running Cilium";
license = licenses.asl20;
homepage = "https://www.cilium.io/";
maintainers = with maintainers; [ humancalico bryanasdev000 ];
maintainers = with maintainers; [ humancalico bryanasdev000 qjoly ];
mainProgram = "cilium";
};
}

View File

@ -9,14 +9,14 @@
"vendorHash": null
},
"acme": {
"hash": "sha256-azNFQ4U7iGIKLingq4GItjXvdcsm0YkrQ4PRvEeDjVU=",
"hash": "sha256-5KR32V4sE5AkOVroLmelNBzBZpD4KfhC491X+5eo+n8=",
"homepage": "https://registry.terraform.io/providers/vancluever/acme",
"owner": "vancluever",
"proxyVendor": true,
"repo": "terraform-provider-acme",
"rev": "v2.16.1",
"rev": "v2.17.0",
"spdx": "MPL-2.0",
"vendorHash": "sha256-9F853+GHfwGH0JQRLawLEB8X76z/Xll1Aa4+vBRWk1o="
"vendorHash": "sha256-UIV0dIoRZxNiaEq1HGPIV4mFLn4pAoGPo6tx6zV3r3A="
},
"age": {
"hash": "sha256-bJrzjvkrCX93bNqCA+FdRibHnAw6cb61StqtwUY5ok4=",
@ -28,13 +28,13 @@
"vendorHash": "sha256-jK7JuARpoxq7hvq5+vTtUwcYot0YqlOZdtDwq4IqKvk="
},
"aiven": {
"hash": "sha256-Nm5flY+BN9PpQY+4LyohFwDfdEPxfVpT/rkfn8aLQyI=",
"hash": "sha256-3agD22viTP+yntNg2nyYi5OpknXnfI2Jk/xEcvXgia8=",
"homepage": "https://registry.terraform.io/providers/aiven/aiven",
"owner": "aiven",
"repo": "terraform-provider-aiven",
"rev": "v4.8.0",
"rev": "v4.8.2",
"spdx": "MIT",
"vendorHash": "sha256-eScN0by/rnCf4+p4g3yhz2kJRyfFyqlVi+0MJXPdzKw="
"vendorHash": "sha256-sVPby/MLAgU7DfBDACqxvkLWblBhisHcUaoOgR3fMaM="
},
"akamai": {
"hash": "sha256-LGgZF2/YCYpoDOSu0UeuPqK9wGXrvPQE4WUGGS0sx30=",
@ -182,13 +182,13 @@
"vendorHash": "sha256-/dOiXO2aPkuZaFiwv/6AXJdIADgx8T7eOwvJfBBoqg8="
},
"buildkite": {
"hash": "sha256-nDJ4XsWvielQYqShBav7g/pZyDcU0jqgemXUqaNJHnA=",
"hash": "sha256-xojTeS+p9XG+wO9thmrSOWrizF56FCg+nwRBdaXqr/4=",
"homepage": "https://registry.terraform.io/providers/buildkite/buildkite",
"owner": "buildkite",
"repo": "terraform-provider-buildkite",
"rev": "v0.25.0",
"rev": "v0.25.1",
"spdx": "MIT",
"vendorHash": "sha256-C/jT+vcZat8UHXgOhtj+gyl8ttCEb564byp/npI2Ei8="
"vendorHash": "sha256-V2BsVBhtdPOT9iseWPhPTOrUe4iMhq4YUiBWd0ne5Xg="
},
"checkly": {
"hash": "sha256-tOTrAi6hd4HFbHAj0p/LTYdxQl1R1WuQ9L4hzqmDVqI=",
@ -218,13 +218,13 @@
"vendorHash": "sha256-qIgr+ynaNSfNx1iW5RJrNHvEnlr46dBzIi+5IXYn+3Q="
},
"cloudflare": {
"hash": "sha256-ayxekJkQt/7K/qwMKvjqkyVkux5+Jw3uyepmaiy3Ptc=",
"hash": "sha256-l1cTzPiOOLyvbvbt7dWR9lRgqVFiO5gRq4XNnLqvac0=",
"homepage": "https://registry.terraform.io/providers/cloudflare/cloudflare",
"owner": "cloudflare",
"repo": "terraform-provider-cloudflare",
"rev": "v4.12.0",
"rev": "v4.13.0",
"spdx": "MPL-2.0",
"vendorHash": "sha256-VTSbi2pDllzyKDhWs5EpWSXO5oKl+khVqLg/Ro3x8ys="
"vendorHash": "sha256-uZ0zc+/RmEiqxBSZLgLPmwN29BEJitPN13HE88zPxcI="
},
"cloudfoundry": {
"hash": "sha256-yEqsdgTSlwppt6ILRZQ6Epyh5WVN6Il3xsBOa/NfIdo=",
@ -282,13 +282,13 @@
"vendorHash": "sha256-ZCMSmOCPEMxCSpl3DjIUGPj1W/KNJgyjtHpmQ19JquA="
},
"datadog": {
"hash": "sha256-sytQJgrfgtJ761mGo0KUTxAukqvmPYyLM8+vsYGtoZc=",
"hash": "sha256-FAqtbze6Lw6SCU84R6aB0oY+pcWyDBdTZRBZhM2pjyc=",
"homepage": "https://registry.terraform.io/providers/DataDog/datadog",
"owner": "DataDog",
"repo": "terraform-provider-datadog",
"rev": "v3.28.0",
"rev": "v3.29.0",
"spdx": "MPL-2.0",
"vendorHash": "sha256-foS7GyRUdhF/M8uTPf2I4WQo7qEg4Z/3FXjagoeSRkU="
"vendorHash": "sha256-UJRuj5qmWWjkqMBGf0500//83ky0Dxx04IQETPwwSsw="
},
"dexidp": {
"hash": "sha256-69r3m3lIKftZQ8NXBD5KEHbsNUwCGpFgn/CYO+921M4=",
@ -363,13 +363,13 @@
"vendorHash": "sha256-oVTanZpCWs05HwyIKW2ajiBPz1HXOFzBAt5Us+EtTRw="
},
"equinix": {
"hash": "sha256-MEsE1OQwKjd1Y+Ek7UmZMbLq4x84iQW40sMl78UbW2c=",
"hash": "sha256-SSCKl0etImK9dXhq9ycQi/U38cZ+SuaoBiaeeg/+JDA=",
"homepage": "https://registry.terraform.io/providers/equinix/equinix",
"owner": "equinix",
"repo": "terraform-provider-equinix",
"rev": "v1.14.7",
"rev": "v1.15.0",
"spdx": "MIT",
"vendorHash": "sha256-cfJG0DJJJX85ISz7dSZ+di1uhgJJd5xUH99PhqGMPgw="
"vendorHash": "sha256-7oLAF+HpL/eNN6KXYp8zA9Yu6h5S+XrWJN4dE3B9H58="
},
"exoscale": {
"hash": "sha256-93pCsHrsYLJYgg8MXHz2Gg+vaPC9gcHdLastb89/BMg=",
@ -827,11 +827,11 @@
"vendorHash": "sha256-LRIfxQGwG988HE5fftGl6JmBG7tTknvmgpm4Fu1NbWI="
},
"oci": {
"hash": "sha256-sxhykS4pXF00VJVtVd7kO2GasAqBUUMqPDPLE3BzUFI=",
"hash": "sha256-S+gHfQsqnOlegd5JcuBOUKO7fynWQAWCZGrlqjY03e0=",
"homepage": "https://registry.terraform.io/providers/oracle/oci",
"owner": "oracle",
"repo": "terraform-provider-oci",
"rev": "v5.9.0",
"rev": "v5.10.0",
"spdx": "MPL-2.0",
"vendorHash": null
},


@ -167,8 +167,8 @@ rec {
mkTerraform = attrs: pluggable (generic attrs);
terraform_1 = mkTerraform {
version = "1.5.5";
hash = "sha256-SBS3a/CIUdyIUJvc+rANIs+oXCQgfZut8b0517QKq64=";
version = "1.5.6";
hash = "sha256-vbV8Tmas7n1o8Q+DG9RrcfdAMa4bJsVg2SsTFH1TJ5M=";
vendorHash = "sha256-lQgWNMBf+ioNxzAV7tnTQSIS840XdI9fg9duuwoK+U4=";
patches = [ ./provider-path-0_15.patch ];
passthru = {


@ -5,23 +5,23 @@
buildGoModule rec {
pname = "terragrunt";
version = "0.48.6";
version = "0.50.6";
src = fetchFromGitHub {
owner = "gruntwork-io";
repo = pname;
rev = "refs/tags/v${version}";
hash = "sha256-pvDZfKITFrhtLnewAhRGjwu45zj2q3usNSr9U2xb52Y=";
hash = "sha256-h6Qz27zWAN0mxDok2dpxlE0qLP2ECwMjiCZxg+9T/dw=";
};
vendorHash = "sha256-5Umoqi2D6iUk2Ut7YB/nmkOyA6Rx2qFhy/ZbfqoX5qA=";
vendorHash = "sha256-ZpLQcWi3qYTsy6BUZbHFFmhWG6CWqcb/NuzPLOUtKfs=";
doCheck = false;
ldflags = [
"-s"
"-w"
"-X main.VERSION=v${version}"
"-X github.com/gruntwork-io/go-commons/version.Version=v${version}"
];
doInstallCheck = true;


@ -21,11 +21,11 @@
python3.pkgs.buildPythonApplication rec {
pname = "gajim";
version = "1.8.0";
version = "1.8.1";
src = fetchurl {
url = "https://gajim.org/downloads/${lib.versions.majorMinor version}/gajim-${version}.tar.gz";
hash = "sha256-EgH8mt0am2l9z4csGHH6rpLqTzFiBRzOPB4NCEP8TUM=";
hash = "sha256-Erh7tR6WX8pt89PRicgbVZd8CLlv18Vyq44O+ZnJVzU=";
};
format = "pyproject";


@ -70,6 +70,12 @@ version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd"
[[package]]
name = "allocator-api2"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
[[package]]
name = "android-activity"
version = "0.4.1"
@ -161,13 +167,10 @@ dependencies = [
]
[[package]]
name = "bincode"
version = "1.3.3"
name = "base64"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
dependencies = [
"serde",
]
checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d"
[[package]]
name = "bit-set"
@ -452,8 +455,9 @@ dependencies = [
[[package]]
name = "cosmic-text"
version = "0.8.0"
source = "git+https://github.com/hecrj/cosmic-text.git?rev=e8b10fd675832cb9c1cc9de30922beb4cf883876#e8b10fd675832cb9c1cc9de30922beb4cf883876"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0b68966c2543609f8d92f9d33ac3b719b2a67529b0c6c0b3e025637b477eef9"
dependencies = [
"aliasable",
"fontdb",
@ -542,7 +546,7 @@ dependencies = [
name = "data"
version = "0.1.0"
dependencies = [
"bincode",
"base64",
"chrono",
"dirs-next",
"flate2",
@ -556,6 +560,7 @@ dependencies = [
"rand_chacha",
"seahash",
"serde",
"serde_json",
"serde_yaml",
"thiserror",
"tokio",
@ -623,70 +628,6 @@ dependencies = [
"winreg",
]
[[package]]
name = "encoding"
version = "0.2.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b0d943856b990d12d3b55b359144ff341533e516d94098b1d3fc1ac666d36ec"
dependencies = [
"encoding-index-japanese",
"encoding-index-korean",
"encoding-index-simpchinese",
"encoding-index-singlebyte",
"encoding-index-tradchinese",
]
[[package]]
name = "encoding-index-japanese"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04e8b2ff42e9a05335dbf8b5c6f7567e5591d0d916ccef4e0b1710d32a0d0c91"
dependencies = [
"encoding_index_tests",
]
[[package]]
name = "encoding-index-korean"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dc33fb8e6bcba213fe2f14275f0963fd16f0a02c878e3095ecfdf5bee529d81"
dependencies = [
"encoding_index_tests",
]
[[package]]
name = "encoding-index-simpchinese"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d87a7194909b9118fc707194baa434a4e3b0fb6a5a757c73c3adb07aa25031f7"
dependencies = [
"encoding_index_tests",
]
[[package]]
name = "encoding-index-singlebyte"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3351d5acffb224af9ca265f435b859c7c01537c0849754d3db3fdf2bfe2ae84a"
dependencies = [
"encoding_index_tests",
]
[[package]]
name = "encoding-index-tradchinese"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd0e20d5688ce3cab59eb3ef3a2083a5c77bf496cb798dc6fcdb75f323890c18"
dependencies = [
"encoding_index_tests",
]
[[package]]
name = "encoding_index_tests"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a246d82be1c9d791c5dfde9a2bd045fc3cbba3fa2b11ad558f27d01712f00569"
[[package]]
name = "errno"
version = "0.3.1"
@ -720,9 +661,9 @@ dependencies = [
[[package]]
name = "etagere"
version = "0.2.7"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6301151a318f367f392c31395beb1cfba5ccd9abc44d1db0db3a4b27b9601c89"
checksum = "fcf22f748754352918e082e0039335ee92454a5d62bcaf69b5e8daf5907d9644"
dependencies = [
"euclid",
"svg_fmt",
@ -939,9 +880,9 @@ dependencies = [
[[package]]
name = "getrandom"
version = "0.2.9"
version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4"
checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427"
dependencies = [
"cfg-if",
"js-sys",
@ -986,8 +927,9 @@ dependencies = [
[[package]]
name = "glyphon"
version = "0.2.0"
source = "git+https://github.com/hecrj/glyphon.git?rev=8dbf36020e5759fa9144517b321372266160113e#8dbf36020e5759fa9144517b321372266160113e"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e87caa7459145f5e5f167bf34db4532901404c679e62339fb712a0e3ccf722a"
dependencies = [
"cosmic-text",
"etagere",
@ -1059,9 +1001,9 @@ dependencies = [
[[package]]
name = "half"
version = "2.3.0"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9906a89f1724975a455316ae0554ceaa45ad83bb336f1125a87bfbdb9197cfa0"
checksum = "bc52e53916c08643f1b56ec082790d1e86a32e58dc5268f897f313fbae7b4872"
dependencies = [
"cfg-if",
"crunchy",
@ -1076,7 +1018,9 @@ dependencies = [
"embed-resource",
"fern",
"iced",
"image",
"log",
"once_cell",
"open",
"palette",
"thiserror",
@ -1096,11 +1040,12 @@ dependencies = [
[[package]]
name = "hashbrown"
version = "0.13.2"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e"
checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
dependencies = [
"ahash 0.8.3",
"allocator-api2",
]
[[package]]
@ -1135,9 +1080,9 @@ dependencies = [
[[package]]
name = "hermit-abi"
version = "0.3.1"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286"
checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b"
[[package]]
name = "hexf-parse"
@ -1171,7 +1116,7 @@ dependencies = [
[[package]]
name = "iced"
version = "0.9.0"
source = "git+https://github.com/tarkah/iced?rev=d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65#d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65"
source = "git+https://github.com/iced-rs/iced?rev=78dc341ea82449f1e075e37e67c1ccf66b88e8d6#78dc341ea82449f1e075e37e67c1ccf66b88e8d6"
dependencies = [
"iced_core",
"iced_futures",
@ -1185,7 +1130,7 @@ dependencies = [
[[package]]
name = "iced_core"
version = "0.9.0"
source = "git+https://github.com/tarkah/iced?rev=d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65#d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65"
source = "git+https://github.com/iced-rs/iced?rev=78dc341ea82449f1e075e37e67c1ccf66b88e8d6#78dc341ea82449f1e075e37e67c1ccf66b88e8d6"
dependencies = [
"bitflags 1.3.2",
"instant",
@ -1198,7 +1143,7 @@ dependencies = [
[[package]]
name = "iced_futures"
version = "0.6.0"
source = "git+https://github.com/tarkah/iced?rev=d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65#d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65"
source = "git+https://github.com/iced-rs/iced?rev=78dc341ea82449f1e075e37e67c1ccf66b88e8d6#78dc341ea82449f1e075e37e67c1ccf66b88e8d6"
dependencies = [
"futures",
"iced_core",
@ -1211,28 +1156,29 @@ dependencies = [
[[package]]
name = "iced_graphics"
version = "0.8.0"
source = "git+https://github.com/tarkah/iced?rev=d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65#d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65"
source = "git+https://github.com/iced-rs/iced?rev=78dc341ea82449f1e075e37e67c1ccf66b88e8d6#78dc341ea82449f1e075e37e67c1ccf66b88e8d6"
dependencies = [
"bitflags 1.3.2",
"bytemuck",
"glam",
"half",
"iced_core",
"image",
"kamadak-exif",
"log",
"raw-window-handle",
"thiserror",
"tiny-skia 0.9.1",
]
[[package]]
name = "iced_renderer"
version = "0.1.0"
source = "git+https://github.com/tarkah/iced?rev=d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65#d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65"
source = "git+https://github.com/iced-rs/iced?rev=78dc341ea82449f1e075e37e67c1ccf66b88e8d6#78dc341ea82449f1e075e37e67c1ccf66b88e8d6"
dependencies = [
"iced_graphics",
"iced_tiny_skia",
"iced_wgpu",
"log",
"raw-window-handle",
"thiserror",
]
@ -1240,7 +1186,7 @@ dependencies = [
[[package]]
name = "iced_runtime"
version = "0.1.0"
source = "git+https://github.com/tarkah/iced?rev=d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65#d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65"
source = "git+https://github.com/iced-rs/iced?rev=78dc341ea82449f1e075e37e67c1ccf66b88e8d6#78dc341ea82449f1e075e37e67c1ccf66b88e8d6"
dependencies = [
"iced_core",
"iced_futures",
@ -1250,7 +1196,7 @@ dependencies = [
[[package]]
name = "iced_style"
version = "0.8.0"
source = "git+https://github.com/tarkah/iced?rev=d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65#d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65"
source = "git+https://github.com/iced-rs/iced?rev=78dc341ea82449f1e075e37e67c1ccf66b88e8d6#78dc341ea82449f1e075e37e67c1ccf66b88e8d6"
dependencies = [
"iced_core",
"once_cell",
@ -1260,7 +1206,7 @@ dependencies = [
[[package]]
name = "iced_tiny_skia"
version = "0.1.0"
source = "git+https://github.com/tarkah/iced?rev=d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65#d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65"
source = "git+https://github.com/iced-rs/iced?rev=78dc341ea82449f1e075e37e67c1ccf66b88e8d6#78dc341ea82449f1e075e37e67c1ccf66b88e8d6"
dependencies = [
"bytemuck",
"cosmic-text",
@ -1270,14 +1216,14 @@ dependencies = [
"raw-window-handle",
"rustc-hash",
"softbuffer",
"tiny-skia 0.9.1",
"tiny-skia 0.10.0",
"twox-hash",
]
[[package]]
name = "iced_wgpu"
version = "0.10.0"
source = "git+https://github.com/tarkah/iced?rev=d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65#d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65"
source = "git+https://github.com/iced-rs/iced?rev=78dc341ea82449f1e075e37e67c1ccf66b88e8d6#78dc341ea82449f1e075e37e67c1ccf66b88e8d6"
dependencies = [
"bitflags 1.3.2",
"bytemuck",
@ -1297,7 +1243,7 @@ dependencies = [
[[package]]
name = "iced_widget"
version = "0.1.0"
source = "git+https://github.com/tarkah/iced?rev=d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65#d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65"
source = "git+https://github.com/iced-rs/iced?rev=78dc341ea82449f1e075e37e67c1ccf66b88e8d6#78dc341ea82449f1e075e37e67c1ccf66b88e8d6"
dependencies = [
"iced_renderer",
"iced_runtime",
@ -1311,7 +1257,7 @@ dependencies = [
[[package]]
name = "iced_winit"
version = "0.9.1"
source = "git+https://github.com/tarkah/iced?rev=d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65#d73dba1f1c2dda7f58c9983e7ee8fbac6f563a65"
source = "git+https://github.com/iced-rs/iced?rev=78dc341ea82449f1e075e37e67c1ccf66b88e8d6#78dc341ea82449f1e075e37e67c1ccf66b88e8d6"
dependencies = [
"iced_graphics",
"iced_runtime",
@ -1372,44 +1318,31 @@ version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2"
dependencies = [
"hermit-abi 0.3.1",
"hermit-abi 0.3.2",
"libc",
"windows-sys 0.48.0",
]
[[package]]
name = "irc"
version = "0.15.0"
source = "git+https://github.com/aatxe/irc.git?rev=8eef9c56881670aa614782ab4321fb8ae3975fa0#8eef9c56881670aa614782ab4321fb8ae3975fa0"
version = "0.1.0"
dependencies = [
"chrono",
"encoding",
"futures-util",
"irc-proto",
"log",
"native-tls",
"parking_lot 0.12.1",
"pin-project",
"serde",
"serde_derive",
"bytes",
"futures",
"irc_proto",
"thiserror",
"tokio",
"tokio-native-tls",
"tokio-stream",
"tokio-util",
"toml",
]
[[package]]
name = "irc-proto"
version = "0.15.0"
source = "git+https://github.com/aatxe/irc.git?rev=8eef9c56881670aa614782ab4321fb8ae3975fa0#8eef9c56881670aa614782ab4321fb8ae3975fa0"
name = "irc_proto"
version = "0.1.0"
dependencies = [
"bytes",
"encoding",
"itertools",
"nom",
"thiserror",
"tokio",
"tokio-util",
]
[[package]]
@ -1440,6 +1373,12 @@ dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
[[package]]
name = "jni-sys"
version = "0.3.0"
@ -1516,9 +1455,9 @@ checksum = "03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8"
[[package]]
name = "libc"
version = "0.2.144"
version = "0.2.147"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1"
checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
[[package]]
name = "libloading"
@ -1576,11 +1515,11 @@ checksum = "518ef76f2f87365916b142844c16d8fefd85039bc5699050210a7778ee1cd1de"
[[package]]
name = "lru"
version = "0.9.0"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71e7d46de488603ffdd5f30afbc64fbba2378214a2c3a2fb83abf3d33126df17"
checksum = "eedb2bdbad7e0634f83989bf596f497b070130daaa398ab22d84c39e266deec5"
dependencies = [
"hashbrown 0.13.2",
"hashbrown 0.14.0",
]
[[package]]
@ -1991,9 +1930,9 @@ dependencies = [
[[package]]
name = "openssl"
version = "0.10.54"
version = "0.10.55"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69b3f656a17a6cbc115b5c7a40c616947d213ba182135b014d6051b73ab6f019"
checksum = "345df152bc43501c5eb9e4654ff05f794effb78d4efe3d53abc158baddc0703d"
dependencies = [
"bitflags 1.3.2",
"cfg-if",
@ -2023,9 +1962,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-sys"
version = "0.9.88"
version = "0.9.90"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2ce0f250f34a308dcfdbb351f511359857d4ed2134ba715a4eadd46e1ffd617"
checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6"
dependencies = [
"cc",
"libc",
@ -2602,6 +2541,17 @@ dependencies = [
"syn 2.0.18",
]
[[package]]
name = "serde_json"
version = "1.0.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46266871c240a00b8f503b877622fe33430b3c7d963bdc0f2adc511e54a1eae3"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "serde_spanned"
version = "0.6.2"
@ -2900,9 +2850,9 @@ dependencies = [
[[package]]
name = "tiny-skia"
version = "0.9.1"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce2986c82f77818c7b9144c70818fdde98db15308e329ae2f7204d767808fd3c"
checksum = "7db11798945fa5c3e5490c794ccca7c6de86d3afdd54b4eb324109939c6f37bc"
dependencies = [
"arrayref",
"arrayvec",
@ -2910,7 +2860,7 @@ dependencies = [
"cfg-if",
"log",
"png",
"tiny-skia-path 0.9.0",
"tiny-skia-path 0.10.0",
]
[[package]]
@ -2926,9 +2876,9 @@ dependencies = [
[[package]]
name = "tiny-skia-path"
version = "0.9.0"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7acb0ccda1ac91084353a56d0b69b0e29c311fd809d2088b1ed2f9ae1841c47"
checksum = "2f60aa35c89ac2687ace1a2556eaaea68e8c0d47408a2e3e7f5c98a489e7281c"
dependencies = [
"arrayref",
"bytemuck",
@ -2952,21 +2902,35 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tokio"
version = "1.28.2"
version = "1.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94d7b1cfd2aa4011f2de74c2c4c63665e27a71006b0a192dcd2710272e73dfa2"
checksum = "532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da"
dependencies = [
"autocfg",
"backtrace",
"bytes",
"libc",
"mio",
"num_cpus",
"parking_lot 0.12.1",
"pin-project-lite",
"signal-hook-registry",
"socket2",
"tokio-macros",
"windows-sys 0.48.0",
]
[[package]]
name = "tokio-macros"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.18",
]
[[package]]
name = "tokio-native-tls"
version = "0.3.1"


@ -2,6 +2,8 @@
, stdenv
, darwin
, fetchFromGitHub
, copyDesktopItems
, makeDesktopItem
, libxkbcommon
, openssl
, pkg-config
@ -13,27 +15,25 @@
rustPlatform.buildRustPackage rec {
pname = "halloy";
version = "23.1-alpha1";
version = "2023.4";
src = fetchFromGitHub {
owner = "squidowl";
repo = "halloy";
rev = "refs/tags/${version}";
hash = "sha256-Aq+mKctmc1RwpnUEIi+Zmr4o8n6wgQchGCunPWouLsE=";
hash = "sha256-j5Yw7rXdNd32RnbV2jQ+ZUjbm14AKZ7khQNX6A+qPAM=";
};
cargoLock = {
lockFile = ./Cargo.lock;
outputHashes = {
"cosmic-text-0.8.0" = "sha256-p8PtXcFH+T3z6wWPFYbHFkxrkJpK4oHJ1aJvq4zld/4=";
"glyphon-0.2.0" = "sha256-7h5W82zPMw9PVZiF5HCo7HyRiVhGR8MsfgGuIjo+Kfg=";
"iced-0.9.0" = "sha256-KEBm62lDjSKXvXZssLoBfUYDSW+OpTXutxsKZMz8SE0=";
"irc-0.15.0" = "sha256-ZlwfyX4tmQr9D+blY4jWl85bwJ2tXUYp3ryLqoungII=";
"iced-0.9.0" = "sha256-z/tkUdFXNjxR5Si8dnNrkrvFos0VAqGjnFNSs88D/5w=";
"winit-0.28.6" = "sha256-szB1LCOPmPqhZNIWbeO8JMfRMcMRr0+Ze0f4uqyR8AE=";
};
};
nativeBuildInputs = [
copyDesktopItems
pkg-config
];
@ -57,6 +57,25 @@ rustPlatform.buildRustPackage rec {
wayland
];
desktopItems = [
(makeDesktopItem {
name = "org.squidowl.halloy";
desktopName = "Halloy";
comment = "IRC client written in Rust";
icon = "org.squidowl.halloy";
exec = pname;
terminal = false;
mimeTypes = [ "x-scheme-handler/irc" "x-scheme-handler/ircs" ];
categories = [ "Network" "IRCClient" ];
keywords = [ "IM" "Chat" ];
startupWMClass = "org.squidowl.halloy";
})
];
postInstall = ''
install -Dm644 assets/linux/org.squidowl.halloy.png $out/share/icons/hicolor/128x128/apps/org.squidowl.halloy.png
'';
meta = with lib; {
description = "IRC application";
homepage = "https://github.com/squidowl/halloy";


@ -10,16 +10,16 @@
buildGoModule rec {
pname = "netmaker";
version = "0.20.5";
version = "0.20.6";
src = fetchFromGitHub {
owner = "gravitl";
repo = pname;
rev = "v${version}";
hash = "sha256-bnYIyYnJhrdI8zfeOBdab8yZuK2rxTO5YO6EKlaRlHo=";
hash = "sha256-2NrqplVduDsaLGla1rzLGhX1YgZL6NBFFDVQRen7Pfk=";
};
vendorHash = "sha256-Nz1vE3SelUdgJoGQLOBXtFwAtM1VTDL9oKDQqxVi8Vg=";
vendorHash = "sha256-TrVtUv1xlz3Wbw4RY4NAzWmPE8JVk+GqPveqvfTe8e4=";
inherit subPackages;
@ -39,6 +39,6 @@ buildGoModule rec {
homepage = "https://netmaker.io";
changelog = "https://github.com/gravitl/netmaker/-/releases/v${version}";
license = licenses.sspl;
maintainers = with maintainers; [ urandom ];
maintainers = with maintainers; [ urandom qjoly ];
};
}


@ -12,7 +12,7 @@ let
python = python3;
pythonDeps = with python.pkgs; [
flask flask_assets flask-login flask-sqlalchemy flask_migrate flask-seasurf flask_mail flask-session flask-session-captcha flask-sslify
flask flask-assets flask-login flask-sqlalchemy flask-migrate flask-seasurf flask-mail flask-session flask-session-captcha flask-sslify
mysqlclient psycopg2 sqlalchemy
certifi cffi configobj cryptography bcrypt requests python-ldap pyotp qrcode dnspython
gunicorn itsdangerous python3-saml pytz rcssmin rjsmin authlib bravado-core


@ -13,11 +13,11 @@
stdenv.mkDerivation rec {
pname = "appflowy";
version = "0.2.6";
version = "0.3.0";
src = fetchzip {
url = "https://github.com/AppFlowy-IO/appflowy/releases/download/${version}/AppFlowy_x86_64-unknown-linux-gnu_ubuntu-20.04.tar.gz";
sha256 = "sha256-e7nzJ81rMehpxwsbOlwnMh1jzCsGwc+kAo/6+AcCiLE=";
sha256 = "sha256-05RQtvf6I4/sjGtMDfc5U4esxfFFeTwIuxFAkbr6p4A";
stripRoot = false;
};


@ -0,0 +1,55 @@
{ lib
, stdenv
, fetchFromGitHub
, zlib
}:
stdenv.mkDerivation (finalAttrs: {
pname = "bwa-mem2";
version = "unstable-2023-03-18";
src = fetchFromGitHub {
owner = "bwa-mem2";
repo = "bwa-mem2";
rev = "cf4306a47dac35e7e79a9e75398a35f33900cfd0";
fetchSubmodules = true;
hash = "sha256-1AYSn7nBrDwbX7oSrdEoa1d3t6xzwKnA0S87Y/XeXJg=";
};
buildInputs = [ zlib ];
# see https://github.com/bwa-mem2/bwa-mem2/issues/93
postPatch = lib.optionalString stdenv.isDarwin ''
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/include/safe_mem_lib.h
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/safeclib/memset16_s.c
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/safeclib/memset32_s.c
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/safeclib/memset_s.c
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/safeclib/wmemset_s.c
'';
buildFlags = [
(if stdenv.hostPlatform.sse4_2Support then "arch=sse42"
else if stdenv.hostPlatform.avxSupport then "arch=avx"
else if stdenv.hostPlatform.avx2Support then "arch=avx2"
else if stdenv.hostPlatform.avx512Support then "arch=avx512"
else "arch=sse41")
];
enableParallelBuilding = true;
installPhase = ''
runHook preInstall
mkdir -p $out/bin
cp bwa-mem2* $out/bin/
runHook postInstall
'';
meta = with lib; {
description = "Next version of the bwa-mem algorithm in bwa, a software package for mapping low-divergent sequences against a large reference genome";
license = licenses.mit;
homepage = "https://github.com/bwa-mem2/bwa-mem2/";
changelog = "https://github.com/bwa-mem2/bwa-mem2/blob/${finalAttrs.src.rev}/NEWS.md";
platforms = platforms.x86_64;
maintainers = with maintainers; [ alxsimon ];
};
})


@ -13,13 +13,13 @@
stdenv.mkDerivation rec {
pname = "gerbv";
version = "2.9.8";
version = "2.10.0";
src = fetchFromGitHub {
owner = "gerbv";
repo = pname;
rev = "refs/tags/v${version}";
hash = "sha256-6r9C5qDOXsQoLsYMSCuIS01gI0ijH7WDoepcowo1yQw=";
hash = "sha256-sr48RGLYcMKuyH9p+5BhnR6QpKBvNOqqtRryw3+pbBk=";
};
postPatch = ''


@ -8,13 +8,13 @@
stdenv.mkDerivation rec {
pname = "cryptominisat";
version = "5.11.11";
version = "5.11.12";
src = fetchFromGitHub {
owner = "msoos";
repo = "cryptominisat";
rev = version;
hash = "sha256-TYuOgOOs1EsdNz7ctZMsArTlw3QzHjiPZVozuniiPcI=";
hash = "sha256-1AJx8gPf+qDpAp0p4cfCObKZDWKDAKdGopllr2ajpHw=";
};
buildInputs = [ python3 boost ];


@ -30,7 +30,7 @@ let
src = fetchurl {
urls = [
"https://download.geogebra.org/installers/6.0/GeoGebra-Linux64-Portable-${version}.zip"
"https://web.archive.org/web/20230627211859/https://download.geogebra.org/installers/6.0/GeoGebra-Linux64-Portable-${version}.zip"
"https://web.archive.org/web/20230824011801/https://download.geogebra.org/installers/6.0/GeoGebra-Linux64-Portable-${version}.zip"
];
hash = "sha256-sNCq1Xcx/Y5r+SIRiqQYcG9dVsfIC2Ef5KJf+tgfxC8=";
};
@ -65,9 +65,9 @@ let
src = fetchurl {
urls = [
"https://download.geogebra.org/installers/6.0/GeoGebra-Classic-6-MacOS-Portable-${version}.zip"
"https://web.archive.org/web/20230627214413/https://download.geogebra.org/installers/6.0/GeoGebra-Classic-6-MacOS-Portable-${version}.zip"
"https://web.archive.org/web/20230824012900/https://download.geogebra.org/installers/6.0/GeoGebra-Classic-6-MacOS-Portable-${version}.zip"
];
hash = "sha256-HtIhhq8E1Q5B6xZ7q6Ok95Rt53VWLoGf8PbY+UEOSKg=";
hash = "sha256-CrSoKAjXiejfJHyv8wIpcRr2d8u/50HnatiDm1CdnGQ=";
};
dontUnpack = true;


@ -8,16 +8,16 @@
rustPlatform.buildRustPackage rec {
pname = "gql";
version = "0.4.1";
version = "0.5.0";
src = fetchFromGitHub {
owner = "AmrDeveloper";
repo = "GQL";
rev = version;
hash = "sha256-d6uncWHq9bLDODFle7xij9YjhpiQPL7mmyFmVxmy8hY=";
hash = "sha256-UTyP9ugUXiPMzkeIvPJUtORvcJ93YOBltglmlcym3sI=";
};
cargoHash = "sha256-jR79xchMpib76oVnpy+UIbcwhDXvDPyl+jWmVPfXVog=";
cargoHash = "sha256-AIt7Ns3vNrHQxJU7cSNr+h3tFGZ9hL1OMBqPHS61YUQ=";
nativeBuildInputs = [
pkg-config


@ -8,13 +8,13 @@
stdenv.mkDerivation rec {
pname = "jftui";
version = "0.6.2";
version = "0.7.1";
src = fetchFromGitHub {
owner = "Aanok";
repo = pname;
rev = "v${version}";
sha256 = "sha256-4j0ypzszNWjHbb4RkMIoqvgz624zoKCKiIpidQUPIF4=";
sha256 = "sha256-dkMJn5ZsM1pfHvSsj0kHsP17PqbR+yrRYX62RxoxwwM=";
};
nativeBuildInputs = [


@ -26,14 +26,14 @@
stdenv.mkDerivation (finalAttrs: {
pname = "qmplay2";
version = "23.06.17";
version = "23.08.22";
src = fetchFromGitHub {
owner = "zaps166";
repo = "QMPlay2";
rev = finalAttrs.version;
fetchSubmodules = true;
hash = "sha256-f4lIXB0eTyteCJdWFP0XnsnxGWc32CV+HlqpaCjmgOE=";
hash = "sha256-Ug7WAqZ+BxspQUXweL/OnVBGCsU60DOWNexbi0GpDo0=";
};
nativeBuildInputs = [
@ -79,7 +79,7 @@ stdenv.mkDerivation (finalAttrs: {
'';
changelog = "https://github.com/zaps166/QMPlay2/releases/tag/${finalAttrs.version}";
license = lib.licenses.lgpl3Plus;
maintainers = with lib.maintainers; [ AndersonTorres ];
maintainers = with lib.maintainers; [ AndersonTorres kashw2 ];
platforms = lib.platforms.linux;
};
})


@ -35,8 +35,6 @@
++ ["${stdenv.hostPlatform.qemuArch}-softmmu"])
else null)
, nixosTestRunner ? false
, doCheck ? false
, qemu # for passthru.tests
, gitUpdater
}:
@ -44,7 +42,7 @@ let
hexagonSupport = hostCpuTargets == null || lib.elem "hexagon" hostCpuTargets;
in
stdenv.mkDerivation rec {
stdenv.mkDerivation (finalAttrs: {
pname = "qemu"
+ lib.optionalString xenSupport "-xen"
+ lib.optionalString hostCpuOnly "-host-cpu-only"
@ -52,7 +50,7 @@ stdenv.mkDerivation rec {
version = "8.0.4";
src = fetchurl {
url = "https://download.qemu.org/qemu-${version}.tar.xz";
url = "https://download.qemu.org/qemu-${finalAttrs.version}.tar.xz";
hash = "sha256-gcgX3aOK+Vi+W+8abPVbZYuy0/uHwealcd5reyxEUWw=";
};
@ -207,7 +205,7 @@ stdenv.mkDerivation rec {
preBuild = "cd build";
# tests can still timeout on slower systems
inherit doCheck;
doCheck = false;
nativeCheckInputs = [ socat ];
preCheck = ''
# time limits are a little meagre for a build machine that's
@ -248,7 +246,7 @@ stdenv.mkDerivation rec {
passthru = {
qemu-system-i386 = "bin/qemu-system-i386";
tests = {
qemu-tests = qemu.override { doCheck = true; };
qemu-tests = finalAttrs.finalPackage.overrideAttrs (_: { doCheck = true; });
};
updateScript = gitUpdater {
# No nicer place to find latest release.
@ -269,4 +267,4 @@ stdenv.mkDerivation rec {
maintainers = with maintainers; [ eelco qyliss ];
platforms = platforms.unix;
};
}
})


@ -1,24 +1,24 @@
{ stdenv, installShellFiles, qemu, removeReferencesTo }:
{ stdenv, installShellFiles, qemu_kvm, removeReferencesTo }:
stdenv.mkDerivation rec {
pname = "qemu-utils";
inherit (qemu) version;
inherit (qemu_kvm) version;
nativeBuildInputs = [ installShellFiles ];
buildInputs = [ qemu ];
disallowedRequisites = [ qemu ];
buildInputs = [ qemu_kvm ];
disallowedRequisites = [ qemu_kvm ];
unpackPhase = "true";
installPhase = ''
mkdir -p "$out/bin"
cp "${qemu}/bin/qemu-img" "$out/bin/qemu-img"
cp "${qemu}/bin/qemu-io" "$out/bin/qemu-io"
cp "${qemu}/bin/qemu-nbd" "$out/bin/qemu-nbd"
${removeReferencesTo}/bin/remove-references-to -t ${qemu} $out/bin/*
cp "${qemu_kvm}/bin/qemu-img" "$out/bin/qemu-img"
cp "${qemu_kvm}/bin/qemu-io" "$out/bin/qemu-io"
cp "${qemu_kvm}/bin/qemu-nbd" "$out/bin/qemu-nbd"
${removeReferencesTo}/bin/remove-references-to -t ${qemu_kvm} $out/bin/*
installManPage ${qemu}/share/man/man1/qemu-img.1.gz
installManPage ${qemu}/share/man/man8/qemu-nbd.8.gz
installManPage ${qemu_kvm}/share/man/man1/qemu-img.1.gz
installManPage ${qemu_kvm}/share/man/man8/qemu-nbd.8.gz
'';
inherit (qemu) meta;
inherit (qemu_kvm) meta;
}


@ -73,6 +73,7 @@ grimshot = stdenv.mkDerivation rec {
meta = with lib; {
description = "A helper for screenshots within sway";
maintainers = with maintainers; [ evils ];
mainProgram = "grimshot";
};
};


@ -15,16 +15,16 @@ rec {
(when (eq nixpkgs--buffer-count 0)
(make-variable-buffer-local 'nixpkgs--is-nixpkgs-buffer)
; When generating a new temporary buffer (one whose name starts with a space), do inherit-local inheritance and make it a nixpkgs buffer
(defun nixpkgs--around-generate (orig name)
(defun nixpkgs--around-generate (orig name &optional ibh)
(if (and nixpkgs--is-nixpkgs-buffer (eq (aref name 0) ?\s))
(let ((buf (funcall orig name)))
(let ((buf (funcall orig name ibh)))
(progn
(inherit-local-inherit-child buf)
(with-current-buffer buf
(setq nixpkgs--buffer-count (1+ nixpkgs--buffer-count))
(add-hook 'kill-buffer-hook 'nixpkgs--decrement-buffer-count nil t)))
buf)
(funcall orig name)))
(funcall orig name ibh)))
(advice-add 'generate-new-buffer :around #'nixpkgs--around-generate)
; When we have no more nixpkgs buffers, tear down the buffer handling
(defun nixpkgs--decrement-buffer-count ()
@ -45,7 +45,7 @@ rec {
(setenv "PATH" (concat "${lib.makeSearchPath "bin" pkgs}:" (getenv "PATH")))
(inherit-local-permanent exec-path (append '(${builtins.concatStringsSep " " (map (p: "\"${p}/bin\"") pkgs)}) exec-path))
(inherit-local-permanent eshell-path-env (concat "${lib.makeSearchPath "bin" pkgs}:" eshell-path-env))
(inherit-local-permanent eshell-path-env (concat "${lib.makeSearchPath "bin" pkgs}:" (if (boundp 'eshell-path-env) eshell-path-env (getenv "PATH"))))
(setq nixpkgs--is-nixpkgs-buffer t)
(inherit-local 'nixpkgs--is-nixpkgs-buffer)


@ -494,7 +494,7 @@ rec {
fi
diskImage="$1"
if ! test -e "$diskImage"; then
${qemu}/bin/qemu-img create -b ${image}/disk-image.qcow2 -f qcow2 "$diskImage"
${qemu}/bin/qemu-img create -b ${image}/disk-image.qcow2 -f qcow2 -F qcow2 "$diskImage"
fi
export TMPDIR=$(mktemp -d)
export out=/dummy


@ -244,6 +244,11 @@ let
++ lib.optionals (langD) [
"--with-target-system-zlib=yes"
]
# On mips64-unknown-linux-gnu libsanitizer defines collide with
# glibc's definitions and fail the build. It was fixed in gcc-13+.
++ lib.optionals (targetPlatform.isMips && targetPlatform.parsed.abi.name == "gnu" && lib.versions.major version == "12") [
"--disable-libsanitizer"
]
;
in configureFlags


@ -18,6 +18,8 @@ stdenv.mkDerivation rec {
makeFlags = [ "prefix=$(out)" ];
installFlags = [ "install-shared" ];
passthru.updateScript = gitUpdater {
# No nicer place to track releases
url = "git://git.ghostscript.com/mujs.git";


@ -6,13 +6,13 @@
stdenv.mkDerivation (finalAttrs: {
pname = "wamr";
version = "1.2.2";
version = "1.2.3";
src = fetchFromGitHub {
owner = "bytecodealliance";
repo = "wasm-micro-runtime";
rev = "WAMR-${finalAttrs.version}";
hash = "sha256-jpT42up9HAVJpo03cFrffQQk2JiHEAEepBGlU4RUfNU=";
hash = "sha256-bnia0ORC0YajO7I3XDMdpjlktDqOiXDlGcf12N1G+eg=";
};
nativeBuildInputs = [ cmake ];
@ -23,6 +23,7 @@ stdenv.mkDerivation (finalAttrs: {
description = "WebAssembly Micro Runtime";
homepage = "https://github.com/bytecodealliance/wasm-micro-runtime";
license = licenses.asl20;
mainProgram = "iwasm";
maintainers = with maintainers; [ ereslibre ];
# TODO (ereslibre): this derivation should be improved to support
# more platforms.


@ -0,0 +1,42 @@
{ lib
, stdenv
, fetchFromGitHub
, cmake
, gtest
, static ? stdenv.hostPlatform.isStatic
, cxxStandard ? null
}:
stdenv.mkDerivation (finalAttrs: {
pname = "abseil-cpp";
version = "20230802.0";
src = fetchFromGitHub {
owner = "abseil";
repo = "abseil-cpp";
rev = "refs/tags/${finalAttrs.version}";
hash = "sha256-yILAsAERUDMbRWh8t4o6W74YiswvGIHSyBAIuLVbzxY=";
};
cmakeFlags = [
"-DABSL_BUILD_TEST_HELPERS=ON"
"-DABSL_USE_EXTERNAL_GOOGLETEST=ON"
"-DBUILD_SHARED_LIBS=${if static then "OFF" else "ON"}"
] ++ lib.optionals (cxxStandard != null) [
"-DCMAKE_CXX_STANDARD=${cxxStandard}"
];
strictDeps = true;
nativeBuildInputs = [ cmake ];
buildInputs = [ gtest ];
meta = with lib; {
description = "An open-source collection of C++ code designed to augment the C++ standard library";
homepage = "https://abseil.io/";
license = licenses.asl20;
platforms = platforms.all;
maintainers = [ maintainers.andersk ];
};
})


@ -25,13 +25,13 @@ let
in stdenv.mkDerivation rec {
pname = "amdvlk";
version = "2023.Q2.3";
version = "2023.Q3.1";
src = fetchRepoProject {
name = "${pname}-src";
manifest = "https://github.com/GPUOpen-Drivers/AMDVLK.git";
rev = "refs/tags/v-${version}";
sha256 = "FoDXg0ocKBtwY22UJJCnyLn99CpmDE9HOMw7iiZkf6k=";
sha256 = "W+igZbdQG1L62oGJa2Rz0n8YkTsZFqSm7w8VFfPu8k0=";
};
buildInputs = [


@ -3,30 +3,21 @@
, boost
, cmake
, fetchFromGitHub
, fetchpatch
, eigen
, zlib
}:
stdenv.mkDerivation (finalAttrs: {
pname = "libcifpp";
version = "5.1.0.1";
version = "5.1.2";
src = fetchFromGitHub {
owner = "PDB-REDO";
repo = "libcifpp";
rev = "refs/tags/v${finalAttrs.version}";
hash = "sha256-fAH7FIgJuitPUoacLnLs8uf9di5iM0c/2WHZqVjJOUE=";
hash = "sha256-wx5D0kNKetgc/8LFAgNxTAwni+lJb2rajsxh0AASpeY=";
};
patches = [
(fetchpatch {
name = "add-include-compare.patch";
url = "https://github.com/PDB-REDO/libcifpp/commit/676c0c8dc87437e2096718fd8c0750b995e174ba.patch";
hash = "sha256-fbA4fgiTY93+hFct+BQuHF7uv2nG7D9PljQxq1CkReU=";
})
];
nativeBuildInputs = [
cmake
];


@ -7,14 +7,14 @@
}:
stdenv.mkDerivation (finalAttrs: {
name = "librecast";
version = "0.7-RC3";
version = "0.7.0";
src = fetchFromGitea {
domain = "codeberg.org";
owner = "librecast";
repo = "librecast";
rev = "v${finalAttrs.version}";
hash = "sha256-AD3MpWg8Lp+VkizwYTuuS2YWM8e0xaMEavVIvwhSZRo=";
hash = "sha256-NlwYJJn1yewx92y6UKJcj6R2MnPn+XuEiKOmsR2oE3g=";
};
buildInputs = [ lcrq libsodium ];
installFlags = [ "PREFIX=$(out)" ];


@ -22,13 +22,13 @@
stdenv.mkDerivation rec {
pname = "pdal";
version = "2.5.5";
version = "2.5.6";
src = fetchFromGitHub {
owner = "PDAL";
repo = "PDAL";
rev = version;
sha256 = "sha256-AhekpvWAdbDAYAr38VXPBDGE40xvP0BnEAI2ZKF3ctY=";
sha256 = "sha256-JKwa89c05EfZ/FxOkj8lYmw0o2EgSqafRDIV2mTpZ5E=";
};
nativeBuildInputs = [


@ -6,7 +6,7 @@
stdenv.mkDerivation rec {
pname = "libupnp";
version = "1.14.17";
version = "1.14.18";
outputs = [ "out" "dev" ];
@ -14,7 +14,7 @@ stdenv.mkDerivation rec {
owner = "pupnp";
repo = "pupnp";
rev = "release-${version}";
sha256 = "sha256-vb540oqDn6Y+oD0LriOJckYYkI/zcHkEVc8mL/+9bps=";
sha256 = "sha256-eQKtZioZjI53J1fsoer032pzqebbK5IabOnkAXwBPos=";
};
nativeBuildInputs = [


@ -5,13 +5,13 @@
stdenv.mkDerivation rec {
pname = "tbox";
version = "1.7.3";
version = "1.7.4";
src = fetchFromGitHub {
owner = "tboox";
repo = pname;
rev = "v${version}";
hash = "sha256-6SqMvwxKSiJO7Z33xx7cJoECu5AJ1gWF8ZsiERWx8DU=";
hash = "sha256-b461JNTS7jNI/qawumDjL2vfC4fAaWB7a++9PpUUDB0=";
};
configureFlags = [


@ -44,6 +44,7 @@ mapAliases {
"@githubnext/github-copilot-cli" = pkgs.github-copilot-cli; # Added 2023-05-02
"@google/clasp" = pkgs.google-clasp; # Added 2023-05-07
"@maizzle/cli" = pkgs.maizzle; # added 2023-08-17
"@medable/mdctl-cli" = throw "@medable/mdctl-cli was removed because it was broken"; # added 2023-08-21
"@nestjs/cli" = pkgs.nest-cli; # Added 2023-05-06
antennas = pkgs.antennas; # added 2023-07-30
balanceofsatoshis = pkgs.balanceofsatoshis; # added 2023-07-31
@ -54,13 +55,16 @@ mapAliases {
inherit (pkgs) carto; # added 2023-08-17
castnow = pkgs.castnow; # added 2023-07-30
inherit (pkgs) clean-css-cli; # added 2023-08-18
coc-imselect = throw "coc-imselect was removed because it was broken"; # added 2023-08-21
inherit (pkgs) configurable-http-proxy; # added 2023-08-19
inherit (pkgs) cordova; # added 2023-08-18
dat = throw "dat was removed because it was broken"; # added 2023-08-21
eask = pkgs.eask; # added 2023-08-17
inherit (pkgs.elmPackages) elm-test;
eslint_d = pkgs.eslint_d; # Added 2023-05-26
inherit (pkgs) firebase-tools; # added 2023-08-18
flood = pkgs.flood; # Added 2023-07-25
git-ssb = throw "git-ssb was removed because it was broken"; # added 2023-08-21
inherit (pkgs) graphqurl; # added 2023-08-19
gtop = pkgs.gtop; # added 2023-07-31
inherit (pkgs) htmlhint; # added 2023-08-19
@ -74,9 +78,13 @@ mapAliases {
manta = pkgs.node-manta; # Added 2023-05-06
markdownlint-cli = pkgs.markdownlint-cli; # added 2023-07-29
inherit (pkgs) markdownlint-cli2; # added 2023-08-22
mdctl-cli = self."@medable/mdctl-cli"; # added 2023-08-21
node-inspector = throw "node-inspector was removed because it was broken"; # added 2023-08-21
readability-cli = pkgs.readability-cli; # Added 2023-06-12
reveal-md = pkgs.reveal-md; # added 2023-07-31
s3http = throw "s3http was removed because it was abandoned upstream"; # added 2023-08-18
ssb-server = throw "ssb-server was removed because it was broken"; # added 2023-08-21
stf = throw "stf was removed because it was broken"; # added 2023-08-21
thelounge = pkgs.thelounge; # Added 2023-05-22
triton = pkgs.triton; # Added 2023-05-06
typescript = pkgs.typescript; # Added 2023-06-21


@ -17,7 +17,6 @@
"@commitlint/cli" = "commitlint";
"@forge/cli" = "forge";
"@gitbeaker/cli" = "gitbeaker";
"@medable/mdctl-cli" = "mdctl";
"@mermaid-js/mermaid-cli" = "mmdc";
"@nerdwallet/shepherd" = "shepherd";
"@prisma/language-server" = "prisma-language-server";


@ -6,7 +6,6 @@
, "@commitlint/cli"
, "@commitlint/config-conventional"
, "@forge/cli"
, "@medable/mdctl-cli"
, "@mermaid-js/mermaid-cli"
, "@microsoft/rush"
, "@nerdwallet/shepherd"
@ -57,7 +56,6 @@
, "coc-haxe"
, "coc-highlight"
, "coc-html"
, "coc-imselect"
, "coc-java"
, "coc-jest"
, "coc-json"
@ -104,7 +102,6 @@
, "create-react-native-app"
, "cspell"
, "csslint"
, "dat"
, "degit"
, "dhcp"
, "diagnostic-languageserver"
@ -134,7 +131,6 @@
, "generator-code"
, "get-graphql-schema"
, "git-run"
, "git-ssb"
, "git-standup"
, "@gitbeaker/cli"
, "gitmoji-cli"
@ -202,7 +198,6 @@
, "nijs"
, "node-gyp"
, "node-gyp-build"
, "node-inspector"
, "node-pre-gyp"
, "node-red"
, "node2nix"
@ -262,9 +257,7 @@
, "socket.io"
, "speed-test"
, "sql-formatter"
, "ssb-server"
, "stackdriver-statsd-backend"
, "stf"
, "stylelint"
, "surge"
, "svelte-check"

File diff suppressed because it is too large


@ -41,25 +41,6 @@ final: prev: {
];
};
"@medable/mdctl-cli" = prev."@medable/mdctl-cli".override (oldAttrs: {
nativeBuildInputs = with pkgs; with darwin.apple_sdk.frameworks; [
glib
libsecret
pkg-config
] ++ lib.optionals stdenv.isDarwin [
AppKit
Security
];
buildInputs = [
final.node-gyp-build
final.node-pre-gyp
nodejs
];
meta = oldAttrs.meta // { broken = since "16"; };
});
mdctl-cli = final."@medable/mdctl-cli";
autoprefixer = prev.autoprefixer.override {
nativeBuildInputs = [ pkgs.buildPackages.makeWrapper ];
postInstall = ''
@ -92,16 +73,6 @@ final: prev: {
'';
};
coc-imselect = prev.coc-imselect.override (oldAttrs: {
meta = oldAttrs.meta // { broken = since "10"; };
});
dat = prev.dat.override (oldAttrs: {
buildInputs = [ final.node-gyp-build pkgs.libtool pkgs.autoconf pkgs.automake ];
meta = oldAttrs.meta // { broken = since "12"; };
});
expo-cli = prev."expo-cli".override (oldAttrs: {
# The traveling-fastlane-darwin optional dependency aborts build on Linux.
dependencies = builtins.filter (d: d.packageName != "@expo/traveling-fastlane-${if stdenv.isLinux then "darwin" else "linux"}") oldAttrs.dependencies;
@ -126,11 +97,6 @@ final: prev: {
};
git-ssb = prev.git-ssb.override (oldAttrs: {
buildInputs = [ final.node-gyp-build ];
meta = oldAttrs.meta // { broken = since "10"; };
});
graphite-cli = prev."@withgraphite/graphite-cli".override {
name = "graphite-cli";
nativeBuildInputs = with pkgs; [ installShellFiles pkg-config ];
@ -260,11 +226,6 @@ final: prev: {
'';
};
node-inspector = prev.node-inspector.override (oldAttrs: {
buildInputs = [ final.node-pre-gyp ];
meta = oldAttrs.meta // { broken = since "10"; };
});
node-red = prev.node-red.override {
buildInputs = [ final.node-pre-gyp ];
};
@ -384,15 +345,6 @@ final: prev: {
name = "rush";
};
ssb-server = prev.ssb-server.override (oldAttrs: {
buildInputs = [ pkgs.automake pkgs.autoconf final.node-gyp-build ];
meta = oldAttrs.meta // { broken = since "10"; };
});
stf = prev.stf.override (oldAttrs: {
meta = oldAttrs.meta // { broken = since "10"; };
});
tailwindcss = prev.tailwindcss.override {
plugins = [ ];
nativeBuildInputs = [ pkgs.buildPackages.makeWrapper ];


@ -0,0 +1,31 @@
{ lib
, fetchPypi
, buildPythonPackage
}:
buildPythonPackage rec {
pname = "about-time";
version = "4.2.1";
# PyPi release does not contain test files, but the repo has no release tags,
# so while having no tests is not ideal, follow the PyPi releases for now
# TODO: switch to fetchFromGitHub once this issue is fixed:
# https://github.com/rsalmei/about-time/issues/15
src = fetchPypi {
inherit pname version;
hash = "sha256-alOIYtM85n2ZdCnRSZgxDh2/2my32bv795nEcJhH/s4=";
};
doCheck = false;
pythonImportsCheck = [
"about_time"
];
meta = with lib; {
description = "A cool helper for tracking time and throughput of code blocks, with beautiful human friendly renditions";
homepage = "https://github.com/rsalmei/about-time";
license = licenses.mit;
maintainers = with maintainers; [ thiagokokada ];
};
}


@ -8,16 +8,16 @@
buildPythonPackage rec {
pname = "aioairzone";
version = "0.6.5";
version = "0.6.7";
format = "pyproject";
disabled = pythonOlder "3.8";
disabled = pythonOlder "3.11";
src = fetchFromGitHub {
owner = "Noltari";
repo = pname;
rev = "refs/tags/${version}";
hash = "sha256-HcO4t+9JigS6SVkwDk+rEWzqSfe0h5ZADdk3Jvxnr3Y=";
hash = "sha256-/LUTHeApktB4okt9bVkZe7G1dApVaYUdwty4sbK7IKQ=";
};
nativeBuildInputs = [


@ -11,7 +11,7 @@
buildPythonPackage rec {
pname = "aiobafi6";
version = "0.8.2";
version = "0.9.0";
format = "pyproject";
disabled = pythonOlder "3.10";
@ -20,7 +20,7 @@ buildPythonPackage rec {
owner = "jfroy";
repo = "aiobafi6";
rev = "refs/tags/${version}";
hash = "sha256-ng+WpLhAfsouFA9biomc0V+L9XQHDthJeJLv8ttnYBc=";
hash = "sha256-QxjrspvNrcMcGChjj1B4QF/SnWCsGmPxnI2bWAL6BiI=";
};
nativeBuildInputs = [


@ -0,0 +1,41 @@
{ lib
, about-time
, buildPythonPackage
, click
, fetchFromGitHub
, grapheme
, pytestCheckHook
}:
buildPythonPackage rec {
pname = "alive-progress";
version = "3.1.4";
src = fetchFromGitHub {
owner = "rsalmei";
repo = pname;
rev = "v${version}";
hash = "sha256-27PgxQ9nw8p5hfaSf/jPYG7419o3i8B8R09o93szSOk=";
};
propagatedBuildInputs = [
about-time
grapheme
];
nativeCheckInputs = [
click
pytestCheckHook
];
pythonImportsCheck = [
"alive_progress"
];
meta = with lib; {
description = "A new kind of Progress Bar, with real-time throughput, ETA, and very cool animations";
homepage = "https://github.com/rsalmei/alive-progress";
license = licenses.mit;
maintainers = with maintainers; [ thiagokokada ];
};
}


@ -0,0 +1,49 @@
{ lib
, buildPythonPackage
, fetchPypi
, pythonOlder
, libssh
, cython
, wheel
, setuptools
, setuptools-scm
, toml
, expandvars
}:
buildPythonPackage rec {
pname = "ansible-pylibssh";
version = "1.1.0";
format = "pyproject";
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
hash = "sha256-spaGux6dYvtUtpOdU6oN7SEn8IgBof2NpQSPvr+Zplg=";
};
nativeBuildInputs = [
cython
wheel
setuptools
setuptools-scm
toml
expandvars
];
propagatedBuildInputs = [
libssh
];
pythonImportsCheck = [
"pylibsshext"
];
meta = with lib; {
description = "Python bindings to client functionality of libssh specific to Ansible use case";
homepage = "https://github.com/ansible/pylibssh";
license = licenses.lgpl21Plus;
maintainers = with maintainers; [ geluk ];
};
}


@ -13,6 +13,7 @@
, ncclient
, packaging
, paramiko
, ansible-pylibssh
, passlib
, pexpect
, psutil
@ -63,6 +64,7 @@ buildPythonPackage rec {
lxml
ncclient
paramiko
ansible-pylibssh
pexpect
psutil
pycrypto


@ -7,6 +7,7 @@
, ncclient
, netaddr
, paramiko
, ansible-pylibssh
, pynetbox
, scp
, textfsm
@ -45,6 +46,7 @@ buildPythonPackage {
ncclient
netaddr
paramiko
ansible-pylibssh
xmltodict
# ansible.posix
# ansible.utils
@ -62,6 +64,7 @@ buildPythonPackage {
jxmlease
ncclient
paramiko
ansible-pylibssh
scp
xmltodict
] ++ lib.optionals (withNetbox) [


@ -0,0 +1,44 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, pyqt5
, pytestCheckHook
, nix-update-script
}:
buildPythonPackage rec {
pname = "anyqt";
version = "0.2.0";
src = fetchFromGitHub {
owner = "ales-erjavec";
repo = "anyqt";
rev = version;
hash = "sha256-dL2EUAMzWKq/oN3rXiEC6emDJddmg4KclT5ONKA0jfk=";
};
nativeCheckInputs = [ pyqt5 pytestCheckHook ];
# All of these fail because Qt modules cannot be imported
disabledTestPaths = [
"tests/test_qabstractitemview.py"
"tests/test_qaction_set_menu.py"
"tests/test_qactionevent_action.py"
"tests/test_qfontdatabase_static.py"
"tests/test_qpainter_draw_pixmap_fragments.py"
"tests/test_qsettings.py"
"tests/test_qstandarditem_insertrow.py"
"tests/test_qtest.py"
];
pythonImportsCheck = [ "AnyQt" ];
passthru.updateScript = nix-update-script { };
meta = {
description = "PyQt/PySide compatibility layer";
homepage = "https://github.com/ales-erjavec/anyqt";
license = [ lib.licenses.gpl3Only ];
maintainers = [ lib.maintainers.lucasew ];
};
}


@ -2,9 +2,7 @@
, boto3
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, jsonschema
, mock
, parameterized
, pydantic
, pytest-env
@ -18,7 +16,7 @@
buildPythonPackage rec {
pname = "aws-sam-translator";
version = "1.60.1";
version = "1.73.0";
format = "setuptools";
disabled = pythonOlder "3.6";
@ -27,7 +25,7 @@ buildPythonPackage rec {
owner = "aws";
repo = "serverless-application-model";
rev = "refs/tags/v${version}";
hash = "sha256-exVB1STX8OsFnQ0pzSuR3O/FrvG2GR5MdZa8tZ9IJvI=";
hash = "sha256-rj+q/06gIvPYTJP/EH9ZrP0Sp4J3K1aCRyNkgpphWP4=";
};
propagatedBuildInputs = [
@ -37,17 +35,8 @@ buildPythonPackage rec {
typing-extensions
];
patches = [
(fetchpatch {
# relax typing-extenions dependency
url = "https://github.com/aws/serverless-application-model/commit/d1c26f7ad9510a238ba570d511d5807a81379d0a.patch";
hash = "sha256-nh6MtRgi0RrC8xLkLbU6/Ec0kYtxIG/fgjn/KLiAM0E=";
})
];
postPatch = ''
substituteInPlace requirements/base.txt \
--replace "jsonschema~=3.2" "jsonschema>=3.2"
preCheck = ''
sed -i '2ienv =\n\tAWS_DEFAULT_REGION=us-east-1' pytest.ini
substituteInPlace pytest.ini \
--replace " --cov samtranslator --cov-report term-missing --cov-fail-under 95" ""
'';
@ -61,15 +50,13 @@ buildPythonPackage rec {
pyyaml
];
doCheck = false; # tests fail in weird ways
pythonImportsCheck = [
"samtranslator"
];
meta = with lib; {
description = "Python library to transform SAM templates into AWS CloudFormation templates";
homepage = "https://github.com/awslabs/serverless-application-model";
homepage = "https://github.com/aws/serverless-application-model";
license = licenses.asl20;
maintainers = with maintainers; [ ];
};


@ -0,0 +1,35 @@
{ lib
, buildPythonPackage
, fetchPypi
, numpy
, scipy
, scikit-learn
, matplotlib
, unittestCheckHook
}:
buildPythonPackage rec {
pname = "baycomp";
version = "1.0.2";
src = fetchPypi {
inherit pname version;
hash = "sha256-xDRywWvXzfSITdTHPdMH5KPacJf+Scg81eiNdRQpI7A=";
};
propagatedBuildInputs = [
numpy
scipy
matplotlib
];
nativeCheckInputs = [ unittestCheckHook ];
pythonImportsCheck = [ "baycomp" ];
meta = {
description = "A library for Bayesian comparison of classifiers";
homepage = "https://github.com/janezd/baycomp";
license = [ lib.licenses.mit ];
maintainers = [ lib.maintainers.lucasew ];
};
}


@ -12,6 +12,7 @@
, cloudpickle
, deepmerge
, fs
, inflection
, jinja2
, numpy
, opentelemetry-api
@ -67,7 +68,7 @@
}:
let
version = "1.1.1";
version = "1.1.3";
aws = [ fs-s3fs ];
grpc = [
grpcio
@ -103,7 +104,7 @@ buildPythonPackage {
owner = "bentoml";
repo = "BentoML";
rev = "v${version}";
hash = "sha256-V5lquPZT7XBnRdPIEfgbxIBHX+i4N081SYQVK0CkSo8=";
hash = "sha256-RJDFjFxTO7UhGZQB8flXug0xCpFdJqLlCZUu0O5W1YE=";
};
pythonRelaxDeps = [
@ -125,6 +126,7 @@ buildPythonPackage {
cloudpickle
deepmerge
fs
inflection
jinja2
numpy
opentelemetry-api


@ -5,18 +5,19 @@
, click
, setuptools-scm
, pythonOlder
, typing-extensions
}:
buildPythonPackage rec {
pname = "cloup";
version = "3.0.0";
version = "3.0.1";
format = "setuptools";
disabled = pythonOlder "3.6";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
hash = "sha256-ur1L3P0nT4Z3nW0845ueoPbMWrPU7+NbpoQebW0DeJM=";
hash = "sha256-4ItMwje7mlvY/4G6btSUmOIgDaw5InsWSOlXiCAo6ZM=";
};
nativeBuildInputs = [
@ -25,6 +26,8 @@ buildPythonPackage rec {
propagatedBuildInputs = [
click
] ++ lib.optionals (pythonOlder "3.8") [
typing-extensions
];
nativeCheckInputs = [


@ -5,20 +5,21 @@
, pythonOlder
, aiohttp
, backoff
, fastavro
, importlib-metadata
, requests
}:
buildPythonPackage rec {
pname = "cohere";
version = "4.16.0";
version = "4.21";
format = "pyproject";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
hash = "sha256-i6kbjugulAcmmInFb+rH4WB50dM7SDO1HNW/JgD4OTQ=";
hash = "sha256-9hFDj0Cd/F1aChU6WFNJ9agLFpxxArWZTZmZ7PhECGY=";
};
nativeBuildInputs = [
@ -28,6 +29,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
aiohttp
backoff
fastavro
importlib-metadata
requests
];


@ -1,23 +1,32 @@
{ lib, buildPythonPackage, fetchPypi, isPyPy
, pytest, pytest-cov, pytest-mock, freezegun
, pytest, pytest-cov, pytest-mock, freezegun, safety, pre-commit
, jinja2, future, binaryornot, click, jinja2-time, requests
, python-slugify
, pyyaml
, arrow
, rich
}:
buildPythonPackage rec {
pname = "cookiecutter";
version = "2.1.1";
version = "2.3.0";
# not sure why this is broken
disabled = isPyPy;
src = fetchPypi {
inherit pname version;
hash = "sha256-85gr6NnFPawSYYZAE/3sf4Ov0uQu3m9t0GnF4UnFQNU=";
hash = "sha256-lCp5SYF0f21/Q51uSdOdyRqaZBKDYUFgyTxHTHLCliE=";
};
nativeCheckInputs = [ pytest pytest-cov pytest-mock freezegun ];
nativeCheckInputs = [
pytest
pytest-cov
pytest-mock
freezegun
safety
pre-commit
];
propagatedBuildInputs = [
binaryornot
jinja2
@ -26,6 +35,8 @@ buildPythonPackage rec {
jinja2-time
python-slugify
requests
arrow
rich
];
# requires network access for cloning git repos


@ -36,6 +36,7 @@
, av
, opencv4
, pytest-mock
, pybind11
}:
let
@ -75,6 +76,8 @@ buildPythonPackage {
which
];
buildInputs = [ pybind11 ];
pythonRelaxDeps = [
"black"
];


@ -8,6 +8,7 @@
, pytest-mock
, pytestCheckHook
, pythonOlder
, segno
, setuptools-scm
, syrupy
, zeroconf
@ -15,7 +16,7 @@
buildPythonPackage rec {
pname = "devolo-plc-api";
version = "1.3.2";
version = "1.4.0";
format = "pyproject";
disabled = pythonOlder "3.8";
@ -24,7 +25,7 @@ buildPythonPackage rec {
owner = "2Fake";
repo = "devolo_plc_api";
rev = "refs/tags/v${version}";
hash = "sha256-viOyxgFydPrTPFz6JsjJT6IiUIeoIwd+bcrAJfomDI8=";
hash = "sha256-roKwCNOvSVRFKBxXz0a9SDo925RHqX0qKv/1QWD3diw=";
};
postPatch = ''
@ -41,6 +42,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
httpx
protobuf
segno
zeroconf
];


@ -0,0 +1,39 @@
{ lib
, buildPythonPackage
, fetchPypi
, pythonOlder
, setuptools
, wheel
}:
buildPythonPackage rec {
pname = "expandvars";
version = "0.11.0";
format = "pyproject";
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
hash = "sha256-Q7Qn9dMnqzYAY98mFR+Y0qbwj+GPKJWjKn9fDxF7W1I=";
};
nativeBuildInputs = [
setuptools
wheel
];
# The PyPi package does not supply any tests
doCheck = false;
pythonImportsCheck = [
"expandvars"
];
meta = with lib; {
description = "Expand system variables Unix style";
homepage = "https://github.com/sayanarijit/expandvars";
license = licenses.mit;
maintainers = with maintainers; [ geluk ];
};
}


@ -18,7 +18,7 @@
buildPythonPackage rec {
pname = "faraday-plugins";
version = "1.12.1";
version = "1.13.0";
format = "setuptools";
disabled = pythonOlder "3.7";
@ -27,7 +27,7 @@ buildPythonPackage rec {
owner = "infobyte";
repo = "faraday_plugins";
rev = "refs/tags/${version}";
hash = "sha256-sDHqBGRJQuAj2zB7hcIy3u5iNCxBHO1ub0eHxfgd7kI=";
hash = "sha256-t1C9fS0LU46J7y+rp2pTCVma09aFqzuBtslrlU+MS1E=";
};
postPatch = ''


@ -1,12 +1,13 @@
{ lib, buildPythonPackage, fetchPypi, flask, webassets, flask_script, nose }:
{ lib, buildPythonPackage, fetchPypi, flask, webassets, flask-script, nose }:
buildPythonPackage rec {
pname = "Flask-Assets";
pname = "flask-assets";
version = "2.0";
src = fetchPypi {
inherit pname version;
sha256 = "1dfdea35e40744d46aada72831f7613d67bf38e8b20ccaaa9e91fdc37aa3b8c2";
pname = "Flask-Assets";
inherit version;
hash = "sha256-Hf3qNeQHRNRqracoMfdhPWe/OOiyDMqqnpH9w3qjuMI=";
};
patchPhase = ''
@ -15,7 +16,7 @@ buildPythonPackage rec {
substituteInPlace tests/test_integration.py --replace "'/foo'" "'/x/foo'"
'';
propagatedBuildInputs = [ flask webassets flask_script nose ];
propagatedBuildInputs = [ flask webassets flask-script nose ];
meta = with lib; {
homepage = "https://github.com/miracle2k/flask-assets";


@ -2,12 +2,13 @@
, flask, elasticsearch }:
buildPythonPackage rec {
pname = "Flask-Elastic";
pname = "flask-elastic";
version = "0.2";
src = fetchPypi {
inherit pname version;
sha256 = "0hqkwff6z78aspkf1cf815qwp02g3ch1y9dhm5v2ap8vakyac0az";
pname = "Flask-Elastic";
inherit version;
hash = "sha256-XwGm/FQbXSV2qbAlHyAbT4DLcQnIseDm1Qqdb5zjE0M=";
};
propagatedBuildInputs = [ flask elasticsearch ];


@ -3,7 +3,7 @@
}:
buildPythonPackage rec {
pname = "Flask-Mail";
pname = "flask-mail";
version = "0.9.1";
meta = {
@ -13,8 +13,9 @@ buildPythonPackage rec {
};
src = fetchPypi {
inherit pname version;
sha256 = "0hazjc351s3gfbhk975j8k65cg4gf31yq404yfy0gx0bjjdfpr92";
pname = "Flask-Mail";
inherit version;
hash = "sha256-IuXrmpQL9Ae88wQQ7MNwjzxWzESynDThcm/oUAaTX0E=";
};
propagatedBuildInputs = [ blinker flask ];


@ -4,14 +4,14 @@
, pythonOlder
, alembic
, flask
, flask_script
, flask-script
, flask-sqlalchemy
, pytestCheckHook
, setuptools
}:
buildPythonPackage rec {
pname = "Flask-Migrate";
pname = "flask-migrate";
version = "4.0.4";
format = "pyproject";
@ -19,7 +19,7 @@ buildPythonPackage rec {
src = fetchFromGitHub {
owner = "miguelgrinberg";
repo = pname;
repo = "Flask-Migrate";
rev = "v${version}";
hash = "sha256-x52LGYvXuTUCP9dR3FP7a/xNRWyCAV1sReDAYJbYDvE=";
};
@ -40,7 +40,7 @@ buildPythonPackage rec {
nativeCheckInputs = [
pytestCheckHook
flask_script
flask-script
];
meta = with lib; {


@ -1,12 +1,13 @@
{ lib, buildPythonPackage, fetchPypi, flask, blinker, nose }:
buildPythonPackage rec {
pname = "Flask-Principal";
pname = "flask-principal";
version = "0.4.0";
src = fetchPypi {
inherit pname version;
sha256 = "0lwlr5smz8vfm5h9a9i7da3q1c24xqc6vm9jdywdpgxfbi5i7mpm";
pname = "Flask-Principal";
inherit version;
hash = "sha256-9dYTS1yuv9u4bzLVbRjuRLCAh2onJpVgqW6jX3XJlFM=";
};
propagatedBuildInputs = [ flask blinker ];

Some files were not shown because too many files have changed in this diff