Merge master into staging-next

This commit is contained in:
github-actions[bot] 2023-08-24 06:01:14 +00:00 committed by GitHub
commit a451dcf514
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
61 changed files with 3995 additions and 20569 deletions

View File

@ -205,6 +205,8 @@ The module update takes care of the new config syntax and the data itself (user
- `programs.gnupg.agent.pinentryFlavor` is now set in `/etc/gnupg/gpg-agent.conf`, and will no longer take precedence over a `pinentry-program` set in `~/.gnupg/gpg-agent.conf`.
- `services.influxdb2` now supports doing an automatic initial setup and provisioning of users, organizations, buckets and authentication tokens, see [#249502](https://github.com/NixOS/nixpkgs/pull/249502) for more details.
- `wrapHelm` now exposes `passthru.pluginsDir` which can be passed to `helmfile`. For convenience, a top-level package `helmfile-wrapped` has been added, which inherits `passthru.pluginsDir` from `kubernetes-helm-wrapped`. See [#217768](https://github.com/NixOS/nixpkgs/issues/217768) for details.
- `boot.initrd.network.udhcp.enable` allows control over dhcp during stage 1 regardless of what `networking.useDHCP` is set to.

View File

@ -105,36 +105,25 @@ in
}
];
services.zram-generator.enable = true;
system.requiredKernelConfig = with config.lib.kernelConfig; [
(isModule "ZRAM")
];
# Disabling this for the moment, as it would create and mkswap devices twice,
# once in stage 2 boot, and again when the zram-reloader service starts.
# boot.kernelModules = [ "zram" ];
systemd.packages = [ pkgs.zram-generator ];
systemd.services."systemd-zram-setup@".path = [ pkgs.util-linux ]; # for mkswap
environment.etc."systemd/zram-generator.conf".source =
(pkgs.formats.ini { }).generate "zram-generator.conf" (lib.listToAttrs
(builtins.map
(dev: {
name = dev;
value =
let
size = "${toString cfg.memoryPercent} / 100 * ram";
in
{
zram-size = if cfg.memoryMax != null then "min(${size}, ${toString cfg.memoryMax} / 1024 / 1024)" else size;
compression-algorithm = cfg.algorithm;
swap-priority = cfg.priority;
} // lib.optionalAttrs (cfg.writebackDevice != null) {
writeback-device = cfg.writebackDevice;
};
})
devices));
services.zram-generator.settings = lib.listToAttrs
(builtins.map
(dev: {
name = dev;
value =
let
size = "${toString cfg.memoryPercent} / 100 * ram";
in
{
zram-size = if cfg.memoryMax != null then "min(${size}, ${toString cfg.memoryMax} / 1024 / 1024)" else size;
compression-algorithm = cfg.algorithm;
swap-priority = cfg.priority;
} // lib.optionalAttrs (cfg.writebackDevice != null) {
writeback-device = cfg.writebackDevice;
};
})
devices);
};

View File

@ -1172,6 +1172,7 @@
./services/system/self-deploy.nix
./services/system/systembus-notify.nix
./services/system/uptimed.nix
./services/system/zram-generator.nix
./services/torrent/deluge.nix
./services/torrent/flexget.nix
./services/torrent/magnetico.nix

View File

@ -3,34 +3,291 @@
let
inherit
(lib)
any
attrNames
attrValues
count
escapeShellArg
filterAttrs
flatten
flip
getExe
hasAttr
hasInfix
listToAttrs
literalExpression
mapAttrsToList
mdDoc
mkEnableOption
mkIf
mkOption
nameValuePair
optional
subtractLists
types
unique
;
format = pkgs.formats.json { };
cfg = config.services.influxdb2;
configFile = format.generate "config.json" cfg.settings;
validPermissions = [
"authorizations"
"buckets"
"dashboards"
"orgs"
"tasks"
"telegrafs"
"users"
"variables"
"secrets"
"labels"
"views"
"documents"
"notificationRules"
"notificationEndpoints"
"checks"
"dbrp"
"annotations"
"sources"
"scrapers"
"notebooks"
"remotes"
"replications"
];
# Determines whether at least one active api token is defined
anyAuthDefined =
flip any (attrValues cfg.provision.organizations)
(o: o.present && flip any (attrValues o.auths)
(a: a.present && a.tokenFile != null));
provisionState = pkgs.writeText "provision_state.json" (builtins.toJSON {
inherit (cfg.provision) organizations users;
});
provisioningScript = pkgs.writeShellScript "post-start-provision" ''
set -euo pipefail
export INFLUX_HOST="http://"${escapeShellArg (
if ! hasAttr "http-bind-address" cfg.settings
|| hasInfix "0.0.0.0" cfg.settings.http-bind-address
then "localhost:8086"
else cfg.settings.http-bind-address
)}
# Wait for the influxdb server to come online
count=0
while ! influx ping &>/dev/null; do
if [ "$count" -eq 300 ]; then
echo "Tried for 30 seconds, giving up..."
exit 1
fi
if ! kill -0 "$MAINPID"; then
echo "Main server died, giving up..."
exit 1
fi
sleep 0.1
count=$((count++))
done
# Do the initial database setup. Pass /dev/null as configs-path to
# avoid saving the token as the active config.
if test -e "$STATE_DIRECTORY/.first_startup"; then
influx setup \
--configs-path /dev/null \
--org ${escapeShellArg cfg.provision.initialSetup.organization} \
--bucket ${escapeShellArg cfg.provision.initialSetup.bucket} \
--username ${escapeShellArg cfg.provision.initialSetup.username} \
--password "$(< "$CREDENTIALS_DIRECTORY/admin-password")" \
--token "$(< "$CREDENTIALS_DIRECTORY/admin-token")" \
--retention ${toString cfg.provision.initialSetup.retention}s \
--force >/dev/null
rm -f "$STATE_DIRECTORY/.first_startup"
fi
provision_result=$(${getExe pkgs.influxdb2-provision} ${provisionState} "$INFLUX_HOST" "$(< "$CREDENTIALS_DIRECTORY/admin-token")")
if [[ "$(jq '[.auths[] | select(.action == "created")] | length' <<< "$provision_result")" -gt 0 ]]; then
echo "Created at least one new token, queueing service restart so we can manipulate secrets"
touch "$STATE_DIRECTORY/.needs_restart"
fi
'';
restarterScript = pkgs.writeShellScript "post-start-restarter" ''
set -euo pipefail
if test -e "$STATE_DIRECTORY/.needs_restart"; then
rm -f "$STATE_DIRECTORY/.needs_restart"
/run/current-system/systemd/bin/systemctl restart influxdb2
fi
'';
organizationSubmodule = types.submodule (organizationSubmod: let
org = organizationSubmod.config._module.args.name;
in {
options = {
present = mkOption {
description = mdDoc "Whether to ensure that this organization is present or absent.";
type = types.bool;
default = true;
};
description = mkOption {
description = mdDoc "Optional description for the organization.";
default = null;
type = types.nullOr types.str;
};
buckets = mkOption {
description = mdDoc "Buckets to provision in this organization.";
default = {};
type = types.attrsOf (types.submodule (bucketSubmod: let
bucket = bucketSubmod.config._module.args.name;
in {
options = {
present = mkOption {
description = mdDoc "Whether to ensure that this bucket is present or absent.";
type = types.bool;
default = true;
};
description = mkOption {
description = mdDoc "Optional description for the bucket.";
default = null;
type = types.nullOr types.str;
};
retention = mkOption {
type = types.ints.unsigned;
default = 0;
description = mdDoc "The duration in seconds for which the bucket will retain data (0 is infinite).";
};
};
}));
};
auths = mkOption {
description = mdDoc "API tokens to provision for the user in this organization.";
default = {};
type = types.attrsOf (types.submodule (authSubmod: let
auth = authSubmod.config._module.args.name;
in {
options = {
id = mkOption {
description = mdDoc "A unique identifier for this authentication token. Since influx doesn't store names for tokens, this will be hashed and appended to the description to identify the token.";
readOnly = true;
default = builtins.substring 0 32 (builtins.hashString "sha256" "${org}:${auth}");
defaultText = "<a hash derived from org and name>";
type = types.str;
};
present = mkOption {
description = mdDoc "Whether to ensure that this user is present or absent.";
type = types.bool;
default = true;
};
description = mkOption {
description = ''
Optional description for the API token.
Note that the actual token will always be created with a description regardless
of whether this is given or not. The name is always added plus a unique suffix
to later identify the token to track whether it has already been created.
'';
default = null;
type = types.nullOr types.str;
};
tokenFile = mkOption {
type = types.nullOr types.path;
default = null;
description = mdDoc "The token value. If not given, influx will automatically generate one.";
};
operator = mkOption {
description = mdDoc "Grants all permissions in all organizations.";
default = false;
type = types.bool;
};
allAccess = mkOption {
description = mdDoc "Grants all permissions in the associated organization.";
default = false;
type = types.bool;
};
readPermissions = mkOption {
description = mdDoc ''
The read permissions to include for this token. Access is usually granted only
for resources in the associated organization.
Available permissions are `authorizations`, `buckets`, `dashboards`,
`orgs`, `tasks`, `telegrafs`, `users`, `variables`, `secrets`, `labels`, `views`,
`documents`, `notificationRules`, `notificationEndpoints`, `checks`, `dbrp`,
`annotations`, `sources`, `scrapers`, `notebooks`, `remotes`, `replications`.
Refer to `influx auth create --help` for a full list with descriptions.
`buckets` grants read access to all associated buckets. Use `readBuckets` to define
more granular access permissions.
'';
default = [];
type = types.listOf (types.enum validPermissions);
};
writePermissions = mkOption {
description = mdDoc ''
The write permissions to include for this token. Access is usually granted only
for resources in the associated organization.
Available permissions are `authorizations`, `buckets`, `dashboards`,
`orgs`, `tasks`, `telegrafs`, `users`, `variables`, `secrets`, `labels`, `views`,
`documents`, `notificationRules`, `notificationEndpoints`, `checks`, `dbrp`,
`annotations`, `sources`, `scrapers`, `notebooks`, `remotes`, `replications`.
Refer to `influx auth create --help` for a full list with descriptions.
`buckets` grants write access to all associated buckets. Use `writeBuckets` to define
more granular access permissions.
'';
default = [];
type = types.listOf (types.enum validPermissions);
};
readBuckets = mkOption {
description = mdDoc "The organization's buckets which should be allowed to be read";
default = [];
type = types.listOf types.str;
};
writeBuckets = mkOption {
description = mdDoc "The organization's buckets which should be allowed to be written";
default = [];
type = types.listOf types.str;
};
};
}));
};
};
});
in
{
options = {
services.influxdb2 = {
enable = mkEnableOption (lib.mdDoc "the influxdb2 server");
enable = mkEnableOption (mdDoc "the influxdb2 server");
package = mkOption {
default = pkgs.influxdb2-server;
defaultText = literalExpression "pkgs.influxdb2";
description = lib.mdDoc "influxdb2 derivation to use.";
description = mdDoc "influxdb2 derivation to use.";
type = types.package;
};
settings = mkOption {
default = { };
description = lib.mdDoc ''configuration options for influxdb2, see <https://docs.influxdata.com/influxdb/v2.0/reference/config-options> for details.'';
description = mdDoc ''configuration options for influxdb2, see <https://docs.influxdata.com/influxdb/v2.0/reference/config-options> for details.'';
type = format.type;
};
@ -41,52 +298,135 @@ in
organization = mkOption {
type = types.str;
example = "main";
description = "Primary organization name";
description = mdDoc "Primary organization name";
};
bucket = mkOption {
type = types.str;
example = "example";
description = "Primary bucket name";
description = mdDoc "Primary bucket name";
};
username = mkOption {
type = types.str;
default = "admin";
description = "Primary username";
description = mdDoc "Primary username";
};
retention = mkOption {
type = types.str;
default = "0";
description = ''
The duration for which the bucket will retain data (0 is infinite).
Accepted units are `ns` (nanoseconds), `us` or `µs` (microseconds), `ms` (milliseconds),
`s` (seconds), `m` (minutes), `h` (hours), `d` (days) and `w` (weeks).
'';
type = types.ints.unsigned;
default = 0;
description = mdDoc "The duration in seconds for which the bucket will retain data (0 is infinite).";
};
passwordFile = mkOption {
type = types.path;
description = "Password for primary user. Don't use a file from the nix store!";
description = mdDoc "Password for primary user. Don't use a file from the nix store!";
};
tokenFile = mkOption {
type = types.path;
description = "API Token to set for the admin user. Don't use a file from the nix store!";
description = mdDoc "API Token to set for the admin user. Don't use a file from the nix store!";
};
};
organizations = mkOption {
description = mdDoc "Organizations to provision.";
example = literalExpression ''
{
myorg = {
description = "My organization";
buckets.mybucket = {
description = "My bucket";
retention = 31536000; # 1 year
};
auths.mytoken = {
readBuckets = ["mybucket"];
tokenFile = "/run/secrets/mytoken";
};
};
}
'';
default = {};
type = types.attrsOf organizationSubmodule;
};
users = mkOption {
description = mdDoc "Users to provision.";
default = {};
example = literalExpression ''
{
# admin = {}; /* The initialSetup.username will automatically be added. */
myuser.passwordFile = "/run/secrets/myuser_password";
}
'';
type = types.attrsOf (types.submodule (userSubmod: let
user = userSubmod.config._module.args.name;
org = userSubmod.config.org;
in {
options = {
present = mkOption {
description = mdDoc "Whether to ensure that this user is present or absent.";
type = types.bool;
default = true;
};
passwordFile = mkOption {
description = mdDoc "Password for the user. If unset, the user will not be able to log in until a password is set by an operator! Don't use a file from the nix store!";
default = null;
type = types.nullOr types.path;
};
};
}));
};
};
};
};
config = mkIf cfg.enable {
assertions = [
{
assertion = !(hasAttr "bolt-path" cfg.settings) && !(hasAttr "engine-path" cfg.settings);
message = "services.influxdb2.config: bolt-path and engine-path should not be set as they are managed by systemd";
}
];
assertions =
[
{
assertion = !(hasAttr "bolt-path" cfg.settings) && !(hasAttr "engine-path" cfg.settings);
message = "services.influxdb2.config: bolt-path and engine-path should not be set as they are managed by systemd";
}
]
++ flatten (flip mapAttrsToList cfg.provision.organizations (orgName: org:
flip mapAttrsToList org.auths (authName: auth:
[
{
assertion = 1 == count (x: x) [
auth.operator
auth.allAccess
(auth.readPermissions != []
|| auth.writePermissions != []
|| auth.readBuckets != []
|| auth.writeBuckets != [])
];
message = "influxdb2: provision.organizations.${orgName}.auths.${authName}: The `operator` and `allAccess` options are mutually exclusive with each other and the granular permission settings.";
}
(let unknownBuckets = subtractLists (attrNames org.buckets) auth.readBuckets; in {
assertion = unknownBuckets == [];
message = "influxdb2: provision.organizations.${orgName}.auths.${authName}: Refers to invalid buckets in readBuckets: ${toString unknownBuckets}";
})
(let unknownBuckets = subtractLists (attrNames org.buckets) auth.writeBuckets; in {
assertion = unknownBuckets == [];
message = "influxdb2: provision.organizations.${orgName}.auths.${authName}: Refers to invalid buckets in writeBuckets: ${toString unknownBuckets}";
})
]
)
));
services.influxdb2.provision = mkIf cfg.provision.enable {
organizations.${cfg.provision.initialSetup.organization} = {
buckets.${cfg.provision.initialSetup.bucket} = {
inherit (cfg.provision.initialSetup) retention;
};
};
users.${cfg.provision.initialSetup.username} = {
inherit (cfg.provision.initialSetup) passwordFile;
};
};
systemd.services.influxdb2 = {
description = "InfluxDB is an open-source, distributed, time series database";
@ -111,58 +451,38 @@ in
"admin-password:${cfg.provision.initialSetup.passwordFile}"
"admin-token:${cfg.provision.initialSetup.tokenFile}"
];
ExecStartPost = mkIf cfg.provision.enable (
[provisioningScript] ++
# Only the restarter runs with elevated privileges
optional anyAuthDefined "+${restarterScript}"
);
};
path = [pkgs.influxdb2-cli];
path = [
pkgs.influxdb2-cli
pkgs.jq
];
# Mark if this is the first startup so postStart can do the initial setup
preStart = mkIf cfg.provision.enable ''
# Mark if this is the first startup so postStart can do the initial setup.
# Also extract any token secret mappings and apply them if this isn't the first start.
preStart = let
tokenPaths = listToAttrs (flatten
# For all organizations
(flip mapAttrsToList cfg.provision.organizations
# For each contained token that has a token file
(_: org: flip mapAttrsToList (filterAttrs (_: x: x.tokenFile != null) org.auths)
# Collect id -> tokenFile for the mapping
(_: auth: nameValuePair auth.id auth.tokenFile))));
tokenMappings = pkgs.writeText "token_mappings.json" (builtins.toJSON tokenPaths);
in mkIf cfg.provision.enable ''
if ! test -e "$STATE_DIRECTORY/influxd.bolt"; then
touch "$STATE_DIRECTORY/.first_startup"
else
# Manipulate provisioned api tokens if necessary
${getExe pkgs.influxdb2-token-manipulator} "$STATE_DIRECTORY/influxd.bolt" ${tokenMappings}
fi
'';
postStart = let
initCfg = cfg.provision.initialSetup;
in mkIf cfg.provision.enable (
''
set -euo pipefail
export INFLUX_HOST="http://"${escapeShellArg (cfg.settings.http-bind-address or "localhost:8086")}
# Wait for the influxdb server to come online
count=0
while ! influx ping &>/dev/null; do
if [ "$count" -eq 300 ]; then
echo "Tried for 30 seconds, giving up..."
exit 1
fi
if ! kill -0 "$MAINPID"; then
echo "Main server died, giving up..."
exit 1
fi
sleep 0.1
count=$((count++))
done
# Do the initial database setup. Pass /dev/null as configs-path to
# avoid saving the token as the active config.
if test -e "$STATE_DIRECTORY/.first_startup"; then
influx setup \
--configs-path /dev/null \
--org ${escapeShellArg initCfg.organization} \
--bucket ${escapeShellArg initCfg.bucket} \
--username ${escapeShellArg initCfg.username} \
--password "$(< "$CREDENTIALS_DIRECTORY/admin-password")" \
--token "$(< "$CREDENTIALS_DIRECTORY/admin-token")" \
--retention ${escapeShellArg initCfg.retention} \
--force >/dev/null
rm -f "$STATE_DIRECTORY/.first_startup"
fi
''
);
};
users.extraUsers.influxdb2 = {

View File

@ -0,0 +1,38 @@
{ config, lib, pkgs, ... }:

let
  inherit (lib) mdDoc mkEnableOption mkIf mkOption mkPackageOptionMD types;

  cfg = config.services.zram-generator;
  # zram-generator reads an INI-style configuration file.
  settingsFormat = pkgs.formats.ini { };
in
{
  meta.maintainers = with lib.maintainers; [ nickcao ];

  options.services.zram-generator = {
    enable = mkEnableOption (mdDoc "Systemd unit generator for zram devices");

    package = mkPackageOptionMD pkgs "zram-generator" { };

    settings = mkOption {
      type = types.submodule {
        freeformType = settingsFormat.type;
      };
      default = { };
      description = mdDoc ''
        Configuration for zram-generator,
        see https://github.com/systemd/zram-generator for documentation.
      '';
    };
  };

  config = mkIf cfg.enable {
    # The zram driver must at least be available as a kernel module.
    system.requiredKernelConfig = with config.lib.kernelConfig; [
      (isModule "ZRAM")
    ];
    # Pull in the generator's own units; the setup unit needs mkswap on PATH.
    systemd.packages = [ cfg.package ];
    systemd.services."systemd-zram-setup@".path = [ pkgs.util-linux ]; # for mkswap
    environment.etc."systemd/zram-generator.conf".source =
      settingsFormat.generate "zram-generator.conf" cfg.settings;
  };
}

View File

@ -443,10 +443,8 @@ in {
loki = handleTest ./loki.nix {};
luks = handleTest ./luks.nix {};
lvm2 = handleTest ./lvm2 {};
lxd = handleTest ./lxd.nix {};
lxd-nftables = handleTest ./lxd-nftables.nix {};
lxd = handleTest ./lxd {};
lxd-image-server = handleTest ./lxd-image-server.nix {};
lxd-ui = handleTest ./lxd-ui.nix {};
#logstash = handleTest ./logstash.nix {};
lorri = handleTest ./lorri/default.nix {};
maddy = discoverTests (import ./maddy { inherit handleTest; });

View File

@ -1,24 +0,0 @@
# Preseed answers for `lxd init --preseed`, used by the NixOS LXD tests
# (equivalent to answering the interactive `lxd init` questions).
# Declares: a directory-backed storage pool, an IPv4-only bridge network,
# and a default profile wiring containers to both.
# NOTE(review): indentation appears flattened in this rendering — the original
# file nests keys under their parents; confirm against the upstream file.
storage_pools:
- name: default
driver: dir
config:
source: /var/lxd-pool
# Bridge with an auto-assigned IPv4 range; IPv6 disabled.
networks:
- name: lxdbr0
type: bridge
config:
ipv4.address: auto
ipv6.address: none
# Default profile: NIC on the bridge above, root disk on the dir pool.
profiles:
- name: default
devices:
eth0:
name: eth0
network: lxdbr0
type: nic
root:
path: /
pool: default
type: disk

View File

@ -6,6 +6,9 @@ import ./make-test-python.nix ({ pkgs, ...} : {
nodes.machine = { lib, ... }: {
environment.systemPackages = [ pkgs.influxdb2-cli ];
# Make sure that the service is restarted immediately if tokens need to be rewritten
# without relying on any Restart=on-failure behavior
systemd.services.influxdb2.serviceConfig.RestartSec = 6000;
services.influxdb2.enable = true;
services.influxdb2.provision = {
enable = true;
@ -15,22 +18,208 @@ import ./make-test-python.nix ({ pkgs, ...} : {
passwordFile = pkgs.writeText "admin-pw" "ExAmPl3PA55W0rD";
tokenFile = pkgs.writeText "admin-token" "verysecureadmintoken";
};
organizations.someorg = {
buckets.somebucket = {};
auths.sometoken = {
description = "some auth token";
readBuckets = ["somebucket"];
writeBuckets = ["somebucket"];
};
};
users.someuser.passwordFile = pkgs.writeText "tmp-pw" "abcgoiuhaoga";
};
specialisation.withModifications.configuration = { ... }: {
services.influxdb2.provision = {
organizations.someorg.buckets.somebucket.present = false;
organizations.someorg.auths.sometoken.present = false;
users.someuser.present = false;
organizations.myorg = {
description = "Myorg description";
buckets.mybucket = {
description = "Mybucket description";
};
auths.mytoken = {
operator = true;
description = "operator token";
tokenFile = pkgs.writeText "tmp-tok" "someusertoken";
};
};
users.myuser.passwordFile = pkgs.writeText "tmp-pw" "abcgoiuhaoga";
};
};
specialisation.withParentDelete.configuration = { ... }: {
services.influxdb2.provision = {
organizations.someorg.present = false;
# Deleting the parent implies:
#organizations.someorg.buckets.somebucket.present = false;
#organizations.someorg.auths.sometoken.present = false;
};
};
specialisation.withNewTokens.configuration = { ... }: {
services.influxdb2.provision = {
organizations.default = {
auths.operator = {
operator = true;
description = "new optoken";
tokenFile = pkgs.writeText "tmp-tok" "newoptoken";
};
auths.allaccess = {
operator = true;
description = "new allaccess";
tokenFile = pkgs.writeText "tmp-tok" "newallaccess";
};
auths.specifics = {
description = "new specifics";
readPermissions = ["users" "tasks"];
writePermissions = ["tasks"];
tokenFile = pkgs.writeText "tmp-tok" "newspecificstoken";
};
};
};
};
};
testScript = { nodes, ... }:
let
specialisations = "${nodes.machine.system.build.toplevel}/specialisation";
tokenArg = "--token verysecureadmintoken";
in ''
def assert_contains(haystack, needle):
if needle not in haystack:
print("The haystack that will cause the following exception is:")
print("---")
print(haystack)
print("---")
raise Exception(f"Expected string '{needle}' was not found")
def assert_lacks(haystack, needle):
if needle in haystack:
print("The haystack that will cause the following exception is:")
print("---")
print(haystack, end="")
print("---")
raise Exception(f"Unexpected string '{needle}' was found")
machine.wait_for_unit("influxdb2.service")
machine.fail("curl --fail -X POST 'http://localhost:8086/api/v2/signin' -u admin:wrongpassword")
machine.succeed("curl --fail -X POST 'http://localhost:8086/api/v2/signin' -u admin:ExAmPl3PA55W0rD")
out = machine.succeed("influx org list ${tokenArg}")
assert "default" in out
assert_contains(out, "default")
assert_lacks(out, "myorg")
assert_contains(out, "someorg")
out = machine.succeed("influx bucket list ${tokenArg} --org default")
assert "default" in out
assert_contains(out, "default")
machine.fail("influx bucket list ${tokenArg} --org myorg")
out = machine.succeed("influx bucket list ${tokenArg} --org someorg")
assert_contains(out, "somebucket")
out = machine.succeed("influx user list ${tokenArg}")
assert_contains(out, "admin")
assert_lacks(out, "myuser")
assert_contains(out, "someuser")
out = machine.succeed("influx auth list ${tokenArg}")
assert_lacks(out, "operator token")
assert_contains(out, "some auth token")
with subtest("withModifications"):
machine.succeed('${specialisations}/withModifications/bin/switch-to-configuration test')
machine.wait_for_unit("influxdb2.service")
out = machine.succeed("influx org list ${tokenArg}")
assert_contains(out, "default")
assert_contains(out, "myorg")
assert_contains(out, "someorg")
out = machine.succeed("influx bucket list ${tokenArg} --org myorg")
assert_contains(out, "mybucket")
out = machine.succeed("influx bucket list ${tokenArg} --org someorg")
assert_lacks(out, "somebucket")
out = machine.succeed("influx user list ${tokenArg}")
assert_contains(out, "admin")
assert_contains(out, "myuser")
assert_lacks(out, "someuser")
out = machine.succeed("influx auth list ${tokenArg}")
assert_contains(out, "operator token")
assert_lacks(out, "some auth token")
# Make sure the user token is also usable
machine.succeed("influx auth list --token someusertoken")
with subtest("keepsUnrelated"):
machine.succeed('${nodes.machine.system.build.toplevel}/bin/switch-to-configuration test')
machine.wait_for_unit("influxdb2.service")
out = machine.succeed("influx org list ${tokenArg}")
assert_contains(out, "default")
assert_contains(out, "myorg")
assert_contains(out, "someorg")
out = machine.succeed("influx bucket list ${tokenArg} --org default")
assert_contains(out, "default")
out = machine.succeed("influx bucket list ${tokenArg} --org myorg")
assert_contains(out, "mybucket")
out = machine.succeed("influx bucket list ${tokenArg} --org someorg")
assert_contains(out, "somebucket")
out = machine.succeed("influx user list ${tokenArg}")
assert_contains(out, "admin")
assert_contains(out, "myuser")
assert_contains(out, "someuser")
out = machine.succeed("influx auth list ${tokenArg}")
assert_contains(out, "operator token")
assert_contains(out, "some auth token")
with subtest("withParentDelete"):
machine.succeed('${specialisations}/withParentDelete/bin/switch-to-configuration test')
machine.wait_for_unit("influxdb2.service")
out = machine.succeed("influx org list ${tokenArg}")
assert_contains(out, "default")
assert_contains(out, "myorg")
assert_lacks(out, "someorg")
out = machine.succeed("influx bucket list ${tokenArg} --org default")
assert_contains(out, "default")
out = machine.succeed("influx bucket list ${tokenArg} --org myorg")
assert_contains(out, "mybucket")
machine.fail("influx bucket list ${tokenArg} --org someorg")
out = machine.succeed("influx user list ${tokenArg}")
assert_contains(out, "admin")
assert_contains(out, "myuser")
assert_contains(out, "someuser")
out = machine.succeed("influx auth list ${tokenArg}")
assert_contains(out, "operator token")
assert_lacks(out, "some auth token")
with subtest("withNewTokens"):
machine.succeed('${specialisations}/withNewTokens/bin/switch-to-configuration test')
machine.wait_for_unit("influxdb2.service")
out = machine.succeed("influx auth list ${tokenArg}")
assert_contains(out, "operator token")
assert_contains(out, "some auth token")
assert_contains(out, "new optoken")
assert_contains(out, "new allaccess")
assert_contains(out, "new specifics")
'';
})

View File

@ -61,14 +61,14 @@ in {
machine.wait_for_unit("lxd.service")
machine.wait_for_file("/var/lib/lxd/unix.socket")
# It takes additional second for lxd to settle
machine.sleep(1)
# Wait for lxd to settle
machine.succeed("lxd waitready")
# lxd expects the pool's directory to already exist
machine.succeed("mkdir /var/lxd-pool")
machine.succeed(
"cat ${./common/lxd/config.yaml} | lxd init --preseed"
"lxd init --minimal"
)
machine.succeed(

View File

@ -1,7 +1,7 @@
import ./make-test-python.nix ({ pkgs, lib, ... } :
import ../make-test-python.nix ({ pkgs, lib, ... } :
let
lxd-image = import ../release.nix {
lxd-image = import ../../release.nix {
configuration = {
# Building documentation makes the test unnecessarily take a longer time:
documentation.enable = lib.mkForce false;
@ -38,19 +38,18 @@ in {
};
testScript = ''
def instance_is_up(_) -> bool:
status, _ = machine.execute("lxc exec container --disable-stdin --force-interactive /run/current-system/sw/bin/true")
return status == 0
machine.wait_for_unit("sockets.target")
machine.wait_for_unit("lxd.service")
machine.wait_for_file("/var/lib/lxd/unix.socket")
# It takes additional second for lxd to settle
machine.sleep(1)
# Wait for lxd to settle
machine.succeed("lxd waitready")
# lxd expects the pool's directory to already exist
machine.succeed("mkdir /var/lxd-pool")
machine.succeed(
"cat ${./common/lxd/config.yaml} | lxd init --preseed"
)
machine.succeed("lxd init --minimal")
machine.succeed(
"lxc image import ${lxd-image-metadata}/*/*.tar.xz ${lxd-image-rootfs}/*/*.tar.xz --alias nixos"
@ -58,21 +57,23 @@ in {
with subtest("Container can be managed"):
machine.succeed("lxc launch nixos container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
machine.succeed("echo true | lxc exec container /run/current-system/sw/bin/bash -")
machine.succeed("lxc exec container true")
machine.succeed("lxc delete -f container")
with subtest("Container is mounted with lxcfs inside"):
machine.succeed("lxc launch nixos container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
## ---------- ##
## limits.cpu ##
machine.succeed("lxc config set container limits.cpu 1")
machine.succeed("lxc restart container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
assert (
"1"
@ -81,7 +82,8 @@ in {
machine.succeed("lxc config set container limits.cpu 2")
machine.succeed("lxc restart container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
assert (
"2"
@ -93,7 +95,8 @@ in {
machine.succeed("lxc config set container limits.memory 64MB")
machine.succeed("lxc restart container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
assert (
"MemTotal: 62500 kB"
@ -102,7 +105,8 @@ in {
machine.succeed("lxc config set container limits.memory 128MB")
machine.succeed("lxc restart container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
assert (
"MemTotal: 125000 kB"

View File

@ -0,0 +1,9 @@
{
system ? builtins.currentSystem,
config ? {},
pkgs ? import ../../.. {inherit system config;},
}: {
container = import ./container.nix {inherit system pkgs;};
nftables = import ./nftables.nix {inherit system pkgs;};
ui = import ./ui.nix {inherit system pkgs;};
}

View File

@ -5,7 +5,7 @@
# iptables to nftables requires a full reboot, which is a bit hard inside NixOS
# tests.
import ./make-test-python.nix ({ pkgs, ...} : {
import ../make-test-python.nix ({ pkgs, ...} : {
name = "lxd-nftables";
meta = with pkgs.lib.maintainers; {

View File

@ -1,4 +1,4 @@
import ./make-test-python.nix ({ pkgs, lib, ... }: {
import ../make-test-python.nix ({ pkgs, lib, ... }: {
name = "lxd-ui";
meta = with pkgs.lib.maintainers; {

View File

@ -23,7 +23,24 @@ See the [CONTRIBUTING.md](../CONTRIBUTING.md) document for more general informat
## Quick Start to Adding a Package
To add a package to Nixpkgs:
We welcome new contributors of new packages to Nixpkgs, arguably the greatest software database known. However, each new package comes with a cost for the maintainers, Continuous Integration, caching servers and users downloading Nixpkgs.
Before adding a new package, please consider the following questions:
* Is the package ready for general use? We don't want to include projects that are too immature or are going to be abandoned immediately. In case of doubt, check with upstream.
* Does the project have a clear license statement? Remember that software is unfree by default (all rights reserved), and merely providing access to the source code does not imply permission to redistribute it. In case of doubt, ask upstream.
* How realistic is it that it will be used by other people? It's good that nixpkgs caters to various niches, but if it's a niche of 5 people it's probably too small.
* Are you willing to maintain the package? You should care enough about the package to be willing to keep it up and running for at least one complete Nixpkgs' release life-cycle.
If the answer to any of these questions is no, then you should probably not add the package.
This section describes a general framework of understanding; exceptions might apply.
Luckily it's pretty easy to maintain your own package set with Nix, which can then be added to the [Nix User Repository](https://github.com/nix-community/nur) project.
---
Now that this is out of the way. To add a package to Nixpkgs:
1. Checkout the Nixpkgs source tree:
@ -67,7 +84,9 @@ To add a package to Nixpkgs:
Some notes:
- All [`meta`](https://nixos.org/manual/nixpkgs/stable/#chap-meta) attributes are optional, but its still a good idea to provide at least the `description`, `homepage` and [`license`](https://nixos.org/manual/nixpkgs/stable/#sec-meta-license).
- Add yourself as the maintainer of the package.
- All other [`meta`](https://nixos.org/manual/nixpkgs/stable/#chap-meta) attributes are optional, but its still a good idea to provide at least the `description`, `homepage` and [`license`](https://nixos.org/manual/nixpkgs/stable/#sec-meta-license).
- You can use `nix-prefetch-url url` to get the SHA-256 hash of source distributions. There are similar commands as `nix-prefetch-git` and `nix-prefetch-hg` available in `nix-prefetch-scripts` package.

View File

@ -63,6 +63,22 @@ self: let
popd
'';
});
xeft = super.xeft.overrideAttrs (old: let
libExt = pkgs.stdenv.targetPlatform.extensions.sharedLibrary;
in {
dontUnpack = false;
buildInputs = (old.buildInputs or [ ]) ++ [ pkgs.xapian ];
buildPhase = (old.buildPhase or "") + ''
$CXX -shared -o xapian-lite${libExt} xapian-lite.cc $NIX_CFLAGS_COMPILE -lxapian
'';
postInstall = (old.postInstall or "") + "\n" + ''
outd=$out/share/emacs/site-lisp/elpa/xeft-*
install -m444 -t $outd xapian-lite${libExt}
rm $outd/xapian-lite.cc $outd/emacs-module.h $outd/emacs-module-prelude.h $outd/demo.gif $outd/Makefile
'';
});
};
elpaDevelPackages = super // overrides;

View File

@ -158,6 +158,23 @@ self: let
}
);
xeft = super.xeft.overrideAttrs (old: let
libExt = pkgs.stdenv.targetPlatform.extensions.sharedLibrary;
in {
dontUnpack = false;
buildInputs = (old.buildInputs or [ ]) ++ [ pkgs.xapian ];
buildPhase = (old.buildPhase or "") + ''
$CXX -shared -o xapian-lite${libExt} xapian-lite.cc $NIX_CFLAGS_COMPILE -lxapian
'';
postInstall = (old.postInstall or "") + "\n" + ''
outd=$out/share/emacs/site-lisp/elpa/xeft-*
install -m444 -t $outd xapian-lite${libExt}
rm $outd/xapian-lite.cc $outd/emacs-module.h $outd/emacs-module-prelude.h $outd/demo.gif $outd/Makefile
'';
});
};
elpaPackages = super // overrides;

View File

@ -1635,7 +1635,6 @@ self: super: {
"coc-haxe"
"coc-highlight"
"coc-html"
"coc-imselect"
"coc-java"
"coc-jest"
"coc-json"

View File

@ -14,19 +14,19 @@
stdenv.mkDerivation rec {
pname = "drawio";
version = "21.6.1";
version = "21.6.8";
src = fetchFromGitHub {
owner = "jgraph";
repo = "drawio-desktop";
rev = "v${version}";
fetchSubmodules = true;
hash = "sha256-60fOecWDYGkn4rJzxmum14L4IAaHAG+uKyjNo9nkVHg=";
hash = "sha256-k16npV8N4zPIXjc8ZJcQHgv76h2VhbqtT2ZCzDqkF8U";
};
offlineCache = fetchYarnDeps {
yarnLock = src + "/yarn.lock";
hash = "sha256-Knk9ys8Kjk1QOl80vmIA2H6wP8Mj6iNcmb/bR4zMQgw=";
hash = "sha256-rJvwXhtO/HsfpbDyOh+jFc6E9wQ+sZMT8vnhJpGlkF8";
};
nativeBuildInputs = [

View File

@ -1,28 +1,26 @@
{ lib, mkDerivation, fetchpatch, fetchFromGitHub, cmake, qttools, qtwebkit }:
{ lib
, mkDerivation
, fetchFromGitHub
, cmake
, qttools
, qtwebkit
}:
mkDerivation rec {
pname = "fontmatrix";
version = "0.6.0-qt5";
version = "0.9.100";
src = fetchFromGitHub {
owner = "fcoiffie";
owner = "fontmatrix";
repo = "fontmatrix";
rev = "1ff8382d8c85c18d9962918f461341ff4fe21993";
sha256 = "0yx1gbsjj9ddq1kiqplif1w5x5saw250zbmhmd4phqmaqzr60w0h";
rev = "v${version}";
sha256 = "sha256-DtajGhx79DiecglXHja9q/TKVq8Jl2faQdA5Ib/yT88=";
};
# Add missing QAction include
patches = [ (fetchpatch {
url = "https://github.com/fcoiffie/fontmatrix/commit/dc6de8c414ae21516b72daead79c8db88309b102.patch";
sha256 = "092860fdyf5gq67jqfxnlgwzjgpizi6j0njjv3m62aiznrhig7c8";
})];
buildInputs = [ qttools qtwebkit ];
nativeBuildInputs = [ cmake ];
hardeningDisable = [ "format" ];
meta = with lib; {
description = "Fontmatrix is a free/libre font explorer for Linux, Windows and Mac";
homepage = "https://github.com/fontmatrix/fontmatrix";

View File

@ -27,8 +27,8 @@ mkDerivation rec {
src = fetchFromGitHub {
owner = "cnr-isti-vclab";
repo = "meshlab";
rev = "Meshlab-${version}";
sha256 = "sha256-MP+jkiV6yS1T1eWClxM56kZWLXwu0g4w/zBHy6CSL6Y=";
rev = "MeshLab-${version}";
sha256 = "sha256-jcc3PfsiIeYyipteZgzd0NwZgFFgR/mMBiaInzhOcDY=";
fetchSubmodules = true; # for vcglib
};

View File

@ -27,6 +27,11 @@
, freeglut
, libGLU
, xcbuild
# for passthru.tests
, cups-filters
, python3
, zathura
}:
let
@ -146,6 +151,11 @@ stdenv.mkDerivation rec {
enableParallelBuilding = true;
passthru.tests = {
inherit cups-filters zathura;
inherit (python3.pkgs) pikepdf pymupdf;
};
meta = with lib; {
homepage = "https://mupdf.com";
description = "Lightweight PDF, XPS, and E-book viewer and toolkit written in portable C";

View File

@ -9,14 +9,14 @@
"vendorHash": null
},
"acme": {
"hash": "sha256-azNFQ4U7iGIKLingq4GItjXvdcsm0YkrQ4PRvEeDjVU=",
"hash": "sha256-5KR32V4sE5AkOVroLmelNBzBZpD4KfhC491X+5eo+n8=",
"homepage": "https://registry.terraform.io/providers/vancluever/acme",
"owner": "vancluever",
"proxyVendor": true,
"repo": "terraform-provider-acme",
"rev": "v2.16.1",
"rev": "v2.17.0",
"spdx": "MPL-2.0",
"vendorHash": "sha256-9F853+GHfwGH0JQRLawLEB8X76z/Xll1Aa4+vBRWk1o="
"vendorHash": "sha256-UIV0dIoRZxNiaEq1HGPIV4mFLn4pAoGPo6tx6zV3r3A="
},
"age": {
"hash": "sha256-bJrzjvkrCX93bNqCA+FdRibHnAw6cb61StqtwUY5ok4=",
@ -28,13 +28,13 @@
"vendorHash": "sha256-jK7JuARpoxq7hvq5+vTtUwcYot0YqlOZdtDwq4IqKvk="
},
"aiven": {
"hash": "sha256-Nm5flY+BN9PpQY+4LyohFwDfdEPxfVpT/rkfn8aLQyI=",
"hash": "sha256-3agD22viTP+yntNg2nyYi5OpknXnfI2Jk/xEcvXgia8=",
"homepage": "https://registry.terraform.io/providers/aiven/aiven",
"owner": "aiven",
"repo": "terraform-provider-aiven",
"rev": "v4.8.0",
"rev": "v4.8.2",
"spdx": "MIT",
"vendorHash": "sha256-eScN0by/rnCf4+p4g3yhz2kJRyfFyqlVi+0MJXPdzKw="
"vendorHash": "sha256-sVPby/MLAgU7DfBDACqxvkLWblBhisHcUaoOgR3fMaM="
},
"akamai": {
"hash": "sha256-LGgZF2/YCYpoDOSu0UeuPqK9wGXrvPQE4WUGGS0sx30=",
@ -182,13 +182,13 @@
"vendorHash": "sha256-/dOiXO2aPkuZaFiwv/6AXJdIADgx8T7eOwvJfBBoqg8="
},
"buildkite": {
"hash": "sha256-nDJ4XsWvielQYqShBav7g/pZyDcU0jqgemXUqaNJHnA=",
"hash": "sha256-xojTeS+p9XG+wO9thmrSOWrizF56FCg+nwRBdaXqr/4=",
"homepage": "https://registry.terraform.io/providers/buildkite/buildkite",
"owner": "buildkite",
"repo": "terraform-provider-buildkite",
"rev": "v0.25.0",
"rev": "v0.25.1",
"spdx": "MIT",
"vendorHash": "sha256-C/jT+vcZat8UHXgOhtj+gyl8ttCEb564byp/npI2Ei8="
"vendorHash": "sha256-V2BsVBhtdPOT9iseWPhPTOrUe4iMhq4YUiBWd0ne5Xg="
},
"checkly": {
"hash": "sha256-tOTrAi6hd4HFbHAj0p/LTYdxQl1R1WuQ9L4hzqmDVqI=",
@ -218,13 +218,13 @@
"vendorHash": "sha256-qIgr+ynaNSfNx1iW5RJrNHvEnlr46dBzIi+5IXYn+3Q="
},
"cloudflare": {
"hash": "sha256-ayxekJkQt/7K/qwMKvjqkyVkux5+Jw3uyepmaiy3Ptc=",
"hash": "sha256-l1cTzPiOOLyvbvbt7dWR9lRgqVFiO5gRq4XNnLqvac0=",
"homepage": "https://registry.terraform.io/providers/cloudflare/cloudflare",
"owner": "cloudflare",
"repo": "terraform-provider-cloudflare",
"rev": "v4.12.0",
"rev": "v4.13.0",
"spdx": "MPL-2.0",
"vendorHash": "sha256-VTSbi2pDllzyKDhWs5EpWSXO5oKl+khVqLg/Ro3x8ys="
"vendorHash": "sha256-uZ0zc+/RmEiqxBSZLgLPmwN29BEJitPN13HE88zPxcI="
},
"cloudfoundry": {
"hash": "sha256-yEqsdgTSlwppt6ILRZQ6Epyh5WVN6Il3xsBOa/NfIdo=",
@ -282,13 +282,13 @@
"vendorHash": "sha256-ZCMSmOCPEMxCSpl3DjIUGPj1W/KNJgyjtHpmQ19JquA="
},
"datadog": {
"hash": "sha256-sytQJgrfgtJ761mGo0KUTxAukqvmPYyLM8+vsYGtoZc=",
"hash": "sha256-FAqtbze6Lw6SCU84R6aB0oY+pcWyDBdTZRBZhM2pjyc=",
"homepage": "https://registry.terraform.io/providers/DataDog/datadog",
"owner": "DataDog",
"repo": "terraform-provider-datadog",
"rev": "v3.28.0",
"rev": "v3.29.0",
"spdx": "MPL-2.0",
"vendorHash": "sha256-foS7GyRUdhF/M8uTPf2I4WQo7qEg4Z/3FXjagoeSRkU="
"vendorHash": "sha256-UJRuj5qmWWjkqMBGf0500//83ky0Dxx04IQETPwwSsw="
},
"dexidp": {
"hash": "sha256-69r3m3lIKftZQ8NXBD5KEHbsNUwCGpFgn/CYO+921M4=",
@ -363,13 +363,13 @@
"vendorHash": "sha256-oVTanZpCWs05HwyIKW2ajiBPz1HXOFzBAt5Us+EtTRw="
},
"equinix": {
"hash": "sha256-MEsE1OQwKjd1Y+Ek7UmZMbLq4x84iQW40sMl78UbW2c=",
"hash": "sha256-SSCKl0etImK9dXhq9ycQi/U38cZ+SuaoBiaeeg/+JDA=",
"homepage": "https://registry.terraform.io/providers/equinix/equinix",
"owner": "equinix",
"repo": "terraform-provider-equinix",
"rev": "v1.14.7",
"rev": "v1.15.0",
"spdx": "MIT",
"vendorHash": "sha256-cfJG0DJJJX85ISz7dSZ+di1uhgJJd5xUH99PhqGMPgw="
"vendorHash": "sha256-7oLAF+HpL/eNN6KXYp8zA9Yu6h5S+XrWJN4dE3B9H58="
},
"exoscale": {
"hash": "sha256-93pCsHrsYLJYgg8MXHz2Gg+vaPC9gcHdLastb89/BMg=",
@ -827,11 +827,11 @@
"vendorHash": "sha256-LRIfxQGwG988HE5fftGl6JmBG7tTknvmgpm4Fu1NbWI="
},
"oci": {
"hash": "sha256-sxhykS4pXF00VJVtVd7kO2GasAqBUUMqPDPLE3BzUFI=",
"hash": "sha256-S+gHfQsqnOlegd5JcuBOUKO7fynWQAWCZGrlqjY03e0=",
"homepage": "https://registry.terraform.io/providers/oracle/oci",
"owner": "oracle",
"repo": "terraform-provider-oci",
"rev": "v5.9.0",
"rev": "v5.10.0",
"spdx": "MPL-2.0",
"vendorHash": null
},

View File

@ -13,11 +13,11 @@
stdenv.mkDerivation rec {
pname = "appflowy";
version = "0.2.6";
version = "0.3.0";
src = fetchzip {
url = "https://github.com/AppFlowy-IO/appflowy/releases/download/${version}/AppFlowy_x86_64-unknown-linux-gnu_ubuntu-20.04.tar.gz";
sha256 = "sha256-e7nzJ81rMehpxwsbOlwnMh1jzCsGwc+kAo/6+AcCiLE=";
sha256 = "sha256-05RQtvf6I4/sjGtMDfc5U4esxfFFeTwIuxFAkbr6p4A";
stripRoot = false;
};

View File

@ -0,0 +1,55 @@
{ lib
, stdenv
, fetchFromGitHub
, zlib
}:
stdenv.mkDerivation (finalAttrs: {
pname = "bwa-mem2";
version = "unstable-2023-03-18";
src = fetchFromGitHub {
owner = "bwa-mem2";
repo = "bwa-mem2";
rev = "cf4306a47dac35e7e79a9e75398a35f33900cfd0";
fetchSubmodules = true;
hash = "sha256-1AYSn7nBrDwbX7oSrdEoa1d3t6xzwKnA0S87Y/XeXJg=";
};
buildInputs = [ zlib ];
# see https://github.com/bwa-mem2/bwa-mem2/issues/93
postPatch = lib.optionalString stdenv.isDarwin ''
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/include/safe_mem_lib.h
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/safeclib/memset16_s.c
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/safeclib/memset32_s.c
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/safeclib/memset_s.c
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/safeclib/wmemset_s.c
'';
buildFlags = [
(if stdenv.hostPlatform.sse4_2Support then "arch=sse42"
else if stdenv.hostPlatform.avxSupport then "arch=avx"
else if stdenv.hostPlatform.avx2Support then "arch=avx2"
else if stdenv.hostPlatform.avx512Support then "arch=avx512"
else "arch=sse41")
];
enableParallelBuilding = true;
installPhase = ''
runHook preInstall
mkdir -p $out/bin
cp bwa-mem2* $out/bin/
runHook postInstall
'';
meta = with lib; {
description = "Next version of the bwa-mem algorithm in bwa, a software package for mapping low-divergent sequences against a large reference genome";
license = licenses.mit;
homepage = "https://github.com/bwa-mem2/bwa-mem2/";
changelog = "https://github.com/bwa-mem2/bwa-mem2/blob/${finalAttrs.src.rev}/NEWS.md";
platforms = platforms.x86_64;
maintainers = with maintainers; [ alxsimon ];
};
})

View File

@ -8,16 +8,16 @@
rustPlatform.buildRustPackage rec {
pname = "gql";
version = "0.4.1";
version = "0.5.0";
src = fetchFromGitHub {
owner = "AmrDeveloper";
repo = "GQL";
rev = version;
hash = "sha256-d6uncWHq9bLDODFle7xij9YjhpiQPL7mmyFmVxmy8hY=";
hash = "sha256-UTyP9ugUXiPMzkeIvPJUtORvcJ93YOBltglmlcym3sI=";
};
cargoHash = "sha256-jR79xchMpib76oVnpy+UIbcwhDXvDPyl+jWmVPfXVog=";
cargoHash = "sha256-AIt7Ns3vNrHQxJU7cSNr+h3tFGZ9hL1OMBqPHS61YUQ=";
nativeBuildInputs = [
pkg-config

View File

@ -26,14 +26,14 @@
stdenv.mkDerivation (finalAttrs: {
pname = "qmplay2";
version = "23.06.17";
version = "23.08.22";
src = fetchFromGitHub {
owner = "zaps166";
repo = "QMPlay2";
rev = finalAttrs.version;
fetchSubmodules = true;
hash = "sha256-f4lIXB0eTyteCJdWFP0XnsnxGWc32CV+HlqpaCjmgOE=";
hash = "sha256-Ug7WAqZ+BxspQUXweL/OnVBGCsU60DOWNexbi0GpDo0=";
};
nativeBuildInputs = [
@ -79,7 +79,7 @@ stdenv.mkDerivation (finalAttrs: {
'';
changelog = "https://github.com/zaps166/QMPlay2/releases/tag/${finalAttrs.version}";
license = lib.licenses.lgpl3Plus;
maintainers = with lib.maintainers; [ AndersonTorres ];
maintainers = with lib.maintainers; [ AndersonTorres kashw2 ];
platforms = lib.platforms.linux;
};
})

View File

@ -73,6 +73,7 @@ grimshot = stdenv.mkDerivation rec {
meta = with lib; {
description = "A helper for screenshots within sway";
maintainers = with maintainers; [ evils ];
mainProgram = "grimshot";
};
};

View File

@ -244,6 +244,11 @@ let
++ lib.optionals (langD) [
"--with-target-system-zlib=yes"
]
# On mips64-unknown-linux-gnu libsanitizer defines collide with
# glibc's definitions and fail the build. It was fixed in gcc-13+.
++ lib.optionals (targetPlatform.isMips && targetPlatform.parsed.abi.name == "gnu" && lib.versions.major version == "12") [
"--disable-libsanitizer"
]
;
in configureFlags

View File

@ -6,13 +6,13 @@
stdenv.mkDerivation (finalAttrs: {
pname = "wamr";
version = "1.2.2";
version = "1.2.3";
src = fetchFromGitHub {
owner = "bytecodealliance";
repo = "wasm-micro-runtime";
rev = "WAMR-${finalAttrs.version}";
hash = "sha256-jpT42up9HAVJpo03cFrffQQk2JiHEAEepBGlU4RUfNU=";
hash = "sha256-bnia0ORC0YajO7I3XDMdpjlktDqOiXDlGcf12N1G+eg=";
};
nativeBuildInputs = [ cmake ];
@ -23,6 +23,7 @@ stdenv.mkDerivation (finalAttrs: {
description = "WebAssembly Micro Runtime";
homepage = "https://github.com/bytecodealliance/wasm-micro-runtime";
license = licenses.asl20;
mainProgram = "iwasm";
maintainers = with maintainers; [ ereslibre ];
# TODO (ereslibre): this derivation should be improved to support
# more platforms.

View File

@ -0,0 +1,42 @@
{ lib
, stdenv
, fetchFromGitHub
, cmake
, gtest
, static ? stdenv.hostPlatform.isStatic
, cxxStandard ? null
}:
stdenv.mkDerivation (finalAttrs: {
pname = "abseil-cpp";
version = "20230802.0";
src = fetchFromGitHub {
owner = "abseil";
repo = "abseil-cpp";
rev = "refs/tags/${finalAttrs.version}";
hash = "sha256-yILAsAERUDMbRWh8t4o6W74YiswvGIHSyBAIuLVbzxY=";
};
cmakeFlags = [
"-DABSL_BUILD_TEST_HELPERS=ON"
"-DABSL_USE_EXTERNAL_GOOGLETEST=ON"
"-DBUILD_SHARED_LIBS=${if static then "OFF" else "ON"}"
] ++ lib.optionals (cxxStandard != null) [
"-DCMAKE_CXX_STANDARD=${cxxStandard}"
];
strictDeps = true;
nativeBuildInputs = [ cmake ];
buildInputs = [ gtest ];
meta = with lib; {
description = "An open-source collection of C++ code designed to augment the C++ standard library";
homepage = "https://abseil.io/";
license = licenses.asl20;
platforms = platforms.all;
maintainers = [ maintainers.andersk ];
};
})

View File

@ -7,14 +7,14 @@
}:
stdenv.mkDerivation (finalAttrs: {
name = "librecast";
version = "0.7-RC3";
version = "0.7.0";
src = fetchFromGitea {
domain = "codeberg.org";
owner = "librecast";
repo = "librecast";
rev = "v${finalAttrs.version}";
hash = "sha256-AD3MpWg8Lp+VkizwYTuuS2YWM8e0xaMEavVIvwhSZRo=";
hash = "sha256-NlwYJJn1yewx92y6UKJcj6R2MnPn+XuEiKOmsR2oE3g=";
};
buildInputs = [ lcrq libsodium ];
installFlags = [ "PREFIX=$(out)" ];

View File

@ -22,13 +22,13 @@
stdenv.mkDerivation rec {
pname = "pdal";
version = "2.5.5";
version = "2.5.6";
src = fetchFromGitHub {
owner = "PDAL";
repo = "PDAL";
rev = version;
sha256 = "sha256-AhekpvWAdbDAYAr38VXPBDGE40xvP0BnEAI2ZKF3ctY=";
sha256 = "sha256-JKwa89c05EfZ/FxOkj8lYmw0o2EgSqafRDIV2mTpZ5E=";
};
nativeBuildInputs = [

View File

@ -6,7 +6,7 @@
stdenv.mkDerivation rec {
pname = "libupnp";
version = "1.14.17";
version = "1.14.18";
outputs = [ "out" "dev" ];
@ -14,7 +14,7 @@ stdenv.mkDerivation rec {
owner = "pupnp";
repo = "pupnp";
rev = "release-${version}";
sha256 = "sha256-vb540oqDn6Y+oD0LriOJckYYkI/zcHkEVc8mL/+9bps=";
sha256 = "sha256-eQKtZioZjI53J1fsoer032pzqebbK5IabOnkAXwBPos=";
};
nativeBuildInputs = [

View File

@ -5,13 +5,13 @@
stdenv.mkDerivation rec {
pname = "tbox";
version = "1.7.3";
version = "1.7.4";
src = fetchFromGitHub {
owner = "tboox";
repo = pname;
rev = "v${version}";
hash = "sha256-6SqMvwxKSiJO7Z33xx7cJoECu5AJ1gWF8ZsiERWx8DU=";
hash = "sha256-b461JNTS7jNI/qawumDjL2vfC4fAaWB7a++9PpUUDB0=";
};
configureFlags = [

View File

@ -44,6 +44,7 @@ mapAliases {
"@githubnext/github-copilot-cli" = pkgs.github-copilot-cli; # Added 2023-05-02
"@google/clasp" = pkgs.google-clasp; # Added 2023-05-07
"@maizzle/cli" = pkgs.maizzle; # added 2023-08-17
"@medable/mdctl-cli" = throw "@medable/mdctl-cli was removed because it was broken"; # added 2023-08-21
"@nestjs/cli" = pkgs.nest-cli; # Added 2023-05-06
antennas = pkgs.antennas; # added 2023-07-30
balanceofsatoshis = pkgs.balanceofsatoshis; # added 2023-07-31
@ -54,13 +55,16 @@ mapAliases {
inherit (pkgs) carto; # added 2023-08-17
castnow = pkgs.castnow; # added 2023-07-30
inherit (pkgs) clean-css-cli; # added 2023-08-18
coc-imselect = throw "coc-imselect was removed because it was broken"; # added 2023-08-21
inherit (pkgs) configurable-http-proxy; # added 2023-08-19
inherit (pkgs) cordova; # added 2023-08-18
dat = throw "dat was removed because it was broken"; # added 2023-08-21
eask = pkgs.eask; # added 2023-08-17
inherit (pkgs.elmPackages) elm-test;
eslint_d = pkgs.eslint_d; # Added 2023-05-26
inherit (pkgs) firebase-tools; # added 2023-08-18
flood = pkgs.flood; # Added 2023-07-25
git-ssb = throw "git-ssb was removed because it was broken"; # added 2023-08-21
inherit (pkgs) graphqurl; # added 2023-08-19
gtop = pkgs.gtop; # added 2023-07-31
inherit (pkgs) htmlhint; # added 2023-08-19
@ -74,9 +78,13 @@ mapAliases {
manta = pkgs.node-manta; # Added 2023-05-06
markdownlint-cli = pkgs.markdownlint-cli; # added 2023-07-29
inherit (pkgs) markdownlint-cli2; # added 2023-08-22
mdctl-cli = self."@medable/mdctl-cli"; # added 2023-08-21
node-inspector = throw "node-inspector was removed because it was broken"; # added 2023-08-21
readability-cli = pkgs.readability-cli; # Added 2023-06-12
reveal-md = pkgs.reveal-md; # added 2023-07-31
s3http = throw "s3http was removed because it was abandoned upstream"; # added 2023-08-18
ssb-server = throw "ssb-server was removed because it was broken"; # added 2023-08-21
stf = throw "stf was removed because it was broken"; # added 2023-08-21
thelounge = pkgs.thelounge; # Added 2023-05-22
triton = pkgs.triton; # Added 2023-05-06
typescript = pkgs.typescript; # Added 2023-06-21

View File

@ -17,7 +17,6 @@
"@commitlint/cli" = "commitlint";
"@forge/cli" = "forge";
"@gitbeaker/cli" = "gitbeaker";
"@medable/mdctl-cli" = "mdctl";
"@mermaid-js/mermaid-cli" = "mmdc";
"@nerdwallet/shepherd" = "shepherd";
"@prisma/language-server" = "prisma-language-server";

View File

@ -6,7 +6,6 @@
, "@commitlint/cli"
, "@commitlint/config-conventional"
, "@forge/cli"
, "@medable/mdctl-cli"
, "@mermaid-js/mermaid-cli"
, "@microsoft/rush"
, "@nerdwallet/shepherd"
@ -57,7 +56,6 @@
, "coc-haxe"
, "coc-highlight"
, "coc-html"
, "coc-imselect"
, "coc-java"
, "coc-jest"
, "coc-json"
@ -104,7 +102,6 @@
, "create-react-native-app"
, "cspell"
, "csslint"
, "dat"
, "degit"
, "dhcp"
, "diagnostic-languageserver"
@ -134,7 +131,6 @@
, "generator-code"
, "get-graphql-schema"
, "git-run"
, "git-ssb"
, "git-standup"
, "@gitbeaker/cli"
, "gitmoji-cli"
@ -202,7 +198,6 @@
, "nijs"
, "node-gyp"
, "node-gyp-build"
, "node-inspector"
, "node-pre-gyp"
, "node-red"
, "node2nix"
@ -262,9 +257,7 @@
, "socket.io"
, "speed-test"
, "sql-formatter"
, "ssb-server"
, "stackdriver-statsd-backend"
, "stf"
, "stylelint"
, "surge"
, "svelte-check"

File diff suppressed because it is too large Load Diff

View File

@ -41,25 +41,6 @@ final: prev: {
];
};
"@medable/mdctl-cli" = prev."@medable/mdctl-cli".override (oldAttrs: {
nativeBuildInputs = with pkgs; with darwin.apple_sdk.frameworks; [
glib
libsecret
pkg-config
] ++ lib.optionals stdenv.isDarwin [
AppKit
Security
];
buildInputs = [
final.node-gyp-build
final.node-pre-gyp
nodejs
];
meta = oldAttrs.meta // { broken = since "16"; };
});
mdctl-cli = final."@medable/mdctl-cli";
autoprefixer = prev.autoprefixer.override {
nativeBuildInputs = [ pkgs.buildPackages.makeWrapper ];
postInstall = ''
@ -92,16 +73,6 @@ final: prev: {
'';
};
coc-imselect = prev.coc-imselect.override (oldAttrs: {
meta = oldAttrs.meta // { broken = since "10"; };
});
dat = prev.dat.override (oldAttrs: {
buildInputs = [ final.node-gyp-build pkgs.libtool pkgs.autoconf pkgs.automake ];
meta = oldAttrs.meta // { broken = since "12"; };
});
expo-cli = prev."expo-cli".override (oldAttrs: {
# The traveling-fastlane-darwin optional dependency aborts build on Linux.
dependencies = builtins.filter (d: d.packageName != "@expo/traveling-fastlane-${if stdenv.isLinux then "darwin" else "linux"}") oldAttrs.dependencies;
@ -126,11 +97,6 @@ final: prev: {
};
git-ssb = prev.git-ssb.override (oldAttrs: {
buildInputs = [ final.node-gyp-build ];
meta = oldAttrs.meta // { broken = since "10"; };
});
graphite-cli = prev."@withgraphite/graphite-cli".override {
name = "graphite-cli";
nativeBuildInputs = with pkgs; [ installShellFiles pkg-config ];
@ -260,11 +226,6 @@ final: prev: {
'';
};
node-inspector = prev.node-inspector.override (oldAttrs: {
buildInputs = [ final.node-pre-gyp ];
meta = oldAttrs.meta // { broken = since "10"; };
});
node-red = prev.node-red.override {
buildInputs = [ final.node-pre-gyp ];
};
@ -384,15 +345,6 @@ final: prev: {
name = "rush";
};
ssb-server = prev.ssb-server.override (oldAttrs: {
buildInputs = [ pkgs.automake pkgs.autoconf final.node-gyp-build ];
meta = oldAttrs.meta // { broken = since "10"; };
});
stf = prev.stf.override (oldAttrs: {
meta = oldAttrs.meta // { broken = since "10"; };
});
tailwindcss = prev.tailwindcss.override {
plugins = [ ];
nativeBuildInputs = [ pkgs.buildPackages.makeWrapper ];

View File

@ -1,23 +1,32 @@
{ lib, buildPythonPackage, fetchPypi, isPyPy
, pytest, pytest-cov, pytest-mock, freezegun
, pytest, pytest-cov, pytest-mock, freezegun, safety, pre-commit
, jinja2, future, binaryornot, click, jinja2-time, requests
, python-slugify
, pyyaml
, arrow
, rich
}:
buildPythonPackage rec {
pname = "cookiecutter";
version = "2.1.1";
version = "2.3.0";
# not sure why this is broken
disabled = isPyPy;
src = fetchPypi {
inherit pname version;
hash = "sha256-85gr6NnFPawSYYZAE/3sf4Ov0uQu3m9t0GnF4UnFQNU=";
hash = "sha256-lCp5SYF0f21/Q51uSdOdyRqaZBKDYUFgyTxHTHLCliE=";
};
nativeCheckInputs = [ pytest pytest-cov pytest-mock freezegun ];
nativeCheckInputs = [
pytest
pytest-cov
pytest-mock
freezegun
safety
pre-commit
];
propagatedBuildInputs = [
binaryornot
jinja2
@ -26,6 +35,8 @@ buildPythonPackage rec {
jinja2-time
python-slugify
requests
arrow
rich
];
# requires network access for cloning git repos

View File

@ -10,14 +10,14 @@
buildPythonPackage rec {
pname = "mypy-protobuf";
version = "3.4.0";
version = "3.5.0";
format = "pyproject";
disabled = pythonOlder "3.6";
disabled = pythonOlder "3.8";
src = fetchPypi {
inherit pname version;
hash = "sha256-fXWgeWUbEFB2d2o1pUBeP6dzuKFnEY8bcS5EPppsGKI=";
hash = "sha256-IfJw2gqXkqnax2sN9GPAJ+VhZkq2lzxZvk5NBk3+Z9w=";
};
propagatedBuildInputs = [

View File

@ -7,14 +7,15 @@
buildPythonPackage rec {
pname = "u-msgpack-python";
version = "2.7.2";
version = "2.8.0";
format = "setuptools";
src = fetchPypi {
inherit pname version;
hash = "sha256-6G96xqoO9MbEnwBLT9Q1vOmcI+LdXXMAPz+YFgJMK9g=";
hash = "sha256-uAGoPW7XXm30HkRRi08qnCIdwtpLzVOA46D+2lILxho=";
};
LC_ALL="en_US.UTF-8";
env.LC_ALL="en_US.UTF-8";
buildInputs = [ glibcLocales ];
@ -23,7 +24,7 @@ buildPythonPackage rec {
meta = {
description = "A portable, lightweight MessagePack serializer and deserializer written in pure Python";
homepage = "https://github.com/vsergeev/u-msgpack-python";
changelog = "https://github.com/vsergeev/u-msgpack-python/blob/v${version}/CHANGELOG.md";
license = lib.licenses.mit;
};
}

View File

@ -22,14 +22,14 @@ with py.pkgs;
buildPythonApplication rec {
pname = "checkov";
version = "2.4.6";
version = "2.4.7";
format = "setuptools";
src = fetchFromGitHub {
owner = "bridgecrewio";
repo = pname;
rev = "refs/tags/${version}";
hash = "sha256-1o8l/c6DeT35GjEV/7+9+LLJwoCpWuq0LBkyr08mWaE=";
hash = "sha256-NveRGWf0Aghu0fxVAVSukrH9zFl+QJ2rWNYm5JIHIjk=";
};
patches = [

View File

@ -4,13 +4,13 @@
}:
buildGoModule rec {
pname = "litestream";
version = "0.3.9";
version = "0.3.11";
src = fetchFromGitHub {
owner = "benbjohnson";
repo = pname;
rev = "v${version}";
sha256 = "sha256-zs+Li8ylw+zexxuEkXX4qk7qslk23BLBcoHXRIuQNmU=";
sha256 = "sha256-03gGGx8RZEK2RrToN30gkIlHss/e3UcSi3AmMh9twDU=";
};
ldflags = [
@ -19,7 +19,7 @@ buildGoModule rec {
"-X main.Version=${version}"
];
vendorSha256 = "sha256-GiCvifdbWz+hH6aHACzlBpppNC5p24MHRWlbtKLIFhE=";
vendorHash = "sha256-sYIY3Z3VrCqbjEbQtEY7q6Jljg8jMoa2qWEB/IkDjzM=";
meta = with lib; {
description = "Streaming replication for SQLite";

View File

@ -2,7 +2,7 @@
buildGoModule rec {
pname = "doctl";
version = "1.97.1";
version = "1.98.0";
vendorHash = null;
@ -31,7 +31,7 @@ buildGoModule rec {
owner = "digitalocean";
repo = "doctl";
rev = "v${version}";
sha256 = "sha256-qEoSq4sLobsYYdwR8vp5WpugeQdLbXDtBVBTAztxPkY=";
sha256 = "sha256-M9kSQoYcJudL/y/Yc6enVT/rJusd+oe3BdjkaLRQ0gU=";
};
meta = with lib; {

File diff suppressed because it is too large Load Diff

View File

@ -6,24 +6,16 @@
rustPlatform.buildRustPackage rec {
pname = "postgres-lsp";
version = "unstable-2023-08-08";
version = "unstable-2023-08-23";
src = fetchFromGitHub {
owner = "supabase";
repo = "postgres_lsp";
rev = "1250f5ed14a0e86b2b7fa581214284c67b960621";
hash = "sha256-Y43sTgKNcAI3h6McDc0g6o9CX6jOKBfURLWyjJhvmwk=";
rev = "47dd0132b12661ab6c97f5fba892e567a5109c84";
hash = "sha256-aV3QAp6DkNrHiDe1Ytiu6UyTWrelV6vO83Baiv4ONLg=";
};
cargoLock = {
lockFile = ./Cargo.lock;
};
# Cargo.lock is ignored
# https://github.com/supabase/postgres_lsp/pull/28
postPatch = ''
ln -s ${./Cargo.lock} Cargo.lock
'';
cargoHash = "sha256-9d/KiQ7IXhmYvTb97FKJh/cGTdnxAgCXSx4+V74b+RE=";
nativeBuildInputs = [
protobuf

View File

@ -2,12 +2,12 @@
stdenv.mkDerivation rec {
pname = "opengrok";
version = "1.12.12";
version = "1.12.13";
# binary distribution
src = fetchurl {
url = "https://github.com/oracle/opengrok/releases/download/${version}/${pname}-${version}.tar.gz";
hash = "sha256-0ppkexmVchHL+lCfB+xtLcDCZ24Sv0Opr5RNLILKg2M=";
hash = "sha256-DfSLQj6cbgDT56MwwnlC6hK/y3Hce2Ueprw0o3NURW0=";
};
nativeBuildInputs = [ makeWrapper ];

View File

@ -6,13 +6,13 @@
buildGoModule rec {
pname = "oh-my-posh";
version = "18.3.3";
version = "18.3.5";
src = fetchFromGitHub {
owner = "jandedobbeleer";
repo = pname;
rev = "refs/tags/v${version}";
hash = "sha256-AJw+NNTbksYSW2VqUzxLwxwd3OjM9uK/ou2CVS2zNvw=";
hash = "sha256-5wcKG97NXTTTBJOD9kOsOp1MuHazAPlE4yLPWJoeCA8=";
};
vendorHash = "sha256-xkguBWk2Nh8w7C7tKbvaP0tRgZO4z08AEsdjNlJYC6Q=";

View File

@ -13,14 +13,14 @@
rustPlatform.buildRustPackage rec {
pname = "rust-analyzer-unwrapped";
version = "2023-08-14";
cargoSha256 = "sha256-sau5lno9jqC4NVDY62aNlyRMW/T/xEHUtzyL5wIE6yQ=";
version = "2023-08-21";
cargoSha256 = "sha256-aQFBNUXkoEsm5qKsMasqTIKoC0V7UUgmlukgOr5Vqpc=";
src = fetchFromGitHub {
owner = "rust-lang";
repo = "rust-analyzer";
rev = version;
sha256 = "sha256-KxbpMaIH7GkLecWCQsoDtpql1N869RIIfZcLDRcuB5k=";
sha256 = "sha256-ribQkxEbMMb8vcBMKvcrPHFftMmlaF3HIAbJty9fDeY=";
};
cargoBuildFlags = [ "--bin" "rust-analyzer" "--bin" "rust-analyzer-proc-macro-srv" ];

View File

@ -14,13 +14,13 @@
stdenv.mkDerivation rec {
pname = "r2modman";
version = "3.1.42";
version = "3.1.43";
src = fetchFromGitHub {
owner = "ebkr";
repo = "r2modmanPlus";
rev = "v${version}";
hash = "sha256-16sE706iivYoI40JJUkqVmtxkYsgAFBg+0tXOc6scqc=";
hash = "sha256-qZeBF58VB/wW0N2MZgZfiIJdDqHUdfruAoCuDEFeCPA=";
};
offlineCache = fetchYarnDeps {

View File

@ -26,5 +26,6 @@ stdenv.mkDerivation rec {
license = licenses.gpl2;
maintainers = [ ];
platforms = platforms.linux;
mainProgram = "vlock";
};
}

View File

@ -8,7 +8,7 @@
buildGoModule rec {
pname = "telegraf";
version = "1.27.3";
version = "1.27.4";
excludedPackages = "test";
@ -18,10 +18,10 @@ buildGoModule rec {
owner = "influxdata";
repo = "telegraf";
rev = "v${version}";
hash = "sha256-mVkzJ2dFBR8QYGO5rAT3tfA+eFFO01VsPoWp2iadUew=";
hash = "sha256-HcNqvl8wWMMefrRl7cXgfE22+dzebhVmo7vKf0nEIyk=";
};
vendorHash = "sha256-xtPniKc6aR8JRSsaefqoRCLL8AQH6YVTG67trK58YaU=";
vendorHash = "sha256-z1HNOVsSdA5bf0iZcAhbXgv/IaFpHjfoe7rqMtmscQM=";
proxyVendor = true;
ldflags = [

View File

@ -0,0 +1,34 @@
{ lib
, stdenv
, fetchFromGitHub
, python3Packages
, makeWrapper
}:
stdenv.mkDerivation rec {
pname = "influxdb2-provision";
version = "1.0.0";
src = fetchFromGitHub {
owner = "oddlama";
repo = "influxdb2-provision";
rev = "v${version}";
hash = "sha256-kgpUtXmwy9buupNzQj/6AIeN8XG2x0XjIckK3WIFC+I=";
};
nativeBuildInputs = [ makeWrapper ];
buildInputs = [ python3Packages.python python3Packages.influxdb-client ];
installPhase = ''
install -Dm0555 influxdb2-provision.py $out/bin/influxdb2-provision
wrapProgram $out/bin/influxdb2-provision --prefix PYTHONPATH : "$PYTHONPATH"
'';
meta = with lib; {
description = "A small utility to help provisioning influxdb2";
homepage = "https://github.com/oddlama/influxdb2-provision";
license = licenses.mit;
maintainers = with maintainers; [oddlama];
mainProgram = "influxdb2-provision";
};
}

View File

@ -0,0 +1,26 @@
{ buildGoModule
, fetchFromGitHub
, lib
}:
buildGoModule rec {
pname = "influxdb2-token-manipulator";
version = "1.0.0";
src = fetchFromGitHub {
owner = "oddlama";
repo = "influxdb2-token-manipulator";
rev = "v${version}";
hash = "sha256-9glz+TdqvGJgSsbLm4J/fn7kzMC75z74/jxZrEZiooc=";
};
vendorHash = "sha256-zBZk7JbNILX18g9+2ukiESnFtnIVWhdN/J/MBhIITh8=";
meta = with lib; {
description = "Utility program to manipulate influxdb api tokens for declarative setups";
homepage = "https://github.com/oddlama/influxdb2-token-manipulator";
license = licenses.mit;
maintainers = with maintainers; [oddlama];
mainProgram = "influxdb2-token-manipulator";
};
}

View File

@ -76,8 +76,6 @@ buildGoModule rec {
'';
passthru.tests.lxd = nixosTests.lxd;
passthru.tests.lxd-nftables = nixosTests.lxd-nftables;
passthru.tests.lxd-ui = nixosTests.lxd-ui;
passthru.ui = callPackage ./ui.nix { };
passthru.updateScript = gitUpdater {
url = "https://github.com/canonical/lxd.git";

View File

@ -102,5 +102,6 @@ stdenv.mkDerivation rec {
description = "An extraction utility for archives compressed in .zip format";
license = lib.licenses.free; # http://www.info-zip.org/license.html
platforms = lib.platforms.all;
mainProgram = "unzip";
};
}

View File

@ -31,5 +31,6 @@ stdenv.mkDerivation rec {
license = licenses.mit;
maintainers = [ maintainers.woffs ];
platforms = platforms.all;
mainProgram = "pulsemixer";
};
}

View File

@ -4,13 +4,13 @@
stdenv.mkDerivation rec {
pname = "partclone";
version = "0.3.24";
version = "0.3.25";
src = fetchFromGitHub {
owner = "Thomas-Tsai";
repo = "partclone";
rev = version;
sha256 = "sha256-EY5SL8/KITEoOWq2owxQjnemYNsjYqJLLlRVFdYPoVA=";
sha256 = "sha256-DLO0mKQ7Ab+4hwRANnipkaCbS7qldGnfTotAYDy//XU=";
};
nativeBuildInputs = [ autoreconfHook pkg-config ];

View File

@ -7,13 +7,13 @@
buildGoModule rec {
pname = "trufflehog";
version = "3.52.1";
version = "3.53.0";
src = fetchFromGitHub {
owner = "trufflesecurity";
repo = "trufflehog";
rev = "refs/tags/v${version}";
hash = "sha256-T3//AKSgnsdRWEzz+kh8rkHXBnJF9CThXervwAZ7Uog=";
hash = "sha256-O1MCl9chqiOGxOUYxaQNs0gXHgVkT8ZgSSTQXj63f90=";
};
vendorHash = "sha256-RHNt9GxqWb4EDKg5of5s88iUmJPI2w7i5hPoCFMmnew=";

View File

@ -20507,6 +20507,7 @@ with pkgs;
abseil-cpp_202103 = callPackage ../development/libraries/abseil-cpp/202103.nix { };
abseil-cpp_202206 = callPackage ../development/libraries/abseil-cpp/202206.nix { };
abseil-cpp_202301 = callPackage ../development/libraries/abseil-cpp/202301.nix { };
abseil-cpp_202308 = callPackage ../development/libraries/abseil-cpp/202308.nix { };
abseil-cpp = abseil-cpp_202301;
accountsservice = callPackage ../development/libraries/accountsservice { };
@ -27006,6 +27007,8 @@ with pkgs;
influxdb = callPackage ../servers/nosql/influxdb { };
influxdb2-server = callPackage ../servers/nosql/influxdb2 { };
influxdb2-cli = callPackage ../servers/nosql/influxdb2/cli.nix { };
influxdb2-token-manipulator = callPackage ../servers/nosql/influxdb2/token-manipulator.nix { };
influxdb2-provision = callPackage ../servers/nosql/influxdb2/provision.nix { };
# For backwards compatibility with older versions of influxdb2,
# which bundled the server and CLI into the same derivation. Will be
# removed in a few releases.
@ -39155,6 +39158,8 @@ with pkgs;
bwa = callPackage ../applications/science/biology/bwa { };
bwa-mem2 = callPackage ../applications/science/biology/bwa-mem2 { };
### SCIENCE/MACHINE LEARNING
nengo-gui = callPackage ../applications/science/machine-learning/nengo-gui { };