Merge master into staging-next

This commit is contained in:
github-actions[bot] 2024-01-10 00:02:18 +00:00 committed by GitHub
commit 122355be99
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
66 changed files with 1002 additions and 425 deletions

View File

@ -2,35 +2,38 @@
`pkgs.checkpointBuildTools` provides a way to build derivations incrementally. It consists of two functions to make checkpoint builds using Nix possible. `pkgs.checkpointBuildTools` provides a way to build derivations incrementally. It consists of two functions to make checkpoint builds using Nix possible.
For hermeticity, Nix derivations do not allow any state to carry over between builds, making a transparent incremental build within a derivation impossible. For hermeticity, Nix derivations do not allow any state to be carried over between builds, making a transparent incremental build within a derivation impossible.
However, we can tell Nix explicitly what the previous build state was, by representing that previous state as a derivation output. This allows the passed build state to be used for an incremental build. However, we can tell Nix explicitly what the previous build state was, by representing that previous state as a derivation output. This allows the passed build state to be used for an incremental build.
To change a normal derivation to a checkpoint based build, these steps must be taken: To change a normal derivation to a checkpoint based build, these steps must be taken:
- apply `prepareCheckpointBuild` on the desired derivation - apply `prepareCheckpointBuild` on the desired derivation, e.g.
e.g.:
```nix ```nix
checkpointArtifacts = (pkgs.checkpointBuildTools.prepareCheckpointBuild pkgs.virtualbox); checkpointArtifacts = (pkgs.checkpointBuildTools.prepareCheckpointBuild pkgs.virtualbox);
``` ```
- change something you want in the sources of the package. (e.g. using a source override) - change something you want in the sources of the package, e.g. use a source override:
```nix ```nix
changedVBox = pkgs.virtualbox.overrideAttrs (old: { changedVBox = pkgs.virtualbox.overrideAttrs (old: {
src = path/to/vbox/sources; src = path/to/vbox/sources;
} });
``` ```
- use `mkCheckpointedBuild changedVBox buildOutput` - use `mkCheckpointBuild changedVBox checkpointArtifacts`
- enjoy shorter build times - enjoy shorter build times
## Example {#sec-checkpoint-build-example} ## Example {#sec-checkpoint-build-example}
```nix ```nix
{ pkgs ? import <nixpkgs> {} }: with (pkgs) checkpointBuildTools; { pkgs ? import <nixpkgs> {} }:
let let
helloCheckpoint = checkpointBuildTools.prepareCheckpointBuild pkgs.hello; inherit (pkgs.checkpointBuildTools)
prepareCheckpointBuild
mkCheckpointBuild
;
helloCheckpoint = prepareCheckpointBuild pkgs.hello;
changedHello = pkgs.hello.overrideAttrs (_: { changedHello = pkgs.hello.overrideAttrs (_: {
doCheck = false; doCheck = false;
patchPhase = '' patchPhase = ''
sed -i 's/Hello, world!/Hello, Nix!/g' src/hello.c sed -i 's/Hello, world!/Hello, Nix!/g' src/hello.c
''; '';
}); });
in checkpointBuildTools.mkCheckpointBuild changedHello helloCheckpoint in mkCheckpointBuild changedHello helloCheckpoint
``` ```

View File

@ -3864,6 +3864,12 @@
githubId = 6821729; githubId = 6821729;
github = "criyle"; github = "criyle";
}; };
crschnick = {
email = "crschnick@xpipe.io";
name = "Christopher Schnick";
github = "crschnick";
githubId = 72509152;
};
CRTified = { CRTified = {
email = "carl.schneider+nixos@rub.de"; email = "carl.schneider+nixos@rub.de";
matrix = "@schnecfk:ruhr-uni-bochum.de"; matrix = "@schnecfk:ruhr-uni-bochum.de";

View File

@ -834,6 +834,7 @@
./services/monitoring/riemann.nix ./services/monitoring/riemann.nix
./services/monitoring/scollector.nix ./services/monitoring/scollector.nix
./services/monitoring/smartd.nix ./services/monitoring/smartd.nix
./services/monitoring/snmpd.nix
./services/monitoring/statsd.nix ./services/monitoring/statsd.nix
./services/monitoring/sysstat.nix ./services/monitoring/sysstat.nix
./services/monitoring/teamviewer.nix ./services/monitoring/teamviewer.nix

View File

@ -0,0 +1,83 @@
{ pkgs, config, lib, ... }:

let
  cfg = config.services.snmpd;

  # Materialize the inline configuration as a store file; stays null when
  # configText is empty so the configFile option can detect "nothing set".
  configFile = if cfg.configText != "" then
    pkgs.writeText "snmpd.cfg" ''
      ${cfg.configText}
    '' else null;

in {
  options.services.snmpd = {
    enable = lib.mkEnableOption "snmpd";

    package = lib.mkPackageOption pkgs "net-snmp" {};

    listenAddress = lib.mkOption {
      type = lib.types.str;
      default = "0.0.0.0";
      description = lib.mdDoc ''
        The address to listen on for SNMP and AgentX messages.
      '';
      example = "127.0.0.1";
    };

    port = lib.mkOption {
      type = lib.types.port;
      default = 161;
      description = lib.mdDoc ''
        The port to listen on for SNMP and AgentX messages.
      '';
    };

    openFirewall = lib.mkOption {
      type = lib.types.bool;
      default = false;
      description = lib.mdDoc ''
        Open port in firewall for snmpd.
      '';
    };

    configText = lib.mkOption {
      type = lib.types.lines;
      default = "";
      description = lib.mdDoc ''
        The contents of the snmpd.conf. If the {option}`configFile` option
        is set, this value will be ignored.

        Note that the contents of this option will be added to the Nix
        store as world-readable plain text, {option}`configFile` can be used in
        addition to a secret management tool to protect sensitive data.
      '';
    };

    configFile = lib.mkOption {
      # nullOr: with plain `types.path` the default evaluates to null when
      # configText is empty, failing the type check with an opaque error.
      # The assertion below reports the missing configuration clearly instead.
      type = lib.types.nullOr lib.types.path;
      default = configFile;
      defaultText = lib.literalMD "The value of {option}`configText`.";
      description = lib.mdDoc ''
        Path to the snmpd.conf file. By default, if {option}`configText` is set,
        a config file will be automatically generated.
      '';
    };

  };

  config = lib.mkIf cfg.enable {
    assertions = [
      {
        assertion = cfg.configFile != null;
        message = "services.snmpd requires either configText or configFile to be set.";
      }
    ];

    systemd.services."snmpd" = {
      description = "Simple Network Management Protocol (SNMP) daemon.";
      after = [ "network.target" ];
      wantedBy = [ "multi-user.target" ];
      serviceConfig = {
        Type = "simple";
        # -f: stay in the foreground (systemd supervises the process),
        # -Lo: log to stdout/journal, -c: use the explicit config file.
        ExecStart = "${lib.getExe' cfg.package "snmpd"} -f -Lo -c ${cfg.configFile} ${cfg.listenAddress}:${toString cfg.port}";
      };
    };

    networking.firewall.allowedUDPPorts = lib.mkIf cfg.openFirewall [
      cfg.port
    ];
  };

  meta.maintainers = [ lib.maintainers.eliandoran ];
}

View File

@ -4,9 +4,10 @@ with lib;
let let
inherit (pkgs) cups cups-pk-helper cups-filters xdg-utils; inherit (pkgs) cups-pk-helper cups-filters xdg-utils;
cfg = config.services.printing; cfg = config.services.printing;
cups = cfg.package;
avahiEnabled = config.services.avahi.enable; avahiEnabled = config.services.avahi.enable;
polkitEnabled = config.security.polkit.enable; polkitEnabled = config.security.polkit.enable;
@ -140,6 +141,8 @@ in
''; '';
}; };
package = lib.mkPackageOption pkgs "cups" {};
stateless = mkOption { stateless = mkOption {
type = types.bool; type = types.bool;
default = false; default = false;

View File

@ -33,21 +33,11 @@ in {
''; '';
}; };
package = lib.mkOption { package = lib.mkPackageOption pkgs "lxd" { };
type = lib.types.package;
default = pkgs.lxd;
defaultText = lib.literalExpression "pkgs.lxd";
description = lib.mdDoc ''
The LXD package to use.
'';
};
lxcPackage = lib.mkOption { lxcPackage = lib.mkPackageOption pkgs "lxc" {
type = lib.types.package; extraDescription = ''
default = pkgs.lxc; Required for AppArmor profiles.
defaultText = lib.literalExpression "pkgs.lxc";
description = lib.mdDoc ''
The LXC package to use with LXD (required for AppArmor profiles).
''; '';
}; };
@ -149,7 +139,7 @@ in {
ui = { ui = {
enable = lib.mkEnableOption (lib.mdDoc "(experimental) LXD UI"); enable = lib.mkEnableOption (lib.mdDoc "(experimental) LXD UI");
package = lib.mkPackageOption pkgs.lxd-unwrapped "ui" { }; package = lib.mkPackageOption pkgs [ "lxd-unwrapped" "ui" ] { };
}; };
}; };
}; };

View File

@ -773,6 +773,7 @@ in {
sing-box = handleTest ./sing-box.nix {}; sing-box = handleTest ./sing-box.nix {};
slimserver = handleTest ./slimserver.nix {}; slimserver = handleTest ./slimserver.nix {};
slurm = handleTest ./slurm.nix {}; slurm = handleTest ./slurm.nix {};
snmpd = handleTest ./snmpd.nix {};
smokeping = handleTest ./smokeping.nix {}; smokeping = handleTest ./smokeping.nix {};
snapcast = handleTest ./snapcast.nix {}; snapcast = handleTest ./snapcast.nix {};
snapper = handleTest ./snapper.nix {}; snapper = handleTest ./snapper.nix {};

23
nixos/tests/snmpd.nix Normal file
View File

@ -0,0 +1,23 @@
# VM test: boot one machine running snmpd with a minimal read-only community
# and verify an SNMP walk against localhost answers with sysName.
import ./make-test-python.nix ({ pkgs, lib, ... }: {
  name = "snmpd";

  nodes.snmpd = {
    # net-snmp provides the snmpwalk client used by the test script.
    environment.systemPackages = with pkgs; [
      net-snmp
    ];

    services.snmpd = {
      enable = true;
      # World-readable community string; fine for a throwaway test VM.
      configText = ''
        rocommunity public
      '';
    };
  };

  # NOTE(review): the script uses the `machine` alias rather than the node
  # name `snmpd` — presumably the driver aliases the single node; confirm
  # against the test-driver documentation.
  testScript = ''
    start_all();
    machine.wait_for_unit("snmpd.service")
    machine.succeed("snmpwalk -v 2c -c public localhost | grep SNMPv2-MIB::sysName.0");
  '';
})

View File

@ -7,16 +7,16 @@
rustPlatform.buildRustPackage rec { rustPlatform.buildRustPackage rec {
pname = "clipcat"; pname = "clipcat";
version = "0.16.0"; version = "0.16.1";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "xrelkd"; owner = "xrelkd";
repo = pname; repo = pname;
rev = "v${version}"; rev = "v${version}";
hash = "sha256-9BilasXc/3FFPcKAgPvc0hIHP7NbOqRD8ZwIMRc/Y3M="; hash = "sha256-SqA8UjKTBtkE1IkWGeshI8KBHr86V9r/+YvFZNJ6Oq8=";
}; };
cargoHash = "sha256-zkeKhi0DiYqA5+KiU77ZJXRyhLUKVDmHvF7TG1URzo4="; cargoHash = "sha256-KU3kXqy9zL7GQdSsCNW7jcsxdTuRXjJyDtBpmgoXi6E=";
nativeBuildInputs = [ nativeBuildInputs = [
protobuf protobuf

View File

@ -5,7 +5,7 @@
# python deps # python deps
, python, buildPythonPackage , python, buildPythonPackage
, alembic, beautifulsoup4, chardet, lxml, mako, pyenchant , alembic, beautifulsoup4, chardet, lxml, mako, pyenchant
, pyqt5_with_qtwebkit, pyxdg, sip_4, sqlalchemy, sqlalchemy-migrate , pyqt5-webkit, pyxdg, sip_4, sqlalchemy, sqlalchemy-migrate
}: }:
buildPythonPackage rec { buildPythonPackage rec {
@ -39,7 +39,7 @@ buildPythonPackage rec {
lxml lxml
mako mako
pyenchant pyenchant
pyqt5_with_qtwebkit pyqt5-webkit
pyxdg pyxdg
sip_4 sip_4
sqlalchemy sqlalchemy

View File

@ -2,16 +2,16 @@
buildGoModule rec { buildGoModule rec {
pname = "stern"; pname = "stern";
version = "1.27.0"; version = "1.28.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "stern"; owner = "stern";
repo = "stern"; repo = "stern";
rev = "v${version}"; rev = "v${version}";
sha256 = "sha256-W8jGUs63R6QpwuTgzK5yVLhKGXypvKOyCWHT2xdb6eM="; sha256 = "sha256-Lx5f2dqjdhgMXky1Pv2ik9i56ugsQmZK/ag4veC9Dac=";
}; };
vendorHash = "sha256-LLVd9WB8ixH78CHYe0sS4sCDCD+6SQ7PxWr2MHiAOxI="; vendorHash = "sha256-6jI/I7Nw/vJwKNvgH/35uHYu51SBX+WFH5s0WKfCqBo=";
subPackages = [ "." ]; subPackages = [ "." ];

View File

@ -11,7 +11,7 @@ python3Packages.buildPythonPackage rec {
sha256 = "1ffdy74igll74fwpmnn3brvcxbk4iianqscdzz18sx1pfqpw16cl"; sha256 = "1ffdy74igll74fwpmnn3brvcxbk4iianqscdzz18sx1pfqpw16cl";
}; };
propagatedBuildInputs = with python3Packages; [ pyqt5_with_qtwebkit dbus-python jsmin ]; propagatedBuildInputs = with python3Packages; [ pyqt5-webkit dbus-python jsmin ];
meta = with lib; { meta = with lib; {
description = "Non-official desktop client for Slack"; description = "Non-official desktop client for Slack";

View File

@ -6,16 +6,16 @@
buildGoModule rec { buildGoModule rec {
pname = "rclone"; pname = "rclone";
version = "1.65.0"; version = "1.65.1";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = pname; owner = pname;
repo = pname; repo = pname;
rev = "v${version}"; rev = "v${version}";
hash = "sha256-hlkX8JrBz/hFwQj0xCZfuBt2t3CP3Xa1JkNDH0zomxg="; hash = "sha256-wRksCRQR6JZjYtXgq3iARCoYck76O17Kd2Ht1XpA9KE=";
}; };
vendorHash = "sha256-qKRIT2HqNDpEtZBNHZMXp4Yhh5fCkQSTPU5MQ7FmCHI="; vendorHash = "sha256-kWaMo6ALieuwf53H05UdoI7xtH1LAnsD6Ak9bJTa6jc=";
subPackages = [ "." ]; subPackages = [ "." ];

View File

@ -0,0 +1,132 @@
{ stdenvNoCC
, lib
, fetchzip
, makeDesktopItem
, autoPatchelfHook
, zlib
, fontconfig
, udev
, gtk3
, freetype
, alsa-lib
, makeShellWrapper
, libX11
, libXext
, libXdamage
, libXfixes
, libxcb
, libXcomposite
, libXcursor
, libXi
, libXrender
, libXtst
, libXxf86vm
}:

let
  inherit (stdenvNoCC.hostPlatform) system;
  throwSystem = throw "Unsupported system: ${system}";

  # Upstream ships one portable tarball per architecture.
  arch = {
    x86_64-linux = "x86_64";
    aarch64-linux = "arm64";
  }.${system} or throwSystem;

  hash = {
    x86_64-linux = "sha256-/cumOKaWPdAruMLZP2GMUdocIhsbo59dc4Q3ngc/JOc=";
    aarch64-linux = "sha256-xMV+9etnuFwRGIHdaXNViKd4FMOuVtugGDS1xyMwEnM=";
  }.${system} or throwSystem;

  displayname = "XPipe";

in stdenvNoCC.mkDerivation rec {
  pname = "xpipe";
  version = "1.7.3";

  src = fetchzip {
    url = "https://github.com/xpipe-io/xpipe/releases/download/${version}/xpipe-portable-linux-${arch}.tar.gz";
    inherit hash;
  };

  nativeBuildInputs = [
    autoPatchelfHook
    makeShellWrapper
  ];

  # Ignore libavformat dependencies as we don't need them
  autoPatchelfIgnoreMissingDeps = true;

  # Runtime libraries resolved by autoPatchelfHook against the bundled
  # binaries.  (Fix: libX11 was previously listed twice.)
  buildInputs = [
    fontconfig
    zlib
    udev
    freetype
    gtk3
    alsa-lib
    libX11
    libXext
    libXdamage
    libXfixes
    libxcb
    libXcomposite
    libXcursor
    libXi
    libXrender
    libXtst
    libXxf86vm
  ];

  desktopItem = makeDesktopItem {
    categories = [ "Network" ];
    comment = "Your entire server infrastructure at your fingertips";
    desktopName = displayname;
    # Paths are absolute under /opt; the store prefix is substituted in below.
    exec = "/opt/${pname}/cli/bin/xpipe open %U";
    genericName = "Shell connection hub";
    icon = "/opt/${pname}/logo.png";
    name = displayname;
  };

  installPhase = ''
    runHook preInstall

    pkg="${pname}"
    mkdir -p $out/opt/$pkg
    cp -r ./ $out/opt/$pkg

    mkdir -p "$out/bin"
    ln -s "$out/opt/$pkg/cli/bin/xpipe" "$out/bin/$pkg"

    mkdir -p "$out/share/applications"
    cp -r "${desktopItem}/share/applications/" "$out/share/"

    mkdir -p "$out/etc/bash_completion.d"
    ln -s "$out/opt/$pkg/cli/xpipe_completion" "$out/etc/bash_completion.d/$pkg"

    # Rewrite the /opt paths in the desktop entry to point into the store.
    substituteInPlace $out/share/applications/${displayname}.desktop --replace "Exec=" "Exec=$out"
    substituteInPlace $out/share/applications/${displayname}.desktop --replace "Icon=" "Icon=$out"

    # Wrap the daemon and debug script so they find fontconfig/gtk3/udev at
    # runtime; the originals are kept under *_raw names.
    mv "$out/opt/xpipe/app/bin/xpiped" "$out/opt/xpipe/app/bin/xpiped_raw"
    mv "$out/opt/xpipe/app/lib/app/xpiped.cfg" "$out/opt/xpipe/app/lib/app/xpiped_raw.cfg"
    mv "$out/opt/xpipe/app/scripts/xpiped_debug.sh" "$out/opt/xpipe/app/scripts/xpiped_debug_raw.sh"

    makeShellWrapper "$out/opt/xpipe/app/bin/xpiped_raw" "$out/opt/xpipe/app/bin/xpiped" \
      --prefix LD_LIBRARY_PATH : "${lib.makeLibraryPath [ fontconfig gtk3 udev ]}"
    makeShellWrapper "$out/opt/xpipe/app/scripts/xpiped_debug_raw.sh" "$out/opt/xpipe/app/scripts/xpiped_debug.sh" \
      --prefix LD_LIBRARY_PATH : "${lib.makeLibraryPath [ fontconfig gtk3 udev ]}"

    runHook postInstall
  '';

  meta = with lib; {
    description = "A cross-platform shell connection hub and remote file manager";
    homepage = "https://github.com/xpipe-io/${pname}";
    downloadPage = "https://github.com/xpipe-io/${pname}/releases/latest";
    sourceProvenance = with sourceTypes; [ binaryNativeCode ];
    changelog = "https://github.com/xpipe-io/${pname}/releases/tag/${version}";
    license = [ licenses.asl20 licenses.unfree ];
    maintainers = with maintainers; [ crschnick ];
    platforms = [ "x86_64-linux" "aarch64-linux" ];
    mainProgram = pname;
  };
}

View File

@ -33,11 +33,11 @@ in
stdenv.mkDerivation (finalAttrs: { stdenv.mkDerivation (finalAttrs: {
pname = "scribus"; pname = "scribus";
version = "1.6.0"; version = "1.6.1";
src = fetchurl { src = fetchurl {
url = "mirror://sourceforge/scribus/scribus-devel/scribus-${finalAttrs.version}.tar.xz"; url = "mirror://sourceforge/scribus/scribus-devel/scribus-${finalAttrs.version}.tar.xz";
hash = "sha256-lLl0kOzhcoaNxPBMeqLulQtBtfL/QoXfN9YV8ETQOOU="; hash = "sha256-4J3Xjm22HQG5MhEI/t7bzNbsCrNS3Vuv24sEHw73npk=";
}; };
nativeBuildInputs = [ nativeBuildInputs = [

View File

@ -54,13 +54,13 @@ let
in in
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "cp2k"; pname = "cp2k";
version = "2023.2"; version = "2024.1";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "cp2k"; owner = "cp2k";
repo = "cp2k"; repo = "cp2k";
rev = "v${version}"; rev = "v${version}";
hash = "sha256-1TJorIjajWFO7i9vqSBDTAIukBdyvxbr5dargt4QB8M="; hash = "sha256-6PB6wjdTOa55dXV7QIsjxI77hhc95WFEjNePfupBUJQ=";
fetchSubmodules = true; fetchSubmodules = true;
}; };
@ -157,6 +157,7 @@ stdenv.mkDerivation rec {
-I${lib.getDev libint}/include ${lib.optionalString enableElpa "$(pkg-config --variable=fcflags elpa)"} \ -I${lib.getDev libint}/include ${lib.optionalString enableElpa "$(pkg-config --variable=fcflags elpa)"} \
-I${lib.getDev sirius}/include/sirius \ -I${lib.getDev sirius}/include/sirius \
-I${lib.getDev libxc}/include -I${lib.getDev libxsmm}/include \ -I${lib.getDev libxc}/include -I${lib.getDev libxsmm}/include \
-I${lib.getDev hdf5-fortran}/include \
-fallow-argument-mismatch -fallow-argument-mismatch
LIBS = -lfftw3 -lfftw3_threads \ LIBS = -lfftw3 -lfftw3_threads \
-lscalapack -lblas -llapack \ -lscalapack -lblas -llapack \

View File

@ -2,16 +2,16 @@
buildGoModule rec { buildGoModule rec {
pname = "gh"; pname = "gh";
version = "2.40.1"; version = "2.41.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "cli"; owner = "cli";
repo = "cli"; repo = "cli";
rev = "v${version}"; rev = "v${version}";
hash = "sha256-KdJZHouMTbbD/8k2VGFvRits7grbbVNUmCM6dSiJXBc="; hash = "sha256-GkrEirunY17WgAv4XOreG+JwPQn7cRTmr7hJ3/2tSrY=";
}; };
vendorHash = "sha256-jM9nwTMOTh+eXztLvHIwwH4qu3ZIMOtBrPEtByB9Ry8="; vendorHash = "sha256-XBoC1sHfxInkamSHNm7Vb3AKCgIch6uYx0jJWqN7PN8=";
nativeBuildInputs = [ installShellFiles ]; nativeBuildInputs = [ installShellFiles ];

View File

@ -1,27 +1,38 @@
{ lib, stdenv, fetchurl { lib
, pkg-config, openssl, libbsd, libevent, libuuid, libossp_uuid, libmd, zlib, ncurses, bison , stdenv
, fetchurl
, pkg-config
, openssl
, libbsd
, libevent
, libuuid
, libossp_uuid
, libmd
, zlib
, ncurses
, bison
, autoPatchelfHook , autoPatchelfHook
}: }:
stdenv.mkDerivation rec { stdenv.mkDerivation (finalAttrs: {
pname = "got"; pname = "got";
version = "0.94"; version = "0.95";
src = fetchurl { src = fetchurl {
url = "https://gameoftrees.org/releases/portable/got-portable-${version}.tar.gz"; url = "https://gameoftrees.org/releases/portable/got-portable-${finalAttrs.version}.tar.gz";
hash = "sha256-hG0/a+sk6uZCxR908YfZCW44qx/SIwwGO9mUaxxHZ3k="; hash = "sha256-5on9ff76OAFmoaKTwVM0hUCGLiAZGJzt6+jCx2Nygg4=";
}; };
nativeBuildInputs = [ pkg-config bison ] nativeBuildInputs = [ pkg-config bison ]
++ lib.optionals stdenv.isLinux [ autoPatchelfHook ]; ++ lib.optionals stdenv.isLinux [ autoPatchelfHook ];
buildInputs = [ openssl libbsd libevent libuuid libmd zlib ncurses ] buildInputs = [ openssl libbsd libevent libuuid libmd zlib ncurses ]
++ lib.optionals stdenv.isDarwin [ libossp_uuid ]; ++ lib.optionals stdenv.isDarwin [ libossp_uuid ];
configureFlags = [ "--enable-gotd" ]; configureFlags = [ "--enable-gotd" ];
preConfigure = lib.optionalString stdenv.isDarwin '' preConfigure = lib.optionalString stdenv.isDarwin ''
# The configure script assumes dependencies on Darwin are install via # The configure script assumes dependencies on Darwin are installed via
# Homebrew or MacPorts and hardcodes assumptions about the paths of # Homebrew or MacPorts and hardcodes assumptions about the paths of
# dependencies which fails the nixpkgs configurePhase. # dependencies which fails the nixpkgs configurePhase.
substituteInPlace configure --replace 'xdarwin' 'xhomebrew' substituteInPlace configure --replace 'xdarwin' 'xhomebrew'
@ -38,7 +49,7 @@ stdenv.mkDerivation rec {
installCheckPhase = '' installCheckPhase = ''
runHook preInstallCheck runHook preInstallCheck
test "$($out/bin/got --version)" = '${pname} ${version}' test "$($out/bin/got --version)" = "${finalAttrs.pname} ${finalAttrs.version}"
runHook postInstallCheck runHook postInstallCheck
''; '';
@ -59,4 +70,4 @@ stdenv.mkDerivation rec {
platforms = platforms.linux ++ platforms.darwin; platforms = platforms.linux ++ platforms.darwin;
maintainers = with maintainers; [ abbe afh ]; maintainers = with maintainers; [ abbe afh ];
}; };
} })

View File

@ -3,11 +3,11 @@
buildKodiAddon rec { buildKodiAddon rec {
pname = "arteplussept"; pname = "arteplussept";
namespace = "plugin.video.arteplussept"; namespace = "plugin.video.arteplussept";
version = "1.4.1"; version = "1.4.2";
src = fetchzip { src = fetchzip {
url = "https://mirrors.kodi.tv/addons/nexus/${namespace}/${namespace}-${version}.zip"; url = "https://mirrors.kodi.tv/addons/nexus/${namespace}/${namespace}-${version}.zip";
hash = "sha256-4lPJIFBF4zXr1bEyv9tVUPXw9JFt2by/tcOwihib6aQ="; hash = "sha256-dqxGKaOnEYOI33Aw76zbjma5z7MqOUh367dFsV87olU=";
}; };
propagatedBuildInputs = [ propagatedBuildInputs = [

View File

@ -49,21 +49,24 @@
, asio , asio
, decklinkSupport ? false , decklinkSupport ? false
, blackmagic-desktop-video , blackmagic-desktop-video
, libdatachannel
, libvpl
, qrcodegencpp
}: }:
let let
inherit (lib) optional optionals; inherit (lib) optional optionals;
in in
stdenv.mkDerivation rec {
stdenv.mkDerivation (finalAttrs: {
pname = "obs-studio"; pname = "obs-studio";
version = "29.1.3"; version = "30.0.2";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "obsproject"; owner = "obsproject";
repo = "obs-studio"; repo = finalAttrs.pname;
rev = version; rev = finalAttrs.version;
sha256 = "sha256-D0DPueMtopwz5rLgM8QcPT7DgTKcJKQHnst69EY9V6Q="; sha256 = "sha256-8pX1kqibrtDIaE1+/Pey1A5bu6MwFTXLrBOah4rsF+4=";
fetchSubmodules = true; fetchSubmodules = true;
}; };
@ -132,6 +135,9 @@ stdenv.mkDerivation rec {
nlohmann_json nlohmann_json
websocketpp websocketpp
asio asio
libdatachannel
libvpl
qrcodegencpp
] ]
++ optionals scriptingSupport [ luajit python3 ] ++ optionals scriptingSupport [ luajit python3 ]
++ optional alsaSupport alsa-lib ++ optional alsaSupport alsa-lib
@ -151,7 +157,7 @@ stdenv.mkDerivation rec {
''; '';
cmakeFlags = [ cmakeFlags = [
"-DOBS_VERSION_OVERRIDE=${version}" "-DOBS_VERSION_OVERRIDE=${finalAttrs.version}"
"-Wno-dev" # kill dev warnings that are useless for packaging "-Wno-dev" # kill dev warnings that are useless for packaging
# Add support for browser source # Add support for browser source
"-DBUILD_BROWSER=ON" "-DBUILD_BROWSER=ON"
@ -183,7 +189,7 @@ stdenv.mkDerivation rec {
addOpenGLRunpath $out/lib/obs-plugins/*.so addOpenGLRunpath $out/lib/obs-plugins/*.so
# Link libcef again after patchelfing other libs # Link libcef again after patchelfing other libs
ln -s ${libcef}/lib/libcef.so $out/lib/obs-plugins/libcef.so ln -s ${libcef}/lib/* $out/lib/obs-plugins/
''; '';
meta = with lib; { meta = with lib; {
@ -194,9 +200,9 @@ stdenv.mkDerivation rec {
video content, efficiently video content, efficiently
''; '';
homepage = "https://obsproject.com"; homepage = "https://obsproject.com";
maintainers = with maintainers; [ jb55 MP2E materus ]; maintainers = with maintainers; [ jb55 MP2E materus fpletz ];
license = licenses.gpl2Plus; license = licenses.gpl2Plus;
platforms = [ "x86_64-linux" "i686-linux" "aarch64-linux" ]; platforms = [ "x86_64-linux" "i686-linux" "aarch64-linux" ];
mainProgram = "obs"; mainProgram = "obs";
}; };
} })

View File

@ -33,6 +33,5 @@ stdenv.mkDerivation rec {
license = licenses.mit; license = licenses.mit;
maintainers = with maintainers; [ algram ]; maintainers = with maintainers; [ algram ];
platforms = [ "x86_64-linux" ]; platforms = [ "x86_64-linux" ];
broken = true; # Not compatible with qt6 yet but required by OBS28
}; };
} }

View File

@ -27,10 +27,6 @@ stdenv.mkDerivation rec {
"-Wno-dev" "-Wno-dev"
]; ];
preConfigure = ''
cp ${obs-studio.src}/cmake/external/ObsPluginHelpers.cmake cmake/FindLibObs.cmake
'';
meta = with lib; { meta = with lib; {
description = "Audio device and application capture for OBS Studio using PipeWire"; description = "Audio device and application capture for OBS Studio using PipeWire";
homepage = "https://github.com/dimtpap/obs-pipewire-audio-capture"; homepage = "https://github.com/dimtpap/obs-pipewire-audio-capture";

View File

@ -28,6 +28,9 @@ stdenv.mkDerivation (finalAttrs: {
fetchSubmodules = true; fetchSubmodules = true;
}; };
# obs_frontend_add_dock() deprecated in obs 30
env.NIX_CFLAGS_COMPILE = "-Wno-error=deprecated-declarations";
patches = [ patches = [
# fix build with qt 6.6.0 # fix build with qt 6.6.0
# treewide: replace deprecated qAsConst with std::as_const() # treewide: replace deprecated qAsConst with std::as_const()
@ -36,6 +39,11 @@ stdenv.mkDerivation (finalAttrs: {
url = "https://github.com/univrsal/tuna/commit/0d570e771f8d8e6ae7c85bd2b86bbf59c264789e.patch"; url = "https://github.com/univrsal/tuna/commit/0d570e771f8d8e6ae7c85bd2b86bbf59c264789e.patch";
hash = "sha256-A5idhMiM9funqhTm5XMIBqwy+FO1SaNPtgZjo+Vws6k="; hash = "sha256-A5idhMiM9funqhTm5XMIBqwy+FO1SaNPtgZjo+Vws6k=";
}) })
# fix build with obs 30
(fetchpatch2 {
url = "https://github.com/univrsal/tuna/commit/723bd3c7b4e257cf0997611426e555068de77ae7.patch";
hash = "sha256-MF5vghGYknL6q+A8BJ1yrQcEKIu9I+PWk+RZNYg3fRU=";
})
]; ];
postInstall = '' postInstall = ''

View File

@ -1,40 +1,53 @@
{ pkgs }: { lib
, buildPackages
}:
let
# rudimentary support for cross-compiling
# see: https://github.com/NixOS/nixpkgs/pull/279487#discussion_r1444449726
inherit (buildPackages)
mktemp
rsync
;
in
rec { rec {
/* Prepare a derivation for local builds. /* Prepare a derivation for local builds.
* *
* This function prepares checkpoint builds by provinding, * This function prepares checkpoint builds by storing
* containing the build output and the sources for cross checking. * the build output and the sources for cross checking.
* The build output can be used later to allow checkpoint builds * The build output can be used later to allow checkpoint builds
* by passing the derivation output to the `mkCheckpointBuild` function. * by passing the derivation output to the `mkCheckpointBuild` function.
* *
* To build a project with checkpoints follow these steps: * To build a project with checkpoints, follow these steps:
* - run prepareIncrementalBuild on the desired derivation * - run `prepareCheckpointBuild` on the desired derivation, e.g.
* e.G `incrementalBuildArtifacts = (pkgs.checkpointBuildTools.prepareCheckpointBuild pkgs.virtualbox);` * checkpointArtifacts = prepareCheckpointBuild virtualbox;
* - change something you want in the sources of the package( e.G using source override) * - change something you want in the sources of the package,
* changedVBox = pkgs.virtuabox.overrideAttrs (old: { * e.g. using source override:
* src = path/to/vbox/sources; * changedVBox = pkgs.virtuabox.overrideAttrs (old: {
* } * src = path/to/vbox/sources;
* - use `mkCheckpointedBuild changedVBox buildOutput` * };
* - use `mkCheckpointBuild changedVBox checkpointArtifacts`
* - enjoy shorter build times * - enjoy shorter build times
*/ */
prepareCheckpointBuild = drv: drv.overrideAttrs (old: { prepareCheckpointBuild = drv: drv.overrideAttrs (old: {
outputs = [ "out" ]; outputs = [ "out" ];
name = drv.name + "-checkpointArtifacts"; name = drv.name + "-checkpointArtifacts";
# To determine differences between the state of the build directory # To determine differences between the state of the build directory
# from an earlier build and a later one we store the state of the build # from an earlier build and a later one we store the state of the build
# directory before build, but after patch phases. # directory before build, but after patch phases.
# This way, the same derivation can be used multiple times and only changes are detected. # This way, the same derivation can be used multiple times and only changes are detected.
# Additionally Removed files are handled correctly in later builds. # Additionally, removed files are handled correctly in later builds.
preBuild = (old.preBuild or "") + '' preBuild = (old.preBuild or "") + ''
mkdir -p $out/sources mkdir -p $out/sources
cp -r ./* $out/sources/ cp -r ./* $out/sources/
''; '';
# After the build the build directory is copied again # After the build, the build directory is copied again
# to get the output files. # to get the output files.
# We copy the complete build folder, to take care for # We copy the complete build folder, to take care of
# Build tools, building in the source directory, instead of # build tools that build in the source directory, instead of
# having a build root directory, e.G the Linux kernel. # having a separate build directory such as the Linux kernel.
installPhase = '' installPhase = ''
runHook preCheckpointInstall runHook preCheckpointInstall
mkdir -p $out/outputs mkdir -p $out/outputs
@ -44,26 +57,34 @@ rec {
}); });
/* Build a derivation based on the checkpoint output generated by /* Build a derivation based on the checkpoint output generated by
* the `prepareCheckpointBuild function. * the `prepareCheckpointBuild` function.
* *
* Usage: * Usage:
* let * let
* checkpointArtifacts = prepareCheckpointBuild drv * checkpointArtifacts = prepareCheckpointBuild drv;
* in mkCheckpointedBuild drv checkpointArtifacts * in mkCheckpointBuild drv checkpointArtifacts
*/ */
mkCheckpointedBuild = drv: previousBuildArtifacts: drv.overrideAttrs (old: { mkCheckpointBuild = drv: checkpointArtifacts: drv.overrideAttrs (old: {
# The actual checkpoint build phase. # The actual checkpoint build phase.
# We compare the changed sources from a previous build with the current and create a patch # We compare the changed sources from a previous build with the current and create a patch.
# Afterwards we clean the build directory to copy the previous output files (Including the sources) # Afterwards we clean the build directory and copy the previous output files (including the sources).
# The source difference patch is applied to get the latest changes again to allow short build times. # The source difference patch is then applied to get the latest changes again to allow short build times.
preBuild = (old.preBuild or "") + '' preBuild = (old.preBuild or "") + ''
set +e set +e
diff -ur ${previousBuildArtifacts}/sources ./ > sourceDifference.patch sourceDifferencePatchFile=$(${mktemp}/bin/mktemp)
diff -ur ${checkpointArtifacts}/sources ./ > "$sourceDifferencePatchFile"
set -e set -e
shopt -s extglob dotglob shopt -s dotglob
rm -r !("sourceDifference.patch") rm -r *
${pkgs.rsync}/bin/rsync -cutU --chown=$USER:$USER --chmod=+w -r ${previousBuildArtifacts}/outputs/* . ${rsync}/bin/rsync \
patch -p 1 -i sourceDifference.patch --checksum --times --atimes --chown=$USER:$USER --chmod=+w \
-r ${checkpointArtifacts}/outputs/ .
patch -p 1 -i "$sourceDifferencePatchFile"
rm "$sourceDifferencePatchFile"
''; '';
}); });
mkCheckpointedBuild = lib.warn
"`mkCheckpointedBuild` is deprecated, use `mkCheckpointBuild` instead!"
mkCheckpointBuild;
} }

View File

@ -0,0 +1,24 @@
{ lib
, python3Packages
, fetchFromGitHub
}:

# Command-line tool that looks up metadata for a given file extension.
python3Packages.buildPythonApplication {
  pname = "fileinfo";
  # No tagged releases upstream; pinned to a commit.
  version = "unstable-2022-09-16";

  src = fetchFromGitHub {
    owner = "sdushantha";
    repo = "fileinfo";
    rev = "503f26189ad5043bad3fe71333dd5ba3ffbce485";
    hash = "sha256-tEmCsR3LmTxeDZAbMvbIwqp/6uaGNUhgGlm18gdsnOw=";
  };

  propagatedBuildInputs = [ python3Packages.requests ];

  meta = {
    homepage = "https://github.com/sdushantha/fileinfo";
    description = "A file extension metadata lookup tool";
    license = lib.licenses.mit;
    maintainers = [ lib.maintainers.h7x4 ];
    mainProgram = "fileinfo";
  };
}

View File

@ -0,0 +1,41 @@
{ stdenv
, lib
, fetchFromGitHub
, cmake
, pkg-config
}:

# Intel Video Processing Library (oneVPL successor).
stdenv.mkDerivation (finalAttrs: {
  pname = "libvpl";
  version = "2.10.1";

  src = fetchFromGitHub {
    owner = "intel";
    repo = "libvpl";
    rev = "v${finalAttrs.version}";
    hash = "sha256-2yfJo4iwI/h0CJ+mJJ3cAyG5S7KksUibwJHebF3MR+E=";
  };

  nativeBuildInputs = [
    cmake
    pkg-config
  ];

  # Enable every Linux display/codec backend; skip examples and tools.
  cmakeFlags = [
    "-DCMAKE_BUILD_TYPE=Release"
    "-DENABLE_DRI3=ON"
    "-DENABLE_DRM=ON"
    "-DENABLE_VA=ON"
    "-DENABLE_WAYLAND=ON"
    "-DENABLE_X11=ON"
    "-DINSTALL_EXAMPLE_CODE=OFF"
    "-DBUILD_TOOLS=OFF"
  ];

  meta = {
    description = "Intel Video Processing Library";
    homepage = "https://intel.github.io/libvpl/";
    license = lib.licenses.mit;
    platforms = lib.platforms.linux;
  };
})

View File

@ -0,0 +1,32 @@
{ lib
, stdenv
, qrcodegen
}:

# C++ flavor of the qrcodegen library, built from the cpp/ subdirectory of
# the same source tree as the C package it tracks.
stdenv.mkDerivation (finalAttrs: {
  pname = "qrcodegencpp";
  # Reuse the C package's source and version so both stay in lockstep.
  inherit (qrcodegen) version src;

  sourceRoot = "${finalAttrs.src.name}/cpp";

  # Clang builds use llvm-ar; the Makefile defaults assume GNU binutils.
  nativeBuildInputs = lib.optionals stdenv.cc.isClang [
    stdenv.cc.cc.libllvm.out
  ];

  makeFlags = lib.optionals stdenv.cc.isClang [ "AR=llvm-ar" ];

  # Upstream Makefile has no install target; copy the artifacts by hand.
  installPhase = ''
    runHook preInstall

    install -Dt $out/lib/ libqrcodegencpp.a
    install -Dt $out/include/qrcodegen/ qrcodegen.hpp

    runHook postInstall
  '';

  meta = {
    description = qrcodegen.meta.description;
    homepage = qrcodegen.meta.homepage;
    license = qrcodegen.meta.license;
    maintainers = qrcodegen.meta.maintainers;
    platforms = qrcodegen.meta.platforms;
  };
})

View File

@ -10,7 +10,7 @@ elfHasDynamicSection() {
autoAddCudaCompatRunpathPhase() ( autoAddCudaCompatRunpathPhase() (
local outputPaths local outputPaths
mapfile -t outputPaths < <(for o in $(getAllOutputNames); do echo "${!o}"; done) mapfile -t outputPaths < <(for o in $(getAllOutputNames); do echo "${!o}"; done)
find "${outputPaths[@]}" -type f -executable -print0 | while IFS= read -rd "" f; do find "${outputPaths[@]}" -type f -print0 | while IFS= read -rd "" f; do
if isELF "$f"; then if isELF "$f"; then
# patchelf returns an error on statically linked ELF files # patchelf returns an error on statically linked ELF files
if elfHasDynamicSection "$f" ; then if elfHasDynamicSection "$f" ; then

View File

@ -9,7 +9,7 @@ elfHasDynamicSection() {
autoAddOpenGLRunpathPhase() ( autoAddOpenGLRunpathPhase() (
local outputPaths local outputPaths
mapfile -t outputPaths < <(for o in $(getAllOutputNames); do echo "${!o}"; done) mapfile -t outputPaths < <(for o in $(getAllOutputNames); do echo "${!o}"; done)
find "${outputPaths[@]}" -type f -executable -print0 | while IFS= read -rd "" f; do find "${outputPaths[@]}" -type f -print0 | while IFS= read -rd "" f; do
if isELF "$f"; then if isELF "$f"; then
# patchelf returns an error on statically linked ELF files # patchelf returns an error on statically linked ELF files
if elfHasDynamicSection "$f" ; then if elfHasDynamicSection "$f" ; then

View File

@ -30,6 +30,10 @@
}: }:
let let
gl_rpath = lib.makeLibraryPath [
stdenv.cc.cc.lib
];
rpath = lib.makeLibraryPath [ rpath = lib.makeLibraryPath [
glib glib
nss nss
@ -92,7 +96,11 @@ stdenv.mkDerivation rec {
mkdir -p $out/lib/ $out/share/cef/ mkdir -p $out/lib/ $out/share/cef/
cp libcef_dll_wrapper/libcef_dll_wrapper.a $out/lib/ cp libcef_dll_wrapper/libcef_dll_wrapper.a $out/lib/
cp ../Release/libcef.so $out/lib/ cp ../Release/libcef.so $out/lib/
cp ../Release/libEGL.so $out/lib/
cp ../Release/libGLESv2.so $out/lib/
patchelf --set-rpath "${rpath}" $out/lib/libcef.so patchelf --set-rpath "${rpath}" $out/lib/libcef.so
patchelf --set-rpath "${gl_rpath}" $out/lib/libEGL.so
patchelf --set-rpath "${gl_rpath}" $out/lib/libGLESv2.so
cp ../Release/*.bin $out/share/cef/ cp ../Release/*.bin $out/share/cef/
cp -r ../Resources/* $out/share/cef/ cp -r ../Resources/* $out/share/cef/
cp -r ../include $out/ cp -r ../include $out/

View File

@ -1,5 +1,6 @@
{ lib { lib
, stdenv , stdenv
, fetchpatch
, fetchurl , fetchurl
, fetchFromGitHub , fetchFromGitHub
, fixDarwinDylibNames , fixDarwinDylibNames
@ -20,30 +21,30 @@ stdenv.mkDerivation rec {
patches = [ patches = [
# Fixes build with Musl. # Fixes build with Musl.
(fetchurl { (fetchpatch {
url = "https://github.com/openembedded/meta-openembedded/raw/39185eb1d1615e919e3ae14ae63b8ed7d3e5d83f/meta-oe/recipes-support/tbb/tbb/GLIBC-PREREQ-is-not-defined-on-musl.patch"; url = "https://github.com/openembedded/meta-openembedded/raw/39185eb1d1615e919e3ae14ae63b8ed7d3e5d83f/meta-oe/recipes-support/tbb/tbb/GLIBC-PREREQ-is-not-defined-on-musl.patch";
sha256 = "gUfXQ9OZQ82qD6brgauBCsKdjLvyHafMc18B+KxZoYs="; hash = "sha256-Oo5FSBPPBaOziWEBOlRmTmbulExMsAmQWBR5faOj1a0=";
}) })
# Fixes build with Musl. # Fixes build with Musl.
(fetchurl { (fetchpatch {
url = "https://github.com/openembedded/meta-openembedded/raw/39185eb1d1615e919e3ae14ae63b8ed7d3e5d83f/meta-oe/recipes-support/tbb/tbb/0001-mallinfo-is-glibc-specific-API-mark-it-so.patch"; url = "https://github.com/openembedded/meta-openembedded/raw/39185eb1d1615e919e3ae14ae63b8ed7d3e5d83f/meta-oe/recipes-support/tbb/tbb/0001-mallinfo-is-glibc-specific-API-mark-it-so.patch";
sha256 = "fhorfqO1hHKZ61uq+yTR7eQ8KYdyLwpM3K7WpwJpV74="; hash = "sha256-xp8J/il855VTFIKCN/bFtf+vif6HzcVl4t4/L9nW/xk=";
}) })
# Fixes build with upcoming gcc-13: # Fixes build with upcoming gcc-13:
# https://github.com/oneapi-src/oneTBB/pull/833 # https://github.com/oneapi-src/oneTBB/pull/833
(fetchurl { (fetchpatch {
name = "gcc-13.patch"; name = "gcc-13.patch";
url = "https://github.com/oneapi-src/oneTBB/pull/833/commits/c18342ba667d1f33f5e9a773aa86b091a9694b97.patch"; url = "https://github.com/oneapi-src/oneTBB/pull/833/commits/c18342ba667d1f33f5e9a773aa86b091a9694b97.patch";
sha256 = "ZUExE3nsW80Z5GPWZnDNuDiHHaD1EF7qNl/G5M+Wcxg="; hash = "sha256-LWgf7Rm6Zp4TJdvMqnAkoAebbVS+WV2kB+4iY6jRka4=";
}) })
# Fixes build for aarch64-darwin # Fixes build for aarch64-darwin
(fetchurl { (fetchpatch {
name = "aarch64-darwin.patch"; name = "aarch64-darwin.patch";
url = "https://github.com/oneapi-src/oneTBB/pull/258/commits/86f6dcdc17a8f5ef2382faaef860cfa5243984fe.patch"; url = "https://github.com/oneapi-src/oneTBB/pull/258/commits/86f6dcdc17a8f5ef2382faaef860cfa5243984fe.patch";
sha256 = "sha256-JXqrFPCb3q1vfxk752tQu7HhApCB4YH2LoVnGRwmspk="; hash = "sha256-+sNU8yEsVVmQYOCKmlNiyJfKmB/U0GKAmrydwkfrDFQ=";
}) })
]; ];

View File

@ -2,11 +2,11 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "maestro"; pname = "maestro";
version = "1.34.1"; version = "1.35.0";
src = fetchurl { src = fetchurl {
url = "https://github.com/mobile-dev-inc/maestro/releases/download/cli-${version}/maestro.zip"; url = "https://github.com/mobile-dev-inc/maestro/releases/download/cli-${version}/maestro.zip";
sha256 = "0whnhcf7a3j01693254qqwfk9d3xa4icv4kyqkn4ihxyibznb91d"; sha256 = "1rr3ihirga9jjw1n9z45hby6j68d0q11alzhqz4yv2ibvrjykzai";
}; };
dontUnpack = true; dontUnpack = true;

View File

@ -0,0 +1,60 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, setuptools
, strct
, pytestCheckHook
, pyyaml
}:

# birch: simple hierarchical configuration library for Python packages.
buildPythonPackage rec {
  pname = "birch";
  version = "0.0.35";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "shaypal5";
    repo = "birch";
    rev = "v${version}";
    hash = "sha256-KdQZzQJvJ+logpcLQfaqqEEZJ/9VmNTQX/a4v0oBC98=";
  };

  # Comment out the coverage flag in pytest.ini so the test run does not
  # require pytest-cov.
  postPatch = ''
    substituteInPlace pytest.ini \
      --replace \
        "--cov" \
        "#--cov"
  '';

  nativeBuildInputs = [
    setuptools
  ];

  propagatedBuildInputs = [
    strct
  ];

  # Smoke-test that the public submodules import cleanly.
  pythonImportsCheck = [
    "birch"
    "birch.casters"
    "birch.exceptions"
    "birch.paths"
  ];

  nativeCheckInputs = [
    pytestCheckHook
    pyyaml
  ];

  # The test suite needs a writable HOME (presumably for configuration
  # files — TODO confirm which paths it touches).
  preCheck = ''
    export HOME="$(mktemp -d)"
  '';

  meta = with lib; {
    description = "Simple hierarchical configuration for Python packages";
    homepage = "https://github.com/shaypal5/birch";
    license = licenses.mit;
    maintainers = with maintainers; [ pbsds ];
  };
}

View File

@ -1,29 +1,31 @@
{ lib { lib
, buildPythonPackage , buildPythonPackage
, pythonOlder , pythonOlder
, fetchPypi , fetchFromGitHub
, pythonRelaxDepsHook , pythonRelaxDepsHook
, setuptools , setuptools
, watchdog , watchdog
, portalocker , portalocker
, pathtools
, pytestCheckHook , pytestCheckHook
, pymongo , pymongo
, dnspython , dnspython
, pymongo-inmemory , pymongo-inmemory
, pandas , pandas
, birch
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "cachier"; pname = "cachier";
version = "2.2.1"; version = "2.2.2";
format = "setuptools"; pyproject = true;
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.8";
src = fetchPypi { src = fetchFromGitHub {
inherit pname version; owner = "python-cachier";
hash = "sha256-nm98LT87Z7yErKvIqMp93OEX9TDojqqtItgryHgSQJQ="; repo = "cachier";
rev = "v${version}";
hash = "sha256-zUZqT4SIwZRqhRS/wHIzIYVULnp5aYcytCQd17T0D/4=";
}; };
pythonRemoveDeps = [ "setuptools" ]; pythonRemoveDeps = [ "setuptools" ];
@ -36,7 +38,6 @@ buildPythonPackage rec {
propagatedBuildInputs = [ propagatedBuildInputs = [
watchdog watchdog
portalocker portalocker
pathtools
]; ];
preCheck = '' preCheck = ''
@ -52,6 +53,7 @@ buildPythonPackage rec {
dnspython dnspython
pymongo-inmemory pymongo-inmemory
pandas pandas
birch
]; ];
disabledTests = [ disabledTests = [

View File

@ -9,14 +9,14 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "ftputil"; pname = "ftputil";
version = "5.0.4"; version = "5.1.0";
format = "setuptools"; format = "setuptools";
disabled = pythonOlder "3.6"; disabled = pythonOlder "3.6";
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit pname version;
hash = "sha256-aInbhkndINm21ApsXw+EzPNAp9rB4L/A8AJAkPwq+zM="; hash = "sha256-6eYtP9MH75xS5Dsz/ZJ1n8lMBNi1F4+F9kGxg5BtQ1M=";
}; };
nativeCheckInputs = [ nativeCheckInputs = [

View File

@ -15,7 +15,7 @@
, httpx , httpx
}: }:
let let
version = "1.15.0"; version = "1.16.19";
in in
buildPythonPackage rec { buildPythonPackage rec {
pname = "litellm"; pname = "litellm";
@ -26,7 +26,7 @@ buildPythonPackage rec {
owner = "BerriAI"; owner = "BerriAI";
repo = "litellm"; repo = "litellm";
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-s3Ue/N04YZHEfEnVxPHupRSVDHxWjVse8FDlRF5yKCk="; hash = "sha256-KNQuTgJj7oLJsxfi8g9ShC5WHyrdpZGI5Nfgxzu/eak=";
}; };
postPatch = '' postPatch = ''

View File

@ -11,14 +11,14 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "opencensus-ext-azure"; pname = "opencensus-ext-azure";
version = "1.1.12"; version = "1.1.13";
format = "setuptools"; format = "setuptools";
disabled = pythonOlder "3.4"; disabled = pythonOlder "3.4";
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit pname version;
hash = "sha256-hrseR84dIKytlq08Efjvsvp6tensSJbzBj2F+JlJBGI="; hash = "sha256-rsMEchdwBTebpWpwKgl9YYxfV1WOG7ZnbsdflIEwaSo=";
}; };
propagatedBuildInputs = [ propagatedBuildInputs = [

View File

@ -16,7 +16,7 @@
, python-docx , python-docx
}: }:
let let
version = "0.5.6"; version = "0.5.7";
in in
buildPythonPackage { buildPythonPackage {
pname = "pdf2docx"; pname = "pdf2docx";
@ -26,8 +26,8 @@ buildPythonPackage {
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "dothinking"; owner = "dothinking";
repo = "pdf2docx"; repo = "pdf2docx";
rev = "v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-NrT4GURQIJbqnHstfJrPzwLXT9c2oGBi4QJ6eGIFwu4="; hash = "sha256-GDftANn+ioaNR28VfRFDuFgdKoy7D4xiy0ezvWJ3zy0=";
}; };
nativeBuildInputs = [ nativeBuildInputs = [

View File

@ -1,25 +1,27 @@
{ lib { lib
, buildPythonPackage , buildPythonPackage
, isPy3k , pythonOlder
, fetchPypi , fetchFromGitHub
, substituteAll , substituteAll
, graphviz , graphviz
, coreutils , coreutils
, pkg-config , pkg-config
, setuptools
, pytest , pytest
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "pygraphviz"; pname = "pygraphviz";
version = "1.11"; version = "1.12";
format = "setuptools"; pyproject = true;
disabled = !isPy3k; disabled = pythonOlder "3.10";
src = fetchPypi { src = fetchFromGitHub {
inherit pname version; owner = "pygraphviz";
hash = "sha256-qX61ztJm9FBT67HyxsbSkJFpBQPjpcFL5/kIs3sG8tQ="; repo = "pygraphviz";
extension = "zip"; rev = "pygraphviz-${version}";
hash = "sha256-XDP77H724eiMa/V18OtLxpUpxlIVDmcFLMYOAbazquo=";
}; };
patches = [ patches = [
@ -30,7 +32,10 @@ buildPythonPackage rec {
}) })
]; ];
nativeBuildInputs = [ pkg-config ]; nativeBuildInputs = [
pkg-config
setuptools
];
buildInputs = [ graphviz ]; buildInputs = [ graphviz ];

View File

@ -7,14 +7,14 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "rapidgzip"; pname = "rapidgzip";
version = "0.11.1"; version = "0.12.1";
format = "setuptools"; format = "setuptools";
disabled = pythonOlder "3.6"; disabled = pythonOlder "3.6";
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit pname version;
hash = "sha256-pcKO9BovkUDlRjE8MZQEfTSutVMB/9beyAyP3vChMUE="; hash = "sha256-s4MLxhwoGS7Zvx6k5qh1PWpyTRBUBGVIkPW9q94u+2Q=";
}; };
nativeBuildInputs = [ nasm ]; nativeBuildInputs = [ nasm ];

View File

@ -0,0 +1,52 @@
{ lib
, fetchFromGitHub
, buildPythonPackage
, setuptools
, pytestCheckHook
, sortedcontainers
}:

# strct: pure-Python utility functions for common data structures.
buildPythonPackage rec {
  pname = "strct";
  version = "0.0.32";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "shaypal5";
    repo = "strct";
    rev = "v${version}";
    hash = "sha256-ctafvdfSOdp7tlCUYg7d5XTXR1qBcWvOVtGtNUnhYIw=";
  };

  # Comment out the coverage flag in pytest.ini so the test run does not
  # require pytest-cov.
  postPatch = ''
    substituteInPlace pytest.ini \
      --replace \
        "--cov" \
        "#--cov"
  '';

  nativeBuildInputs = [
    setuptools
  ];

  nativeCheckInputs = [
    pytestCheckHook
    sortedcontainers
  ];

  # Smoke-test that the public submodules import cleanly.
  pythonImportsCheck = [
    "strct"
    "strct.dicts"
    "strct.hash"
    "strct.lists"
    "strct.sets"
    "strct.sortedlists"
  ];

  meta = with lib; {
    description = "A small pure-python package for data structure related utility functions";
    homepage = "https://github.com/shaypal5/strct";
    license = licenses.mit;
    maintainers = with maintainers; [ pbsds ];
  };
}

View File

@ -1,5 +1,6 @@
{ lib { lib
, stdenv , stdenv
, linkFarm
, buildPythonPackage , buildPythonPackage
, cargo , cargo
, datasets , datasets
@ -21,41 +22,43 @@
let let
# See https://github.com/huggingface/tokenizers/blob/main/bindings/python/tests/utils.py for details # See https://github.com/huggingface/tokenizers/blob/main/bindings/python/tests/utils.py for details
# about URLs and file names # about URLs and file names
robertaVocab = fetchurl { test-data = linkFarm "tokenizers-test-data" {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/roberta-base-vocab.json"; "roberta-base-vocab.json" = fetchurl {
sha256 = "0m86wpkfb2gdh9x9i9ng2fvwk1rva4p0s98xw996nrjxs7166zwy"; url = "https://s3.amazonaws.com/models.huggingface.co/bert/roberta-base-vocab.json";
}; sha256 = "0m86wpkfb2gdh9x9i9ng2fvwk1rva4p0s98xw996nrjxs7166zwy";
robertaMerges = fetchurl { };
url = "https://s3.amazonaws.com/models.huggingface.co/bert/roberta-base-merges.txt"; "roberta-base-merges.txt" = fetchurl {
sha256 = "1idd4rvkpqqbks51i2vjbd928inw7slij9l4r063w3y5fd3ndq8w"; url = "https://s3.amazonaws.com/models.huggingface.co/bert/roberta-base-merges.txt";
}; sha256 = "1idd4rvkpqqbks51i2vjbd928inw7slij9l4r063w3y5fd3ndq8w";
albertVocab = fetchurl { };
url = "https://s3.amazonaws.com/models.huggingface.co/bert/albert-base-v1-tokenizer.json"; "albert-base-v1-tokenizer.json" = fetchurl {
sha256 = "1hra9pn8rczx7378z88zjclw2qsdrdwq20m56sy42s2crbas6akf"; url = "https://s3.amazonaws.com/models.huggingface.co/bert/albert-base-v1-tokenizer.json";
}; sha256 = "1hra9pn8rczx7378z88zjclw2qsdrdwq20m56sy42s2crbas6akf";
bertVocab = fetchurl { };
url = "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt"; "bert-base-uncased-vocab.txt" = fetchurl {
sha256 = "18rq42cmqa8zanydsbzrb34xwy4l6cz1y900r4kls57cbhvyvv07"; url = "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt";
}; sha256 = "18rq42cmqa8zanydsbzrb34xwy4l6cz1y900r4kls57cbhvyvv07";
norvigBig = fetchurl { };
url = "https://norvig.com/big.txt"; "big.txt" = fetchurl {
sha256 = "0yz80icdly7na03cfpl0nfk5h3j3cam55rj486n03wph81ynq1ps"; url = "https://norvig.com/big.txt";
}; sha256 = "0yz80icdly7na03cfpl0nfk5h3j3cam55rj486n03wph81ynq1ps";
docPipelineTokenizer = fetchurl { };
url = "https://s3.amazonaws.com/models.huggingface.co/bert/anthony/doc-pipeline/tokenizer.json"; "bert-wiki.json" = fetchurl {
hash = "sha256-i533xC8J5CDMNxBjo+p6avIM8UOcui8RmGAmK0GmfBc="; url = "https://s3.amazonaws.com/models.huggingface.co/bert/anthony/doc-pipeline/tokenizer.json";
}; hash = "sha256-i533xC8J5CDMNxBjo+p6avIM8UOcui8RmGAmK0GmfBc=";
docQuicktourTokenizer = fetchurl { };
url = "https://s3.amazonaws.com/models.huggingface.co/bert/anthony/doc-quicktour/tokenizer.json"; "tokenizer-wiki.json" = fetchurl {
hash = "sha256-ipY9d5DR5nxoO6kj7rItueZ9AO5wq9+Nzr6GuEIfIBI="; url = "https://s3.amazonaws.com/models.huggingface.co/bert/anthony/doc-quicktour/tokenizer.json";
}; hash = "sha256-ipY9d5DR5nxoO6kj7rItueZ9AO5wq9+Nzr6GuEIfIBI=";
openaiVocab = fetchurl { };
url = "https://s3.amazonaws.com/models.huggingface.co/bert/openai-gpt-vocab.json"; "openai-gpt-vocab.json" = fetchurl {
sha256 = "0y40gc9bixj5rxv674br1rxmxkd3ly29p80x1596h8yywwcrpx7x"; url = "https://s3.amazonaws.com/models.huggingface.co/bert/openai-gpt-vocab.json";
}; sha256 = "0y40gc9bixj5rxv674br1rxmxkd3ly29p80x1596h8yywwcrpx7x";
openaiMerges = fetchurl { };
url = "https://s3.amazonaws.com/models.huggingface.co/bert/openai-gpt-merges.txt"; "openai-gpt-merges.txt" = fetchurl {
sha256 = "09a754pm4djjglv3x5pkgwd6f79i2rq8ydg0f7c3q1wmwqdbba8f"; url = "https://s3.amazonaws.com/models.huggingface.co/bert/openai-gpt-merges.txt";
sha256 = "09a754pm4djjglv3x5pkgwd6f79i2rq8ydg0f7c3q1wmwqdbba8f";
};
}; };
in in
buildPythonPackage rec { buildPythonPackage rec {
@ -107,16 +110,7 @@ buildPythonPackage rec {
postUnpack = '' postUnpack = ''
# Add data files for tests, otherwise tests attempt network access # Add data files for tests, otherwise tests attempt network access
mkdir $sourceRoot/tests/data mkdir $sourceRoot/tests/data
( cd $sourceRoot/tests/data ln -s ${test-data}/* $sourceRoot/tests/data/
ln -s ${robertaVocab} roberta-base-vocab.json
ln -s ${robertaMerges} roberta-base-merges.txt
ln -s ${albertVocab} albert-base-v1-tokenizer.json
ln -s ${bertVocab} bert-base-uncased-vocab.txt
ln -s ${docPipelineTokenizer} bert-wiki.json
ln -s ${docQuicktourTokenizer} tokenizer-wiki.json
ln -s ${norvigBig} big.txt
ln -s ${openaiVocab} openai-gpt-vocab.json
ln -s ${openaiMerges} openai-gpt-merges.txt )
''; '';
preCheck = '' preCheck = ''

View File

@ -68,7 +68,7 @@ in buildPythonPackage {
jinja2 jinja2
networkx networkx
filelock filelock
] ++ lib.optionals stdenv.isx86_64 [ ] ++ lib.optionals (stdenv.isLinux && stdenv.isx86_64) [
openai-triton openai-triton
]; ];

View File

@ -7,16 +7,16 @@
buildGoModule rec { buildGoModule rec {
pname = "bearer"; pname = "bearer";
version = "1.33.1"; version = "1.34.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "bearer"; owner = "bearer";
repo = "bearer"; repo = "bearer";
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-cdD4LYQZwkS5dRhmvyHkio7TXPDgfDo7kutVAGJCitc="; hash = "sha256-JNYjBcuA2KDdhd1yF0E7mEhNJ7xQRT+wFlnAnal/P9I=";
}; };
vendorHash = "sha256-nh2hkwscb4EYEfumBXPFrLgxIxRlkVqBCnQZ4eMZbgg="; vendorHash = "sha256-DykY1PFKsJ++F8ToAhyss5nAmsTOfXQXJpSo21oEhYc=";
subPackages = [ subPackages = [
"cmd/bearer" "cmd/bearer"

View File

@ -20,12 +20,12 @@
"hash": "sha256:0ns8qxcrxj9i76b93xcghl002l8vbkg7ksd435sikig62qr62gf4" "hash": "sha256:0ns8qxcrxj9i76b93xcghl002l8vbkg7ksd435sikig62qr62gf4"
}, },
"5.4": { "5.4": {
"version": "5.4.265", "version": "5.4.266",
"hash": "sha256:05cvvwjiznn7hfd02qklklalg0chahvh5v18w64lcva6kzj9kbjd" "hash": "sha256:1dmcn9i3nvf1gldm1a32gnl5ybwbk2lizb3wa4gc06g7dxz2y1ys"
}, },
"4.19": { "4.19": {
"version": "4.19.303", "version": "4.19.304",
"hash": "sha256:0dlbl47xs7z4yf9cxbxqzd7zs1f9070jr6ck231wgppa6lwwwb82" "hash": "sha256:165mljr8v1cf4vf4a4b44hx089rprkssvi2azq5wbxxg3basbind"
}, },
"6.6": { "6.6": {
"version": "6.6.10", "version": "6.6.10",

View File

@ -1,8 +1,8 @@
{ stdenv, lib, fetchsvn, linux { stdenv, lib, fetchsvn, linux
, scripts ? fetchsvn { , scripts ? fetchsvn {
url = "https://www.fsfla.org/svn/fsfla/software/linux-libre/releases/branches/"; url = "https://www.fsfla.org/svn/fsfla/software/linux-libre/releases/branches/";
rev = "19459"; rev = "19473";
sha256 = "12qx165i6dp9mrsbmizw6ynyxwvq11dmwz00xgy5qgr4ag3y4z4c"; sha256 = "0k9pgjg6k9j00x4m3g6chnhgznr5r1yyqd9x8q7a9q9j88vygszs";
} }
, ... , ...
}: }:

View File

@ -18,11 +18,11 @@ let
''; '';
in stdenv.mkDerivation rec { in stdenv.mkDerivation rec {
pname = "keycloak"; pname = "keycloak";
version = "23.0.3"; version = "23.0.4";
src = fetchzip { src = fetchzip {
url = "https://github.com/keycloak/keycloak/releases/download/${version}/keycloak-${version}.zip"; url = "https://github.com/keycloak/keycloak/releases/download/${version}/keycloak-${version}.zip";
hash = "sha256-5K8+pfn1zoXzBWJevZBx+9kZmefs1AvPoshOKP/dkNY="; hash = "sha256-qvgYH/e+V++Tk39sgELTiUqyoEbBuUoCRNaCiM8ZuoA=";
}; };
nativeBuildInputs = [ makeWrapper jre ]; nativeBuildInputs = [ makeWrapper jre ];

View File

@ -11,7 +11,7 @@ maven.buildMavenPackage rec {
hash = "sha256-pacmx5w1VVWz3HmHO6sc2friNUpzo4zyJI1/TQgCXlc="; hash = "sha256-pacmx5w1VVWz3HmHO6sc2friNUpzo4zyJI1/TQgCXlc=";
}; };
mvnHash = "sha256-rwAc2KtKo4vJ0JWwPquMyt+FHVNTmMpzBPbo8lWDN/A="; mvnHash = "sha256-RjERY434UL9z/gNZFV+wMTITCmTPGanwu61L8sEGaKY=";
installPhase = '' installPhase = ''
runHook preInstall runHook preInstall

View File

@ -8,7 +8,7 @@
buildGoModule rec { buildGoModule rec {
pname = "telegraf"; pname = "telegraf";
version = "1.29.1"; version = "1.29.2";
subPackages = [ "cmd/telegraf" ]; subPackages = [ "cmd/telegraf" ];
@ -16,10 +16,10 @@ buildGoModule rec {
owner = "influxdata"; owner = "influxdata";
repo = "telegraf"; repo = "telegraf";
rev = "v${version}"; rev = "v${version}";
hash = "sha256-iEVVMARdt3gibahxU9snwo13yi6gINWWdhFkTHLYAuU="; hash = "sha256-Z2+G4H1O4e77V9jfW+REK4PGdJgoPz+JgLxX/WqBoaY=";
}; };
vendorHash = "sha256-R6+GKyGD7tUulOA6qEPUlSMj2/zXdLmmrX1HubLNCEc="; vendorHash = "sha256-mPw3KfQy9DRqv8E6zzYAbeUaLaNfiNPU77ic+JqqBuM=";
proxyVendor = true; proxyVendor = true;
ldflags = [ ldflags = [

View File

@ -10,7 +10,7 @@ let
patch -p1 < ${./hello.patch} patch -p1 < ${./hello.patch}
''; '';
}); });
checkpointBuiltHello = checkpointBuildTools.mkCheckpointedBuild patchedHello baseHelloArtifacts; checkpointBuiltHello = checkpointBuildTools.mkCheckpointBuild patchedHello baseHelloArtifacts;
checkpointBuiltHelloWithCheck = checkpointBuiltHello.overrideAttrs (old: { checkpointBuiltHelloWithCheck = checkpointBuiltHello.overrideAttrs (old: {
doCheck = true; doCheck = true;
@ -41,7 +41,7 @@ let
''; '';
}); });
checkpointBuiltHelloWithRemovedFile = checkpointBuildTools.mkCheckpointedBuild patchedHelloRemoveFile baseHelloRemoveFileArtifacts; checkpointBuiltHelloWithRemovedFile = checkpointBuildTools.mkCheckpointBuild patchedHelloRemoveFile baseHelloRemoveFileArtifacts;
in in
stdenv.mkDerivation { stdenv.mkDerivation {
name = "patched-hello-returns-correct-output"; name = "patched-hello-returns-correct-output";

View File

@ -113,7 +113,7 @@ with pkgs;
install-shell-files = callPackage ./install-shell-files {}; install-shell-files = callPackage ./install-shell-files {};
checkpoint-build = callPackage ./checkpointBuild {}; checkpointBuildTools = callPackage ./checkpointBuild {};
kernel-config = callPackage ./kernel.nix {}; kernel-config = callPackage ./kernel.nix {};

View File

@ -28,6 +28,8 @@ These checks are performed by this tool:
- Each package directory must not refer to files outside itself using symlinks or Nix path expressions. - Each package directory must not refer to files outside itself using symlinks or Nix path expressions.
### Nix evaluation checks ### Nix evaluation checks
Evaluate Nixpkgs with `system` set to `x86_64-linux` and check that:
- For each package directory, the `pkgs.${name}` attribute must be defined as `callPackage pkgs/by-name/${shard}/${name}/package.nix args` for some `args`. - For each package directory, the `pkgs.${name}` attribute must be defined as `callPackage pkgs/by-name/${shard}/${name}/package.nix args` for some `args`.
- For each package directory, `pkgs.lib.isDerivation pkgs.${name}` must be `true`. - For each package directory, `pkgs.lib.isDerivation pkgs.${name}` must be `true`.

View File

@ -1,11 +1,7 @@
# Takes a path to nixpkgs and a path to the json-encoded list of attributes to check. # Takes a path to nixpkgs and a path to the json-encoded list of attributes to check.
# Returns an attribute set containing information on each requested attribute. # Returns an value containing information on each requested attribute,
# If the attribute is missing from Nixpkgs it's also missing from the result. # which is decoded on the Rust side.
# # See ./eval.rs for the meaning of the returned values
# The returned information is an attribute set with:
# - call_package_path: The <path> from `<attr> = callPackage <path> { ... }`,
# or null if it's not defined as with callPackage, or if the <path> is not a path
# - is_derivation: The result of `lib.isDerivation <attr>`
{ {
attrsPath, attrsPath,
nixpkgsPath, nixpkgsPath,
@ -13,70 +9,85 @@
let let
attrs = builtins.fromJSON (builtins.readFile attrsPath); attrs = builtins.fromJSON (builtins.readFile attrsPath);
# This overlay mocks callPackage to persist the path of the first argument nixpkgsPathLength = builtins.stringLength (toString nixpkgsPath) + 1;
callPackageOverlay = self: super: { removeNixpkgsPrefix = builtins.substring nixpkgsPathLength (-1);
# We need access to the `callPackage` arguments of each attribute.
# The only way to do so is to override `callPackage` with our own version that adds this information to the result,
# and then try to access this information.
overlay = final: prev: {
# Information for attributes defined using `callPackage`
callPackage = fn: args: callPackage = fn: args:
let addVariantInfo (prev.callPackage fn args) {
result = super.callPackage fn args; Manual = {
variantInfo._attributeVariant = { path =
# These names are used by the deserializer on the Rust side
CallPackage.path =
if builtins.isPath fn then if builtins.isPath fn then
toString fn removeNixpkgsPrefix (toString fn)
else else
null; null;
CallPackage.empty_arg = empty_arg =
args == { }; args == { };
}; };
in };
if builtins.isAttrs result then
# If this was the last overlay to be applied, we could just only return the `_callPackagePath`,
# but that's not the case because stdenv has another overlays on top of user-provided ones.
# So to not break the stdenv build we need to return the mostly proper result here
result // variantInfo
else
# It's very rare that callPackage doesn't return an attribute set, but it can occur.
variantInfo;
# Information for attributes that are auto-called from pkgs/by-name.
# This internal attribute is only used by pkgs/by-name
_internalCallByNamePackageFile = file: _internalCallByNamePackageFile = file:
let addVariantInfo (prev._internalCallByNamePackageFile file) {
result = super._internalCallByNamePackageFile file; Auto = null;
variantInfo._attributeVariant = { };
# This name is used by the deserializer on the Rust side
AutoCalled = null;
};
in
if builtins.isAttrs result then
# If this was the last overlay to be applied, we could just only return the `_callPackagePath`,
# but that's not the case because stdenv has another overlays on top of user-provided ones.
# So to not break the stdenv build we need to return the mostly proper result here
result // variantInfo
else
# It's very rare that callPackage doesn't return an attribute set, but it can occur.
variantInfo;
}; };
# We can't just replace attribute values with their info in the overlay,
# because attributes can depend on other attributes, so this would break evaluation.
addVariantInfo = value: variant:
if builtins.isAttrs value then
value // {
_callPackageVariant = variant;
}
else
# It's very rare that callPackage doesn't return an attribute set, but it can occur.
# In such a case we can't really return anything sensible that would include the info,
# so just don't return the info and let the consumer handle it.
value;
pkgs = import nixpkgsPath { pkgs = import nixpkgsPath {
# Don't let the users home directory influence this result # Don't let the users home directory influence this result
config = { }; config = { };
overlays = [ callPackageOverlay ]; overlays = [ overlay ];
# We check evaluation and callPackage only for x86_64-linux.
# Not ideal, but hard to fix
system = "x86_64-linux";
}; };
attrInfo = attr: attrInfo = name: value:
let if ! builtins.isAttrs value then
value = pkgs.${attr}; {
in NonAttributeSet = null;
{ }
# These names are used by the deserializer on the Rust side else if ! value ? _callPackageVariant then
variant = value._attributeVariant or { Other = null; }; {
is_derivation = pkgs.lib.isDerivation value; NonCallPackage = null;
}; }
else
{
CallPackage = {
call_package_variant = value._callPackageVariant;
is_derivation = pkgs.lib.isDerivation value;
};
};
attrInfos = builtins.listToAttrs (map (name: { attrInfos = map (name: [
inherit name; name
value = attrInfo name; (
}) attrs); if ! pkgs ? ${name} then
{ Missing = null; }
else
{ Existing = attrInfo name pkgs.${name}; }
)
]) attrs;
in in
# Filter out attributes not in Nixpkgs attrInfos
builtins.intersectAttrs pkgs attrInfos

View File

@ -6,33 +6,48 @@ use std::path::Path;
use anyhow::Context; use anyhow::Context;
use serde::Deserialize; use serde::Deserialize;
use std::collections::HashMap;
use std::path::PathBuf; use std::path::PathBuf;
use std::process; use std::process;
use tempfile::NamedTempFile; use tempfile::NamedTempFile;
/// Attribute set of this structure is returned by eval.nix /// Attribute set of this structure is returned by eval.nix
#[derive(Deserialize)] #[derive(Deserialize)]
struct AttributeInfo { enum ByNameAttribute {
variant: AttributeVariant, /// The attribute doesn't exist at all
Missing,
Existing(AttributeInfo),
}
#[derive(Deserialize)]
enum AttributeInfo {
/// The attribute exists, but its value isn't an attribute set
NonAttributeSet,
/// The attribute exists, but its value isn't defined using callPackage
NonCallPackage,
/// The attribute exists and its value is an attribute set
CallPackage(CallPackageInfo),
}
#[derive(Deserialize)]
struct CallPackageInfo {
call_package_variant: CallPackageVariant,
/// Whether the attribute is a derivation (`lib.isDerivation`)
is_derivation: bool, is_derivation: bool,
} }
#[derive(Deserialize)] #[derive(Deserialize)]
enum AttributeVariant { enum CallPackageVariant {
/// The attribute is auto-called as pkgs.callPackage using pkgs/by-name, /// The attribute is auto-called as pkgs.callPackage using pkgs/by-name,
/// and it is not overridden by a definition in all-packages.nix /// and it is not overridden by a definition in all-packages.nix
AutoCalled, Auto,
/// The attribute is defined as a pkgs.callPackage <path> <args>, /// The attribute is defined as a pkgs.callPackage <path> <args>,
/// and overridden by all-packages.nix /// and overridden by all-packages.nix
CallPackage { Manual {
/// The <path> argument or None if it's not a path /// The <path> argument or None if it's not a path
path: Option<PathBuf>, path: Option<PathBuf>,
/// true if <args> is { } /// true if <args> is { }
empty_arg: bool, empty_arg: bool,
}, },
/// The attribute is not defined as pkgs.callPackage
Other,
} }
/// Check that the Nixpkgs attribute values corresponding to the packages in pkgs/by-name are /// Check that the Nixpkgs attribute values corresponding to the packages in pkgs/by-name are
@ -45,20 +60,22 @@ pub fn check_values(
) -> validation::Result<ratchet::Nixpkgs> { ) -> validation::Result<ratchet::Nixpkgs> {
// Write the list of packages we need to check into a temporary JSON file. // Write the list of packages we need to check into a temporary JSON file.
// This can then get read by the Nix evaluation. // This can then get read by the Nix evaluation.
let attrs_file = NamedTempFile::new().context("Failed to create a temporary file")?; let attrs_file = NamedTempFile::new().with_context(|| "Failed to create a temporary file")?;
// We need to canonicalise this path because if it's a symlink (which can be the case on // We need to canonicalise this path because if it's a symlink (which can be the case on
// Darwin), Nix would need to read both the symlink and the target path, therefore need 2 // Darwin), Nix would need to read both the symlink and the target path, therefore need 2
// NIX_PATH entries for restrict-eval. But if we resolve the symlinks then only one predictable // NIX_PATH entries for restrict-eval. But if we resolve the symlinks then only one predictable
// entry is needed. // entry is needed.
let attrs_file_path = attrs_file.path().canonicalize()?; let attrs_file_path = attrs_file.path().canonicalize()?;
serde_json::to_writer(&attrs_file, &package_names).context(format!( serde_json::to_writer(&attrs_file, &package_names).with_context(|| {
"Failed to serialise the package names to the temporary path {}", format!(
attrs_file_path.display() "Failed to serialise the package names to the temporary path {}",
))?; attrs_file_path.display()
)
})?;
let expr_path = std::env::var("NIX_CHECK_BY_NAME_EXPR_PATH") let expr_path = std::env::var("NIX_CHECK_BY_NAME_EXPR_PATH")
.context("Could not get environment variable NIX_CHECK_BY_NAME_EXPR_PATH")?; .with_context(|| "Could not get environment variable NIX_CHECK_BY_NAME_EXPR_PATH")?;
// With restrict-eval, only paths in NIX_PATH can be accessed, so we explicitly specify the // With restrict-eval, only paths in NIX_PATH can be accessed, so we explicitly specify the
// ones needed needed // ones needed needed
let mut command = process::Command::new("nix-instantiate"); let mut command = process::Command::new("nix-instantiate");
@ -97,80 +114,96 @@ pub fn check_values(
let result = command let result = command
.output() .output()
.context(format!("Failed to run command {command:?}"))?; .with_context(|| format!("Failed to run command {command:?}"))?;
if !result.status.success() { if !result.status.success() {
anyhow::bail!("Failed to run command {command:?}"); anyhow::bail!("Failed to run command {command:?}");
} }
// Parse the resulting JSON value // Parse the resulting JSON value
let actual_files: HashMap<String, AttributeInfo> = serde_json::from_slice(&result.stdout) let attributes: Vec<(String, ByNameAttribute)> = serde_json::from_slice(&result.stdout)
.context(format!( .with_context(|| {
"Failed to deserialise {}", format!(
String::from_utf8_lossy(&result.stdout) "Failed to deserialise {}",
))?; String::from_utf8_lossy(&result.stdout)
)
})?;
Ok( let check_result = validation::sequence(attributes.into_iter().map(
validation::sequence(package_names.into_iter().map(|package_name| { |(attribute_name, attribute_value)| {
let relative_package_file = structure::relative_file_for_package(&package_name); let relative_package_file = structure::relative_file_for_package(&attribute_name);
let absolute_package_file = nixpkgs_path.join(&relative_package_file);
if let Some(attribute_info) = actual_files.get(&package_name) { use ratchet::RatchetState::*;
let check_result = if !attribute_info.is_derivation { use AttributeInfo::*;
NixpkgsProblem::NonDerivation { use ByNameAttribute::*;
relative_package_file: relative_package_file.clone(), use CallPackageVariant::*;
package_name: package_name.clone(),
}
.into()
} else {
Success(())
};
let check_result = check_result.and(match &attribute_info.variant { let check_result = match attribute_value {
AttributeVariant::AutoCalled => Success(ratchet::Package { Missing => NixpkgsProblem::UndefinedAttr {
empty_non_auto_called: ratchet::EmptyNonAutoCalled::Valid,
}),
AttributeVariant::CallPackage { path, empty_arg } => {
let correct_file = if let Some(call_package_path) = path {
absolute_package_file == *call_package_path
} else {
false
};
if correct_file {
Success(ratchet::Package {
// Empty arguments for non-auto-called packages are not allowed anymore.
empty_non_auto_called: if *empty_arg {
ratchet::EmptyNonAutoCalled::Invalid
} else {
ratchet::EmptyNonAutoCalled::Valid
},
})
} else {
NixpkgsProblem::WrongCallPackage {
relative_package_file: relative_package_file.clone(),
package_name: package_name.clone(),
}
.into()
}
}
AttributeVariant::Other => NixpkgsProblem::WrongCallPackage {
relative_package_file: relative_package_file.clone(),
package_name: package_name.clone(),
}
.into(),
});
check_result.map(|value| (package_name.clone(), value))
} else {
NixpkgsProblem::UndefinedAttr {
relative_package_file: relative_package_file.clone(), relative_package_file: relative_package_file.clone(),
package_name: package_name.clone(), package_name: attribute_name.clone(),
} }
.into() .into(),
} Existing(NonAttributeSet) => NixpkgsProblem::NonDerivation {
})) relative_package_file: relative_package_file.clone(),
.map(|elems| ratchet::Nixpkgs { package_name: attribute_name.clone(),
packages: elems.into_iter().collect(), }
}), .into(),
) Existing(NonCallPackage) => NixpkgsProblem::WrongCallPackage {
relative_package_file: relative_package_file.clone(),
package_name: attribute_name.clone(),
}
.into(),
Existing(CallPackage(CallPackageInfo {
is_derivation,
call_package_variant,
})) => {
let check_result = if !is_derivation {
NixpkgsProblem::NonDerivation {
relative_package_file: relative_package_file.clone(),
package_name: attribute_name.clone(),
}
.into()
} else {
Success(())
};
check_result.and(match &call_package_variant {
Auto => Success(ratchet::Package {
empty_non_auto_called: Tight,
}),
Manual { path, empty_arg } => {
let correct_file = if let Some(call_package_path) = path {
relative_package_file == *call_package_path
} else {
false
};
if correct_file {
Success(ratchet::Package {
// Empty arguments for non-auto-called packages are not allowed anymore.
empty_non_auto_called: if *empty_arg {
Loose(ratchet::EmptyNonAutoCalled)
} else {
Tight
},
})
} else {
NixpkgsProblem::WrongCallPackage {
relative_package_file: relative_package_file.clone(),
package_name: attribute_name.clone(),
}
.into()
}
}
})
}
};
check_result.map(|value| (attribute_name.clone(), value))
},
));
Ok(check_result.map(|elems| ratchet::Nixpkgs {
package_names,
package_map: elems.into_iter().collect(),
}))
} }

View File

@ -38,15 +38,13 @@ pub struct Args {
/// Path to the base Nixpkgs to run ratchet checks against. /// Path to the base Nixpkgs to run ratchet checks against.
/// For PRs, this should be set to a checkout of the PRs base branch. /// For PRs, this should be set to a checkout of the PRs base branch.
/// If not specified, no ratchet checks will be performed.
/// However, this flag will become required once CI uses it.
#[arg(long)] #[arg(long)]
base: Option<PathBuf>, base: PathBuf,
} }
fn main() -> ExitCode { fn main() -> ExitCode {
let args = Args::parse(); let args = Args::parse();
match process(args.base.as_deref(), &args.nixpkgs, &[], &mut io::stderr()) { match process(&args.base, &args.nixpkgs, &[], &mut io::stderr()) {
Ok(true) => { Ok(true) => {
eprintln!("{}", "Validated successfully".green()); eprintln!("{}", "Validated successfully".green());
ExitCode::SUCCESS ExitCode::SUCCESS
@ -77,7 +75,7 @@ fn main() -> ExitCode {
/// - `Ok(false)` if there are problems, all of which will be written to `error_writer`. /// - `Ok(false)` if there are problems, all of which will be written to `error_writer`.
/// - `Ok(true)` if there are no problems /// - `Ok(true)` if there are no problems
pub fn process<W: io::Write>( pub fn process<W: io::Write>(
base_nixpkgs: Option<&Path>, base_nixpkgs: &Path,
main_nixpkgs: &Path, main_nixpkgs: &Path,
eval_accessible_paths: &[&Path], eval_accessible_paths: &[&Path],
error_writer: &mut W, error_writer: &mut W,
@ -87,18 +85,14 @@ pub fn process<W: io::Write>(
let check_result = main_result.result_map(|nixpkgs_version| { let check_result = main_result.result_map(|nixpkgs_version| {
// If the main Nixpkgs doesn't have any problems, run the ratchet checks against the base // If the main Nixpkgs doesn't have any problems, run the ratchet checks against the base
// Nixpkgs // Nixpkgs
if let Some(base) = base_nixpkgs { check_nixpkgs(base_nixpkgs, eval_accessible_paths, error_writer)?.result_map(
check_nixpkgs(base, eval_accessible_paths, error_writer)?.result_map( |base_nixpkgs_version| {
|base_nixpkgs_version| { Ok(ratchet::Nixpkgs::compare(
Ok(ratchet::Nixpkgs::compare( base_nixpkgs_version,
Some(base_nixpkgs_version), nixpkgs_version,
nixpkgs_version, ))
)) },
}, )
)
} else {
Ok(ratchet::Nixpkgs::compare(None, nixpkgs_version))
}
})?; })?;
match check_result { match check_result {
@ -123,10 +117,12 @@ pub fn check_nixpkgs<W: io::Write>(
error_writer: &mut W, error_writer: &mut W,
) -> validation::Result<ratchet::Nixpkgs> { ) -> validation::Result<ratchet::Nixpkgs> {
Ok({ Ok({
let nixpkgs_path = nixpkgs_path.canonicalize().context(format!( let nixpkgs_path = nixpkgs_path.canonicalize().with_context(|| {
"Nixpkgs path {} could not be resolved", format!(
nixpkgs_path.display() "Nixpkgs path {} could not be resolved",
))?; nixpkgs_path.display()
)
})?;
if !nixpkgs_path.join(utils::BASE_SUBPATH).exists() { if !nixpkgs_path.join(utils::BASE_SUBPATH).exists() {
writeln!( writeln!(
@ -234,16 +230,16 @@ mod tests {
let base_path = path.join("base"); let base_path = path.join("base");
let base_nixpkgs = if base_path.exists() { let base_nixpkgs = if base_path.exists() {
Some(base_path.as_path()) base_path.as_path()
} else { } else {
None Path::new("tests/empty-base")
}; };
// We don't want coloring to mess up the tests // We don't want coloring to mess up the tests
let writer = temp_env::with_var("NO_COLOR", Some("1"), || -> anyhow::Result<_> { let writer = temp_env::with_var("NO_COLOR", Some("1"), || -> anyhow::Result<_> {
let mut writer = vec![]; let mut writer = vec![];
process(base_nixpkgs, &path, &[&extra_nix_path], &mut writer) process(base_nixpkgs, &path, &[&extra_nix_path], &mut writer)
.context(format!("Failed test case {name}"))?; .with_context(|| format!("Failed test case {name}"))?;
Ok(writer) Ok(writer)
})?; })?;

View File

@ -10,31 +10,20 @@ use std::collections::HashMap;
/// The ratchet value for the entirety of Nixpkgs. /// The ratchet value for the entirety of Nixpkgs.
#[derive(Default)] #[derive(Default)]
pub struct Nixpkgs { pub struct Nixpkgs {
/// The ratchet values for each package in `pkgs/by-name` /// Sorted list of attributes in package_map
pub packages: HashMap<String, Package>, pub package_names: Vec<String>,
/// The ratchet values for all packages
pub package_map: HashMap<String, Package>,
} }
impl Nixpkgs { impl Nixpkgs {
/// Validates the ratchet checks for Nixpkgs /// Validates the ratchet checks for Nixpkgs
pub fn compare(optional_from: Option<Self>, to: Self) -> Validation<()> { pub fn compare(from: Self, to: Self) -> Validation<()> {
validation::sequence_( validation::sequence_(
// We only loop over the current attributes, // We only loop over the current attributes,
// we don't need to check ones that were removed // we don't need to check ones that were removed
to.packages.into_iter().map(|(name, attr_to)| { to.package_names.into_iter().map(|name| {
let attr_from = if let Some(from) = &optional_from { Package::compare(&name, from.package_map.get(&name), &to.package_map[&name])
from.packages.get(&name)
} else {
// This pretends that if there's no base version to compare against, all
// attributes existed without conforming to the new strictness check for
// backwards compatibility.
// TODO: Remove this case. This is only needed because the `--base`
// argument is still optional, which doesn't need to be once CI is updated
// to pass it.
Some(&Package {
empty_non_auto_called: EmptyNonAutoCalled::Invalid,
})
};
Package::compare(&name, attr_from, &attr_to)
}), }),
) )
} }
@ -43,13 +32,13 @@ impl Nixpkgs {
/// The ratchet value for a single package in `pkgs/by-name` /// The ratchet value for a single package in `pkgs/by-name`
pub struct Package { pub struct Package {
/// The ratchet value for the check for non-auto-called empty arguments /// The ratchet value for the check for non-auto-called empty arguments
pub empty_non_auto_called: EmptyNonAutoCalled, pub empty_non_auto_called: RatchetState<EmptyNonAutoCalled>,
} }
impl Package { impl Package {
/// Validates the ratchet checks for a single package defined in `pkgs/by-name` /// Validates the ratchet checks for a single package defined in `pkgs/by-name`
pub fn compare(name: &str, optional_from: Option<&Self>, to: &Self) -> Validation<()> { pub fn compare(name: &str, optional_from: Option<&Self>, to: &Self) -> Validation<()> {
EmptyNonAutoCalled::compare( RatchetState::<EmptyNonAutoCalled>::compare(
name, name,
optional_from.map(|x| &x.empty_non_auto_called), optional_from.map(|x| &x.empty_non_auto_called),
&to.empty_non_auto_called, &to.empty_non_auto_called,
@ -57,29 +46,59 @@ impl Package {
} }
} }
/// The ratchet value of a single package in `pkgs/by-name` /// The ratchet state of a generic ratchet check.
pub enum RatchetState<Context> {
/// The ratchet is loose, it can be tightened more.
/// In other words, this is the legacy state we're trying to move away from.
/// Introducing new instances is not allowed but previous instances will continue to be allowed.
/// The `Context` is context for error messages in case a new instance of this state is
/// introduced
Loose(Context),
/// The ratchet is tight, it can't be tightened any further.
/// This is either because we already use the latest state, or because the ratchet isn't
/// relevant.
Tight,
}
/// A trait that can convert an attribute-specific error context into a NixpkgsProblem
pub trait ToNixpkgsProblem {
/// How to convert an attribute-specific error context into a NixpkgsProblem
fn to_nixpkgs_problem(name: &str, context: &Self, existed_before: bool) -> NixpkgsProblem;
}
impl<Context: ToNixpkgsProblem> RatchetState<Context> {
/// Compare the previous ratchet state of an attribute to the new state.
/// The previous state may be `None` in case the attribute is new.
fn compare(name: &str, optional_from: Option<&Self>, to: &Self) -> Validation<()> {
// If we don't have a previous state, enforce a tight ratchet
let from = optional_from.unwrap_or(&RatchetState::Tight);
match (from, to) {
// Always okay to keep it tight or tighten the ratchet
(_, RatchetState::Tight) => Success(()),
// Grandfathering policy for a loose ratchet
(RatchetState::Loose { .. }, RatchetState::Loose { .. }) => Success(()),
// Loosening a ratchet is now allowed
(RatchetState::Tight, RatchetState::Loose(context)) => {
Context::to_nixpkgs_problem(name, context, optional_from.is_some()).into()
}
}
}
}
/// The ratchet value of an attribute
/// for the non-auto-called empty argument check of a single. /// for the non-auto-called empty argument check of a single.
/// ///
/// This checks that packages defined in `pkgs/by-name` cannot be overridden /// This checks that packages defined in `pkgs/by-name` cannot be overridden
/// with an empty second argument like `callPackage ... { }`. /// with an empty second argument like `callPackage ... { }`.
#[derive(PartialEq, PartialOrd)] pub struct EmptyNonAutoCalled;
pub enum EmptyNonAutoCalled {
Invalid,
Valid,
}
impl EmptyNonAutoCalled { impl ToNixpkgsProblem for EmptyNonAutoCalled {
/// Validates the non-auto-called empty argument ratchet check for a single package defined in `pkgs/by-name` fn to_nixpkgs_problem(name: &str, _context: &Self, _existed_before: bool) -> NixpkgsProblem {
fn compare(name: &str, optional_from: Option<&Self>, to: &Self) -> Validation<()> { NixpkgsProblem::WrongCallPackage {
let from = optional_from.unwrap_or(&Self::Valid); relative_package_file: structure::relative_file_for_package(name),
if to >= from { package_name: name.to_owned(),
Success(())
} else {
NixpkgsProblem::WrongCallPackage {
relative_package_file: structure::relative_file_for_package(name),
package_name: name.to_owned(),
}
.into()
} }
} }
} }

View File

@ -17,10 +17,12 @@ pub fn check_references(
) -> validation::Result<()> { ) -> validation::Result<()> {
// The empty argument here is the subpath under the package directory to check // The empty argument here is the subpath under the package directory to check
// An empty one means the package directory itself // An empty one means the package directory itself
check_path(relative_package_dir, absolute_package_dir, Path::new("")).context(format!( check_path(relative_package_dir, absolute_package_dir, Path::new("")).with_context(|| {
"While checking the references in package directory {}", format!(
relative_package_dir.display() "While checking the references in package directory {}",
)) relative_package_dir.display()
)
})
} }
/// Checks for a specific path to not have references outside /// Checks for a specific path to not have references outside
@ -62,7 +64,9 @@ fn check_path(
.map(|entry| { .map(|entry| {
let entry_subpath = subpath.join(entry.file_name()); let entry_subpath = subpath.join(entry.file_name());
check_path(relative_package_dir, absolute_package_dir, &entry_subpath) check_path(relative_package_dir, absolute_package_dir, &entry_subpath)
.context(format!("Error while recursing into {}", subpath.display())) .with_context(|| {
format!("Error while recursing into {}", subpath.display())
})
}) })
.collect_vec()?, .collect_vec()?,
) )
@ -70,8 +74,8 @@ fn check_path(
// Only check Nix files // Only check Nix files
if let Some(ext) = path.extension() { if let Some(ext) = path.extension() {
if ext == OsStr::new("nix") { if ext == OsStr::new("nix") {
check_nix_file(relative_package_dir, absolute_package_dir, subpath).context( check_nix_file(relative_package_dir, absolute_package_dir, subpath).with_context(
format!("Error while checking Nix file {}", subpath.display()), || format!("Error while checking Nix file {}", subpath.display()),
)? )?
} else { } else {
Success(()) Success(())
@ -93,13 +97,12 @@ fn check_nix_file(
subpath: &Path, subpath: &Path,
) -> validation::Result<()> { ) -> validation::Result<()> {
let path = absolute_package_dir.join(subpath); let path = absolute_package_dir.join(subpath);
let parent_dir = path.parent().context(format!( let parent_dir = path
"Could not get parent of path {}", .parent()
subpath.display() .with_context(|| format!("Could not get parent of path {}", subpath.display()))?;
))?;
let contents = let contents = read_to_string(&path)
read_to_string(&path).context(format!("Could not read file {}", subpath.display()))?; .with_context(|| format!("Could not read file {}", subpath.display()))?;
let root = Root::parse(&contents); let root = Root::parse(&contents);
if let Some(error) = root.errors().first() { if let Some(error) = root.errors().first() {

View File

@ -10,10 +10,10 @@ pub const PACKAGE_NIX_FILENAME: &str = "package.nix";
pub fn read_dir_sorted(base_dir: &Path) -> anyhow::Result<Vec<fs::DirEntry>> { pub fn read_dir_sorted(base_dir: &Path) -> anyhow::Result<Vec<fs::DirEntry>> {
let listing = base_dir let listing = base_dir
.read_dir() .read_dir()
.context(format!("Could not list directory {}", base_dir.display()))?; .with_context(|| format!("Could not list directory {}", base_dir.display()))?;
let mut shard_entries = listing let mut shard_entries = listing
.collect::<io::Result<Vec<_>>>() .collect::<io::Result<Vec<_>>>()
.context(format!("Could not list directory {}", base_dir.display()))?; .with_context(|| format!("Could not list directory {}", base_dir.display()))?;
shard_entries.sort_by_key(|entry| entry.file_name()); shard_entries.sort_by_key(|entry| entry.file_name());
Ok(shard_entries) Ok(shard_entries)
} }

View File

@ -0,0 +1 @@
import ../mock-nixpkgs.nix { root = ./.; }

View File

@ -19,6 +19,8 @@ It returns a Nixpkgs-like function that can be auto-called and evaluates to an a
overlays ? [], overlays ? [],
# Passed by the checker to make sure a real Nixpkgs isn't influenced by impurities # Passed by the checker to make sure a real Nixpkgs isn't influenced by impurities
config ? {}, config ? {},
# Passed by the checker to make sure a real Nixpkgs isn't influenced by impurities
system ? null,
}: }:
let let

View File

@ -5,13 +5,13 @@
buildGoModule rec { buildGoModule rec {
pname = "ddns-go"; pname = "ddns-go";
version = "5.7.0"; version = "5.7.1";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "jeessy2"; owner = "jeessy2";
repo = pname; repo = pname;
rev = "v${version}"; rev = "v${version}";
hash = "sha256-/GZxPM0f1W72OtpEknw0TLQ1eFDF5C98umX0Q8MX46s="; hash = "sha256-PKshYKywqL706pVgruWQ9M0QbK2btKu28+wmnlFdDgE=";
}; };
vendorHash = "sha256-/kKFMo4PRWwXUuurNHMG36TV3EpcEikgf03/y/aKpXo="; vendorHash = "sha256-/kKFMo4PRWwXUuurNHMG36TV3EpcEikgf03/y/aKpXo=";

View File

@ -14,17 +14,17 @@ in
assert stdenv.isLinux; # better than `called with unexpected argument 'enableJavaFX'` assert stdenv.isLinux; # better than `called with unexpected argument 'enableJavaFX'`
mavenJdk.buildMavenPackage rec { mavenJdk.buildMavenPackage rec {
pname = "cryptomator"; pname = "cryptomator";
version = "1.11.0"; version = "1.11.1";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "cryptomator"; owner = "cryptomator";
repo = "cryptomator"; repo = "cryptomator";
rev = version; rev = version;
hash = "sha256-NMNlDEUpwKUywzhXhxlNX7NiE+6wOov2Yt8nTfbKTNI="; hash = "sha256-Y+oG2NF4Vsklp1W22Xv+XrkY6vwn23FkzAXG/5828Og=";
}; };
mvnParameters = "-Dmaven.test.skip=true -Plinux"; mvnParameters = "-Dmaven.test.skip=true -Plinux";
mvnHash = "sha256-cmwU9k7TRRJ07bT1EmY3pIBkvvqmFyE7WJeVL7VFDyc="; mvnHash = "sha256-cXmnJHgKW6SGnhHFuFJP/DKNmFacfHbC3nQ2uVdIvUE=";
preBuild = '' preBuild = ''
VERSION=${version} VERSION=${version}

View File

@ -41457,6 +41457,8 @@ with pkgs;
xpad = callPackage ../applications/misc/xpad { }; xpad = callPackage ../applications/misc/xpad { };
xpipe = callPackage ../applications/networking/xpipe { };
xsane = callPackage ../applications/graphics/sane/xsane.nix { }; xsane = callPackage ../applications/graphics/sane/xsane.nix { };
xsser = python3Packages.callPackage ../tools/security/xsser { }; xsser = python3Packages.callPackage ../tools/security/xsser { };

View File

@ -342,6 +342,7 @@ mapAliases ({
pymyq = python-myq; # added 2023-10-20 pymyq = python-myq; # added 2023-10-20
python-myq = throw "python-myq has been removed, as the service provider has decided to block its API requests"; # added 2023-12-07 python-myq = throw "python-myq has been removed, as the service provider has decided to block its API requests"; # added 2023-12-07
pyqt4 = throw "pyqt4 has been removed, because it depended on the long EOL qt4"; # added 2022-06-09 pyqt4 = throw "pyqt4 has been removed, because it depended on the long EOL qt4"; # added 2022-06-09
pyqt5_with_qtwebkit = pyqt5-webkit; # added 2024-01-07
pyramid_beaker = pyramid-beaker; # added 2023-08-23 pyramid_beaker = pyramid-beaker; # added 2023-08-23
pyramid_chameleon = pyramid-chameleon; # added 2023-08-23 pyramid_chameleon = pyramid-chameleon; # added 2023-08-23
pyramid_exclog = pyramid-exclog; # added 2023-08-24 pyramid_exclog = pyramid-exclog; # added 2023-08-24

View File

@ -1487,6 +1487,8 @@ self: super: with self; {
bip32 = callPackage ../development/python-modules/bip32 { }; bip32 = callPackage ../development/python-modules/bip32 { };
birch = callPackage ../development/python-modules/birch { };
bitarray = callPackage ../development/python-modules/bitarray { }; bitarray = callPackage ../development/python-modules/bitarray { };
bitbox02 = callPackage ../development/python-modules/bitbox02 { }; bitbox02 = callPackage ../development/python-modules/bitbox02 { };
@ -11008,11 +11010,11 @@ self: super: with self; {
}; };
/* /*
`pyqt5_with_qtwebkit` should not be used by python libraries in `pyqt5-webkit` should not be used by python libraries in
pkgs/development/python-modules/*. Putting this attribute in pkgs/development/python-modules/*. Putting this attribute in
`propagatedBuildInputs` may cause collisions. `propagatedBuildInputs` may cause collisions.
*/ */
pyqt5_with_qtwebkit = self.pyqt5.override { pyqt5-webkit = self.pyqt5.override {
withWebKit = true; withWebKit = true;
}; };
@ -13814,6 +13816,8 @@ self: super: with self; {
strawberry-graphql = callPackage ../development/python-modules/strawberry-graphql { }; strawberry-graphql = callPackage ../development/python-modules/strawberry-graphql { };
strct = callPackage ../development/python-modules/strct { };
streamdeck = callPackage ../development/python-modules/streamdeck { }; streamdeck = callPackage ../development/python-modules/streamdeck { };
streaming-form-data = callPackage ../development/python-modules/streaming-form-data { }; streaming-form-data = callPackage ../development/python-modules/streaming-form-data { };