Merge master into staging-next

This commit is contained in:
github-actions[bot] 2024-01-10 00:02:18 +00:00 committed by GitHub
commit 122355be99
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
66 changed files with 1002 additions and 425 deletions

View File

@ -2,35 +2,38 @@
`pkgs.checkpointBuildTools` provides a way to build derivations incrementally. It consists of two functions to make checkpoint builds using Nix possible.
For hermeticity, Nix derivations do not allow any state to carry over between builds, making a transparent incremental build within a derivation impossible.
For hermeticity, Nix derivations do not allow any state to be carried over between builds, making a transparent incremental build within a derivation impossible.
However, we can tell Nix explicitly what the previous build state was, by representing that previous state as a derivation output. This allows the passed build state to be used for an incremental build.
To change a normal derivation to a checkpoint based build, these steps must be taken:
- apply `prepareCheckpointBuild` on the desired derivation
e.g.:
- apply `prepareCheckpointBuild` on the desired derivation, e.g.
```nix
checkpointArtifacts = (pkgs.checkpointBuildTools.prepareCheckpointBuild pkgs.virtualbox);
```
- change something you want in the sources of the package. (e.g. using a source override)
- change something you want in the sources of the package, e.g. use a source override:
```nix
changedVBox = pkgs.virtualbox.overrideAttrs (old: {
src = path/to/vbox/sources;
}
});
```
- use `mkCheckpointedBuild changedVBox buildOutput`
- use `mkCheckpointBuild changedVBox checkpointArtifacts`
- enjoy shorter build times
## Example {#sec-checkpoint-build-example}
```nix
{ pkgs ? import <nixpkgs> {} }: with (pkgs) checkpointBuildTools;
{ pkgs ? import <nixpkgs> {} }:
let
helloCheckpoint = checkpointBuildTools.prepareCheckpointBuild pkgs.hello;
inherit (pkgs.checkpointBuildTools)
prepareCheckpointBuild
mkCheckpointBuild
;
helloCheckpoint = prepareCheckpointBuild pkgs.hello;
changedHello = pkgs.hello.overrideAttrs (_: {
doCheck = false;
patchPhase = ''
sed -i 's/Hello, world!/Hello, Nix!/g' src/hello.c
'';
});
in checkpointBuildTools.mkCheckpointBuild changedHello helloCheckpoint
in mkCheckpointBuild changedHello helloCheckpoint
```

View File

@ -3864,6 +3864,12 @@
githubId = 6821729;
github = "criyle";
};
crschnick = {
email = "crschnick@xpipe.io";
name = "Christopher Schnick";
github = "crschnick";
githubId = 72509152;
};
CRTified = {
email = "carl.schneider+nixos@rub.de";
matrix = "@schnecfk:ruhr-uni-bochum.de";

View File

@ -834,6 +834,7 @@
./services/monitoring/riemann.nix
./services/monitoring/scollector.nix
./services/monitoring/smartd.nix
./services/monitoring/snmpd.nix
./services/monitoring/statsd.nix
./services/monitoring/sysstat.nix
./services/monitoring/teamviewer.nix

View File

@ -0,0 +1,83 @@
# NixOS module for snmpd, the Net-SNMP agent daemon.
{ pkgs, config, lib, ... }:
let
cfg = config.services.snmpd;
# Materialize configText into a Nix-store file; null when configText is empty.
# NOTE(review): the rendered file is world-readable in the store, as the
# configText option documentation below warns.
configFile = if cfg.configText != "" then
pkgs.writeText "snmpd.cfg" ''
${cfg.configText}
'' else null;
in {
options.services.snmpd = {
enable = lib.mkEnableOption "snmpd";
# Package providing the snmpd executable (resolved via getExe' below).
package = lib.mkPackageOption pkgs "net-snmp" {};
listenAddress = lib.mkOption {
type = lib.types.str;
default = "0.0.0.0";
description = lib.mdDoc ''
The address to listen on for SNMP and AgentX messages.
'';
example = "127.0.0.1";
};
port = lib.mkOption {
type = lib.types.port;
default = 161;
description = lib.mdDoc ''
The port to listen on for SNMP and AgentX messages.
'';
};
openFirewall = lib.mkOption {
type = lib.types.bool;
default = false;
description = lib.mdDoc ''
Open port in firewall for snmpd.
'';
};
configText = lib.mkOption {
type = lib.types.lines;
default = "";
description = lib.mdDoc ''
The contents of the snmpd.conf. If the {option}`configFile` option
is set, this value will be ignored.
Note that the contents of this option will be added to the Nix
store as world-readable plain text, {option}`configFile` can be used in
addition to a secret management tool to protect sensitive data.
'';
};
configFile = lib.mkOption {
type = lib.types.path;
# NOTE(review): defaults to the `configFile` let-binding above, which is
# null when configText is empty; ExecStart below interpolates this value,
# so enabling the service with neither option set appears unsupported —
# confirm whether an assertion should guard this.
default = configFile;
defaultText = lib.literalMD "The value of {option}`configText`.";
description = lib.mdDoc ''
Path to the snmpd.conf file. By default, if {option}`configText` is set,
a config file will be automatically generated.
'';
};
};
config = lib.mkIf cfg.enable {
systemd.services."snmpd" = {
description = "Simple Network Management Protocol (SNMP) daemon.";
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
serviceConfig = {
Type = "simple";
# -f: stay in foreground (systemd manages the process);
# -Lo: log to stdout; -c: explicit config file.
ExecStart = "${lib.getExe' cfg.package "snmpd"} -f -Lo -c ${cfg.configFile} ${cfg.listenAddress}:${toString cfg.port}";
};
};
# SNMP is UDP; only opened when explicitly requested.
networking.firewall.allowedUDPPorts = lib.mkIf cfg.openFirewall [
cfg.port
];
};
meta.maintainers = [ lib.maintainers.eliandoran ];
}

View File

@ -4,9 +4,10 @@ with lib;
let
inherit (pkgs) cups cups-pk-helper cups-filters xdg-utils;
inherit (pkgs) cups-pk-helper cups-filters xdg-utils;
cfg = config.services.printing;
cups = cfg.package;
avahiEnabled = config.services.avahi.enable;
polkitEnabled = config.security.polkit.enable;
@ -140,6 +141,8 @@ in
'';
};
package = lib.mkPackageOption pkgs "cups" {};
stateless = mkOption {
type = types.bool;
default = false;

View File

@ -33,21 +33,11 @@ in {
'';
};
package = lib.mkOption {
type = lib.types.package;
default = pkgs.lxd;
defaultText = lib.literalExpression "pkgs.lxd";
description = lib.mdDoc ''
The LXD package to use.
'';
};
package = lib.mkPackageOption pkgs "lxd" { };
lxcPackage = lib.mkOption {
type = lib.types.package;
default = pkgs.lxc;
defaultText = lib.literalExpression "pkgs.lxc";
description = lib.mdDoc ''
The LXC package to use with LXD (required for AppArmor profiles).
lxcPackage = lib.mkPackageOption pkgs "lxc" {
extraDescription = ''
Required for AppArmor profiles.
'';
};
@ -149,7 +139,7 @@ in {
ui = {
enable = lib.mkEnableOption (lib.mdDoc "(experimental) LXD UI");
package = lib.mkPackageOption pkgs.lxd-unwrapped "ui" { };
package = lib.mkPackageOption pkgs [ "lxd-unwrapped" "ui" ] { };
};
};
};

View File

@ -773,6 +773,7 @@ in {
sing-box = handleTest ./sing-box.nix {};
slimserver = handleTest ./slimserver.nix {};
slurm = handleTest ./slurm.nix {};
snmpd = handleTest ./snmpd.nix {};
smokeping = handleTest ./smokeping.nix {};
snapcast = handleTest ./snapcast.nix {};
snapper = handleTest ./snapper.nix {};

23
nixos/tests/snmpd.nix Normal file
View File

@ -0,0 +1,23 @@
# NixOS VM test: boot a machine running services.snmpd and verify that an
# SNMP v2c walk with the configured read-only community succeeds.
import ./make-test-python.nix ({ pkgs, lib, ... }: {
  name = "snmpd";

  nodes.snmpd = {
    # net-snmp provides the snmpwalk client used in the test script.
    environment.systemPackages = with pkgs; [
      net-snmp
    ];

    services.snmpd = {
      enable = true;
      # Minimal read-only community so snmpwalk can query the agent.
      configText = ''
        rocommunity public
      '';
    };
  };

  # The test driver exposes one Python object per node, named after the
  # node — here `snmpd`, not `machine` (which only exists for a node that
  # is literally named "machine"). Python statements need no semicolons.
  testScript = ''
    start_all()
    snmpd.wait_for_unit("snmpd.service")
    snmpd.succeed("snmpwalk -v 2c -c public localhost | grep SNMPv2-MIB::sysName.0")
  '';
})

View File

@ -7,16 +7,16 @@
rustPlatform.buildRustPackage rec {
pname = "clipcat";
version = "0.16.0";
version = "0.16.1";
src = fetchFromGitHub {
owner = "xrelkd";
repo = pname;
rev = "v${version}";
hash = "sha256-9BilasXc/3FFPcKAgPvc0hIHP7NbOqRD8ZwIMRc/Y3M=";
hash = "sha256-SqA8UjKTBtkE1IkWGeshI8KBHr86V9r/+YvFZNJ6Oq8=";
};
cargoHash = "sha256-zkeKhi0DiYqA5+KiU77ZJXRyhLUKVDmHvF7TG1URzo4=";
cargoHash = "sha256-KU3kXqy9zL7GQdSsCNW7jcsxdTuRXjJyDtBpmgoXi6E=";
nativeBuildInputs = [
protobuf

View File

@ -5,7 +5,7 @@
# python deps
, python, buildPythonPackage
, alembic, beautifulsoup4, chardet, lxml, mako, pyenchant
, pyqt5_with_qtwebkit, pyxdg, sip_4, sqlalchemy, sqlalchemy-migrate
, pyqt5-webkit, pyxdg, sip_4, sqlalchemy, sqlalchemy-migrate
}:
buildPythonPackage rec {
@ -39,7 +39,7 @@ buildPythonPackage rec {
lxml
mako
pyenchant
pyqt5_with_qtwebkit
pyqt5-webkit
pyxdg
sip_4
sqlalchemy

View File

@ -2,16 +2,16 @@
buildGoModule rec {
pname = "stern";
version = "1.27.0";
version = "1.28.0";
src = fetchFromGitHub {
owner = "stern";
repo = "stern";
rev = "v${version}";
sha256 = "sha256-W8jGUs63R6QpwuTgzK5yVLhKGXypvKOyCWHT2xdb6eM=";
sha256 = "sha256-Lx5f2dqjdhgMXky1Pv2ik9i56ugsQmZK/ag4veC9Dac=";
};
vendorHash = "sha256-LLVd9WB8ixH78CHYe0sS4sCDCD+6SQ7PxWr2MHiAOxI=";
vendorHash = "sha256-6jI/I7Nw/vJwKNvgH/35uHYu51SBX+WFH5s0WKfCqBo=";
subPackages = [ "." ];

View File

@ -11,7 +11,7 @@ python3Packages.buildPythonPackage rec {
sha256 = "1ffdy74igll74fwpmnn3brvcxbk4iianqscdzz18sx1pfqpw16cl";
};
propagatedBuildInputs = with python3Packages; [ pyqt5_with_qtwebkit dbus-python jsmin ];
propagatedBuildInputs = with python3Packages; [ pyqt5-webkit dbus-python jsmin ];
meta = with lib; {
description = "Non-official desktop client for Slack";

View File

@ -6,16 +6,16 @@
buildGoModule rec {
pname = "rclone";
version = "1.65.0";
version = "1.65.1";
src = fetchFromGitHub {
owner = pname;
repo = pname;
rev = "v${version}";
hash = "sha256-hlkX8JrBz/hFwQj0xCZfuBt2t3CP3Xa1JkNDH0zomxg=";
hash = "sha256-wRksCRQR6JZjYtXgq3iARCoYck76O17Kd2Ht1XpA9KE=";
};
vendorHash = "sha256-qKRIT2HqNDpEtZBNHZMXp4Yhh5fCkQSTPU5MQ7FmCHI=";
vendorHash = "sha256-kWaMo6ALieuwf53H05UdoI7xtH1LAnsD6Ak9bJTa6jc=";
subPackages = [ "." ];

View File

@ -0,0 +1,132 @@
# XPipe: binary (unfree-licensed core) shell-connection hub, repackaged from
# the upstream portable Linux tarball with autoPatchelfHook.
{ stdenvNoCC
, lib
, fetchzip
, makeDesktopItem
, autoPatchelfHook
, zlib
, fontconfig
, udev
, gtk3
, freetype
, alsa-lib
, makeShellWrapper
, libX11
, libXext
, libXdamage
, libXfixes
, libxcb
, libXcomposite
, libXcursor
, libXi
, libXrender
, libXtst
, libXxf86vm
}:
let
  inherit (stdenvNoCC.hostPlatform) system;
  throwSystem = throw "Unsupported system: ${system}";
  # Upstream publishes one tarball per architecture.
  arch = {
    x86_64-linux = "x86_64";
    aarch64-linux = "arm64";
  }.${system} or throwSystem;
  hash = {
    x86_64-linux = "sha256-/cumOKaWPdAruMLZP2GMUdocIhsbo59dc4Q3ngc/JOc=";
    aarch64-linux = "sha256-xMV+9etnuFwRGIHdaXNViKd4FMOuVtugGDS1xyMwEnM=";
  }.${system} or throwSystem;
  displayname = "XPipe";
in stdenvNoCC.mkDerivation rec {
  pname = "xpipe";
  version = "1.7.3";

  src = fetchzip {
    url = "https://github.com/xpipe-io/xpipe/releases/download/${version}/xpipe-portable-linux-${arch}.tar.gz";
    inherit hash;
  };

  nativeBuildInputs = [
    autoPatchelfHook
    makeShellWrapper
  ];

  # Ignore libavformat dependencies as we don't need them
  autoPatchelfIgnoreMissingDeps = true;

  # Runtime libraries for autoPatchelfHook to resolve.
  # (fix: libX11 was listed twice)
  buildInputs = [
    fontconfig
    zlib
    udev
    freetype
    gtk3
    alsa-lib
    libX11
    libXext
    libXdamage
    libXfixes
    libxcb
    libXcomposite
    libXcursor
    libXi
    libXrender
    libXtst
    libXxf86vm
  ];

  desktopItem = makeDesktopItem {
    categories = [ "Network" ];
    comment = "Your entire server infrastructure at your fingertips";
    desktopName = displayname;
    exec = "/opt/${pname}/cli/bin/xpipe open %U";
    genericName = "Shell connection hub";
    icon = "/opt/${pname}/logo.png";
    name = displayname;
  };

  installPhase = ''
    runHook preInstall

    pkg="${pname}"
    # Mirror upstream's expected /opt layout inside $out.
    mkdir -p $out/opt/$pkg
    cp -r ./ $out/opt/$pkg
    mkdir -p "$out/bin"
    ln -s "$out/opt/$pkg/cli/bin/xpipe" "$out/bin/$pkg"
    mkdir -p "$out/share/applications"
    cp -r "${desktopItem}/share/applications/" "$out/share/"
    mkdir -p "$out/etc/bash_completion.d"
    ln -s "$out/opt/$pkg/cli/xpipe_completion" "$out/etc/bash_completion.d/$pkg"

    # The desktop item hardcodes /opt paths; prefix them with $out.
    substituteInPlace $out/share/applications/${displayname}.desktop --replace "Exec=" "Exec=$out"
    substituteInPlace $out/share/applications/${displayname}.desktop --replace "Icon=" "Icon=$out"

    # Wrap the real daemon binaries so GTK/fontconfig/udev are on the
    # library path at runtime.
    mv "$out/opt/xpipe/app/bin/xpiped" "$out/opt/xpipe/app/bin/xpiped_raw"
    mv "$out/opt/xpipe/app/lib/app/xpiped.cfg" "$out/opt/xpipe/app/lib/app/xpiped_raw.cfg"
    mv "$out/opt/xpipe/app/scripts/xpiped_debug.sh" "$out/opt/xpipe/app/scripts/xpiped_debug_raw.sh"

    makeShellWrapper "$out/opt/xpipe/app/bin/xpiped_raw" "$out/opt/xpipe/app/bin/xpiped" \
      --prefix LD_LIBRARY_PATH : "${lib.makeLibraryPath [ fontconfig gtk3 udev ]}"
    makeShellWrapper "$out/opt/xpipe/app/scripts/xpiped_debug_raw.sh" "$out/opt/xpipe/app/scripts/xpiped_debug.sh" \
      --prefix LD_LIBRARY_PATH : "${lib.makeLibraryPath [ fontconfig gtk3 udev ]}"

    runHook postInstall
  '';

  meta = with lib; {
    # nixpkgs convention: no leading article in descriptions.
    description = "Cross-platform shell connection hub and remote file manager";
    homepage = "https://github.com/xpipe-io/${pname}";
    downloadPage = "https://github.com/xpipe-io/${pname}/releases/latest";
    sourceProvenance = with sourceTypes; [ binaryNativeCode ];
    changelog = "https://github.com/xpipe-io/${pname}/releases/tag/${version}";
    license = [ licenses.asl20 licenses.unfree ];
    maintainers = with maintainers; [ crschnick ];
    platforms = [ "x86_64-linux" "aarch64-linux" ];
    mainProgram = pname;
  };
}

View File

@ -33,11 +33,11 @@ in
stdenv.mkDerivation (finalAttrs: {
pname = "scribus";
version = "1.6.0";
version = "1.6.1";
src = fetchurl {
url = "mirror://sourceforge/scribus/scribus-devel/scribus-${finalAttrs.version}.tar.xz";
hash = "sha256-lLl0kOzhcoaNxPBMeqLulQtBtfL/QoXfN9YV8ETQOOU=";
hash = "sha256-4J3Xjm22HQG5MhEI/t7bzNbsCrNS3Vuv24sEHw73npk=";
};
nativeBuildInputs = [

View File

@ -54,13 +54,13 @@ let
in
stdenv.mkDerivation rec {
pname = "cp2k";
version = "2023.2";
version = "2024.1";
src = fetchFromGitHub {
owner = "cp2k";
repo = "cp2k";
rev = "v${version}";
hash = "sha256-1TJorIjajWFO7i9vqSBDTAIukBdyvxbr5dargt4QB8M=";
hash = "sha256-6PB6wjdTOa55dXV7QIsjxI77hhc95WFEjNePfupBUJQ=";
fetchSubmodules = true;
};
@ -157,6 +157,7 @@ stdenv.mkDerivation rec {
-I${lib.getDev libint}/include ${lib.optionalString enableElpa "$(pkg-config --variable=fcflags elpa)"} \
-I${lib.getDev sirius}/include/sirius \
-I${lib.getDev libxc}/include -I${lib.getDev libxsmm}/include \
-I${lib.getDev hdf5-fortran}/include \
-fallow-argument-mismatch
LIBS = -lfftw3 -lfftw3_threads \
-lscalapack -lblas -llapack \

View File

@ -2,16 +2,16 @@
buildGoModule rec {
pname = "gh";
version = "2.40.1";
version = "2.41.0";
src = fetchFromGitHub {
owner = "cli";
repo = "cli";
rev = "v${version}";
hash = "sha256-KdJZHouMTbbD/8k2VGFvRits7grbbVNUmCM6dSiJXBc=";
hash = "sha256-GkrEirunY17WgAv4XOreG+JwPQn7cRTmr7hJ3/2tSrY=";
};
vendorHash = "sha256-jM9nwTMOTh+eXztLvHIwwH4qu3ZIMOtBrPEtByB9Ry8=";
vendorHash = "sha256-XBoC1sHfxInkamSHNm7Vb3AKCgIch6uYx0jJWqN7PN8=";
nativeBuildInputs = [ installShellFiles ];

View File

@ -1,27 +1,38 @@
{ lib, stdenv, fetchurl
, pkg-config, openssl, libbsd, libevent, libuuid, libossp_uuid, libmd, zlib, ncurses, bison
{ lib
, stdenv
, fetchurl
, pkg-config
, openssl
, libbsd
, libevent
, libuuid
, libossp_uuid
, libmd
, zlib
, ncurses
, bison
, autoPatchelfHook
}:
stdenv.mkDerivation rec {
stdenv.mkDerivation (finalAttrs: {
pname = "got";
version = "0.94";
version = "0.95";
src = fetchurl {
url = "https://gameoftrees.org/releases/portable/got-portable-${version}.tar.gz";
hash = "sha256-hG0/a+sk6uZCxR908YfZCW44qx/SIwwGO9mUaxxHZ3k=";
url = "https://gameoftrees.org/releases/portable/got-portable-${finalAttrs.version}.tar.gz";
hash = "sha256-5on9ff76OAFmoaKTwVM0hUCGLiAZGJzt6+jCx2Nygg4=";
};
nativeBuildInputs = [ pkg-config bison ]
++ lib.optionals stdenv.isLinux [ autoPatchelfHook ];
buildInputs = [ openssl libbsd libevent libuuid libmd zlib ncurses ]
++ lib.optionals stdenv.isDarwin [ libossp_uuid ];
++ lib.optionals stdenv.isDarwin [ libossp_uuid ];
configureFlags = [ "--enable-gotd" ];
preConfigure = lib.optionalString stdenv.isDarwin ''
# The configure script assumes dependencies on Darwin are install via
# The configure script assumes dependencies on Darwin are installed via
# Homebrew or MacPorts and hardcodes assumptions about the paths of
# dependencies which fails the nixpkgs configurePhase.
substituteInPlace configure --replace 'xdarwin' 'xhomebrew'
@ -38,7 +49,7 @@ stdenv.mkDerivation rec {
installCheckPhase = ''
runHook preInstallCheck
test "$($out/bin/got --version)" = '${pname} ${version}'
test "$($out/bin/got --version)" = "${finalAttrs.pname} ${finalAttrs.version}"
runHook postInstallCheck
'';
@ -59,4 +70,4 @@ stdenv.mkDerivation rec {
platforms = platforms.linux ++ platforms.darwin;
maintainers = with maintainers; [ abbe afh ];
};
}
})

View File

@ -3,11 +3,11 @@
buildKodiAddon rec {
pname = "arteplussept";
namespace = "plugin.video.arteplussept";
version = "1.4.1";
version = "1.4.2";
src = fetchzip {
url = "https://mirrors.kodi.tv/addons/nexus/${namespace}/${namespace}-${version}.zip";
hash = "sha256-4lPJIFBF4zXr1bEyv9tVUPXw9JFt2by/tcOwihib6aQ=";
hash = "sha256-dqxGKaOnEYOI33Aw76zbjma5z7MqOUh367dFsV87olU=";
};
propagatedBuildInputs = [

View File

@ -49,21 +49,24 @@
, asio
, decklinkSupport ? false
, blackmagic-desktop-video
, libdatachannel
, libvpl
, qrcodegencpp
}:
let
inherit (lib) optional optionals;
in
stdenv.mkDerivation rec {
stdenv.mkDerivation (finalAttrs: {
pname = "obs-studio";
version = "29.1.3";
version = "30.0.2";
src = fetchFromGitHub {
owner = "obsproject";
repo = "obs-studio";
rev = version;
sha256 = "sha256-D0DPueMtopwz5rLgM8QcPT7DgTKcJKQHnst69EY9V6Q=";
repo = finalAttrs.pname;
rev = finalAttrs.version;
sha256 = "sha256-8pX1kqibrtDIaE1+/Pey1A5bu6MwFTXLrBOah4rsF+4=";
fetchSubmodules = true;
};
@ -132,6 +135,9 @@ stdenv.mkDerivation rec {
nlohmann_json
websocketpp
asio
libdatachannel
libvpl
qrcodegencpp
]
++ optionals scriptingSupport [ luajit python3 ]
++ optional alsaSupport alsa-lib
@ -151,7 +157,7 @@ stdenv.mkDerivation rec {
'';
cmakeFlags = [
"-DOBS_VERSION_OVERRIDE=${version}"
"-DOBS_VERSION_OVERRIDE=${finalAttrs.version}"
"-Wno-dev" # kill dev warnings that are useless for packaging
# Add support for browser source
"-DBUILD_BROWSER=ON"
@ -183,7 +189,7 @@ stdenv.mkDerivation rec {
addOpenGLRunpath $out/lib/obs-plugins/*.so
# Link libcef again after patchelfing other libs
ln -s ${libcef}/lib/libcef.so $out/lib/obs-plugins/libcef.so
ln -s ${libcef}/lib/* $out/lib/obs-plugins/
'';
meta = with lib; {
@ -194,9 +200,9 @@ stdenv.mkDerivation rec {
video content, efficiently
'';
homepage = "https://obsproject.com";
maintainers = with maintainers; [ jb55 MP2E materus ];
maintainers = with maintainers; [ jb55 MP2E materus fpletz ];
license = licenses.gpl2Plus;
platforms = [ "x86_64-linux" "i686-linux" "aarch64-linux" ];
mainProgram = "obs";
};
}
})

View File

@ -33,6 +33,5 @@ stdenv.mkDerivation rec {
license = licenses.mit;
maintainers = with maintainers; [ algram ];
platforms = [ "x86_64-linux" ];
broken = true; # Not compatible with qt6 yet but required by OBS28
};
}

View File

@ -27,10 +27,6 @@ stdenv.mkDerivation rec {
"-Wno-dev"
];
preConfigure = ''
cp ${obs-studio.src}/cmake/external/ObsPluginHelpers.cmake cmake/FindLibObs.cmake
'';
meta = with lib; {
description = "Audio device and application capture for OBS Studio using PipeWire";
homepage = "https://github.com/dimtpap/obs-pipewire-audio-capture";

View File

@ -28,6 +28,9 @@ stdenv.mkDerivation (finalAttrs: {
fetchSubmodules = true;
};
# obs_frontend_add_dock() deprecated in obs 30
env.NIX_CFLAGS_COMPILE = "-Wno-error=deprecated-declarations";
patches = [
# fix build with qt 6.6.0
# treewide: replace deprecated qAsConst with std::as_const()
@ -36,6 +39,11 @@ stdenv.mkDerivation (finalAttrs: {
url = "https://github.com/univrsal/tuna/commit/0d570e771f8d8e6ae7c85bd2b86bbf59c264789e.patch";
hash = "sha256-A5idhMiM9funqhTm5XMIBqwy+FO1SaNPtgZjo+Vws6k=";
})
# fix build with obs 30
(fetchpatch2 {
url = "https://github.com/univrsal/tuna/commit/723bd3c7b4e257cf0997611426e555068de77ae7.patch";
hash = "sha256-MF5vghGYknL6q+A8BJ1yrQcEKIu9I+PWk+RZNYg3fRU=";
})
];
postInstall = ''

View File

@ -1,40 +1,53 @@
{ pkgs }:
{ lib
, buildPackages
}:
let
# rudimentary support for cross-compiling
# see: https://github.com/NixOS/nixpkgs/pull/279487#discussion_r1444449726
inherit (buildPackages)
mktemp
rsync
;
in
rec {
/* Prepare a derivation for local builds.
*
* This function prepares checkpoint builds by provinding,
* containing the build output and the sources for cross checking.
* This function prepares checkpoint builds by storing
* the build output and the sources for cross checking.
* The build output can be used later to allow checkpoint builds
* by passing the derivation output to the `mkCheckpointBuild` function.
*
* To build a project with checkpoints follow these steps:
* - run prepareIncrementalBuild on the desired derivation
* e.G `incrementalBuildArtifacts = (pkgs.checkpointBuildTools.prepareCheckpointBuild pkgs.virtualbox);`
* - change something you want in the sources of the package( e.G using source override)
* changedVBox = pkgs.virtuabox.overrideAttrs (old: {
* src = path/to/vbox/sources;
* }
* - use `mkCheckpointedBuild changedVBox buildOutput`
* To build a project with checkpoints, follow these steps:
* - run `prepareCheckpointBuild` on the desired derivation, e.g.
* checkpointArtifacts = prepareCheckpointBuild virtualbox;
* - change something you want in the sources of the package,
* e.g. using source override:
* changedVBox = pkgs.virtuabox.overrideAttrs (old: {
* src = path/to/vbox/sources;
* };
* - use `mkCheckpointBuild changedVBox checkpointArtifacts`
* - enjoy shorter build times
*/
prepareCheckpointBuild = drv: drv.overrideAttrs (old: {
outputs = [ "out" ];
name = drv.name + "-checkpointArtifacts";
# To determine differences between the state of the build directory
# from an earlier build and a later one we store the state of the build
# from an earlier build and a later one we store the state of the build
# directory before build, but after patch phases.
# This way, the same derivation can be used multiple times and only changes are detected.
# Additionally Removed files are handled correctly in later builds.
# Additionally, removed files are handled correctly in later builds.
preBuild = (old.preBuild or "") + ''
mkdir -p $out/sources
cp -r ./* $out/sources/
'';
# After the build the build directory is copied again
# After the build, the build directory is copied again
# to get the output files.
# We copy the complete build folder, to take care for
# Build tools, building in the source directory, instead of
# having a build root directory, e.G the Linux kernel.
# We copy the complete build folder, to take care of
# build tools that build in the source directory, instead of
# having a separate build directory such as the Linux kernel.
installPhase = ''
runHook preCheckpointInstall
mkdir -p $out/outputs
@ -44,26 +57,34 @@ rec {
});
/* Build a derivation based on the checkpoint output generated by
* the `prepareCheckpointBuild function.
* the `prepareCheckpointBuild` function.
*
* Usage:
* let
* checkpointArtifacts = prepareCheckpointBuild drv
* in mkCheckpointedBuild drv checkpointArtifacts
* checkpointArtifacts = prepareCheckpointBuild drv;
* in mkCheckpointBuild drv checkpointArtifacts
*/
mkCheckpointedBuild = drv: previousBuildArtifacts: drv.overrideAttrs (old: {
mkCheckpointBuild = drv: checkpointArtifacts: drv.overrideAttrs (old: {
# The actual checkpoint build phase.
# We compare the changed sources from a previous build with the current and create a patch
# Afterwards we clean the build directory to copy the previous output files (Including the sources)
# The source difference patch is applied to get the latest changes again to allow short build times.
# We compare the changed sources from a previous build with the current and create a patch.
# Afterwards we clean the build directory and copy the previous output files (including the sources).
# The source difference patch is then applied to get the latest changes again to allow short build times.
preBuild = (old.preBuild or "") + ''
set +e
diff -ur ${previousBuildArtifacts}/sources ./ > sourceDifference.patch
sourceDifferencePatchFile=$(${mktemp}/bin/mktemp)
diff -ur ${checkpointArtifacts}/sources ./ > "$sourceDifferencePatchFile"
set -e
shopt -s extglob dotglob
rm -r !("sourceDifference.patch")
${pkgs.rsync}/bin/rsync -cutU --chown=$USER:$USER --chmod=+w -r ${previousBuildArtifacts}/outputs/* .
patch -p 1 -i sourceDifference.patch
shopt -s dotglob
rm -r *
${rsync}/bin/rsync \
--checksum --times --atimes --chown=$USER:$USER --chmod=+w \
-r ${checkpointArtifacts}/outputs/ .
patch -p 1 -i "$sourceDifferencePatchFile"
rm "$sourceDifferencePatchFile"
'';
});
mkCheckpointedBuild = lib.warn
"`mkCheckpointedBuild` is deprecated, use `mkCheckpointBuild` instead!"
mkCheckpointBuild;
}

View File

@ -0,0 +1,24 @@
# fileinfo: CLI tool querying fileinfo.com for file-extension metadata.
# Packaged from an untagged commit, hence the unstable- date version.
{ lib
, python3Packages
, fetchFromGitHub
}:

python3Packages.buildPythonApplication {
  pname = "fileinfo";
  version = "unstable-2022-09-16";

  src = fetchFromGitHub {
    owner = "sdushantha";
    repo = "fileinfo";
    rev = "503f26189ad5043bad3fe71333dd5ba3ffbce485";
    hash = "sha256-tEmCsR3LmTxeDZAbMvbIwqp/6uaGNUhgGlm18gdsnOw=";
  };

  propagatedBuildInputs = with python3Packages; [ requests ];

  meta = with lib; {
    homepage = "https://github.com/sdushantha/fileinfo";
    # nixpkgs convention: no leading article in descriptions.
    description = "File extension metadata lookup tool";
    license = licenses.mit;
    maintainers = with maintainers; [ h7x4 ];
    mainProgram = "fileinfo";
  };
}

View File

@ -0,0 +1,41 @@
# libvpl: Intel Video Processing Library (oneVPL dispatcher), built with CMake.
{ stdenv
, lib
, fetchFromGitHub
, cmake
, pkg-config
}:

stdenv.mkDerivation (finalAttrs: {
pname = "libvpl";
version = "2.10.1";

src = fetchFromGitHub {
owner = "intel";
repo = finalAttrs.pname;
rev = "v${finalAttrs.version}";
hash = "sha256-2yfJo4iwI/h0CJ+mJJ3cAyG5S7KksUibwJHebF3MR+E=";
};

nativeBuildInputs = [
cmake
pkg-config
];

# Enable the Linux presentation/display backends; skip examples and tools
# since only the library is packaged.
cmakeFlags = [
"-DCMAKE_BUILD_TYPE=Release"
"-DENABLE_DRI3=ON"
"-DENABLE_DRM=ON"
"-DENABLE_VA=ON"
"-DENABLE_WAYLAND=ON"
"-DENABLE_X11=ON"
"-DINSTALL_EXAMPLE_CODE=OFF"
"-DBUILD_TOOLS=OFF"
];

# NOTE(review): meta has no maintainers entry — confirm this is intentional.
meta = with lib; {
description = "Intel Video Processing Library";
homepage = "https://intel.github.io/libvpl/";
license = licenses.mit;
platforms = platforms.linux;
};
})

View File

@ -0,0 +1,32 @@
# qrcodegencpp: C++ port of the QR-Code-generator library; reuses the
# qrcodegen package's src and version so both stay in lock step.
{ lib
, stdenv
, qrcodegen
}:

stdenv.mkDerivation (finalAttrs: {
pname = "qrcodegencpp";
version = qrcodegen.version;

src = qrcodegen.src;

# The upstream repository hosts several language ports side by side;
# only the cpp/ subdirectory is built here.
sourceRoot = "${finalAttrs.src.name}/cpp";

# With clang, the build uses llvm-ar (set via makeFlags below), so the
# llvm tools must be available on PATH.
nativeBuildInputs = lib.optionals stdenv.cc.isClang [
stdenv.cc.cc.libllvm.out
];

makeFlags = lib.optionals stdenv.cc.isClang [ "AR=llvm-ar" ];

# No install target upstream: copy the static library and header manually.
installPhase = ''
runHook preInstall

install -Dt $out/lib/ libqrcodegencpp.a
install -Dt $out/include/qrcodegen/ qrcodegen.hpp

runHook postInstall
'';

# Share metadata with the parent qrcodegen package.
meta = {
inherit (qrcodegen.meta) description homepage license maintainers platforms;
};
})

View File

@ -10,7 +10,7 @@ elfHasDynamicSection() {
autoAddCudaCompatRunpathPhase() (
local outputPaths
mapfile -t outputPaths < <(for o in $(getAllOutputNames); do echo "${!o}"; done)
find "${outputPaths[@]}" -type f -executable -print0 | while IFS= read -rd "" f; do
find "${outputPaths[@]}" -type f -print0 | while IFS= read -rd "" f; do
if isELF "$f"; then
# patchelf returns an error on statically linked ELF files
if elfHasDynamicSection "$f" ; then

View File

@ -9,7 +9,7 @@ elfHasDynamicSection() {
autoAddOpenGLRunpathPhase() (
local outputPaths
mapfile -t outputPaths < <(for o in $(getAllOutputNames); do echo "${!o}"; done)
find "${outputPaths[@]}" -type f -executable -print0 | while IFS= read -rd "" f; do
find "${outputPaths[@]}" -type f -print0 | while IFS= read -rd "" f; do
if isELF "$f"; then
# patchelf returns an error on statically linked ELF files
if elfHasDynamicSection "$f" ; then

View File

@ -30,6 +30,10 @@
}:
let
gl_rpath = lib.makeLibraryPath [
stdenv.cc.cc.lib
];
rpath = lib.makeLibraryPath [
glib
nss
@ -92,7 +96,11 @@ stdenv.mkDerivation rec {
mkdir -p $out/lib/ $out/share/cef/
cp libcef_dll_wrapper/libcef_dll_wrapper.a $out/lib/
cp ../Release/libcef.so $out/lib/
cp ../Release/libEGL.so $out/lib/
cp ../Release/libGLESv2.so $out/lib/
patchelf --set-rpath "${rpath}" $out/lib/libcef.so
patchelf --set-rpath "${gl_rpath}" $out/lib/libEGL.so
patchelf --set-rpath "${gl_rpath}" $out/lib/libGLESv2.so
cp ../Release/*.bin $out/share/cef/
cp -r ../Resources/* $out/share/cef/
cp -r ../include $out/

View File

@ -1,5 +1,6 @@
{ lib
, stdenv
, fetchpatch
, fetchurl
, fetchFromGitHub
, fixDarwinDylibNames
@ -20,30 +21,30 @@ stdenv.mkDerivation rec {
patches = [
# Fixes build with Musl.
(fetchurl {
(fetchpatch {
url = "https://github.com/openembedded/meta-openembedded/raw/39185eb1d1615e919e3ae14ae63b8ed7d3e5d83f/meta-oe/recipes-support/tbb/tbb/GLIBC-PREREQ-is-not-defined-on-musl.patch";
sha256 = "gUfXQ9OZQ82qD6brgauBCsKdjLvyHafMc18B+KxZoYs=";
hash = "sha256-Oo5FSBPPBaOziWEBOlRmTmbulExMsAmQWBR5faOj1a0=";
})
# Fixes build with Musl.
(fetchurl {
(fetchpatch {
url = "https://github.com/openembedded/meta-openembedded/raw/39185eb1d1615e919e3ae14ae63b8ed7d3e5d83f/meta-oe/recipes-support/tbb/tbb/0001-mallinfo-is-glibc-specific-API-mark-it-so.patch";
sha256 = "fhorfqO1hHKZ61uq+yTR7eQ8KYdyLwpM3K7WpwJpV74=";
hash = "sha256-xp8J/il855VTFIKCN/bFtf+vif6HzcVl4t4/L9nW/xk=";
})
# Fixes build with upcoming gcc-13:
# https://github.com/oneapi-src/oneTBB/pull/833
(fetchurl {
(fetchpatch {
name = "gcc-13.patch";
url = "https://github.com/oneapi-src/oneTBB/pull/833/commits/c18342ba667d1f33f5e9a773aa86b091a9694b97.patch";
sha256 = "ZUExE3nsW80Z5GPWZnDNuDiHHaD1EF7qNl/G5M+Wcxg=";
hash = "sha256-LWgf7Rm6Zp4TJdvMqnAkoAebbVS+WV2kB+4iY6jRka4=";
})
# Fixes build for aarch64-darwin
(fetchurl {
(fetchpatch {
name = "aarch64-darwin.patch";
url = "https://github.com/oneapi-src/oneTBB/pull/258/commits/86f6dcdc17a8f5ef2382faaef860cfa5243984fe.patch";
sha256 = "sha256-JXqrFPCb3q1vfxk752tQu7HhApCB4YH2LoVnGRwmspk=";
hash = "sha256-+sNU8yEsVVmQYOCKmlNiyJfKmB/U0GKAmrydwkfrDFQ=";
})
];

View File

@ -2,11 +2,11 @@
stdenv.mkDerivation rec {
pname = "maestro";
version = "1.34.1";
version = "1.35.0";
src = fetchurl {
url = "https://github.com/mobile-dev-inc/maestro/releases/download/cli-${version}/maestro.zip";
sha256 = "0whnhcf7a3j01693254qqwfk9d3xa4icv4kyqkn4ihxyibznb91d";
sha256 = "1rr3ihirga9jjw1n9z45hby6j68d0q11alzhqz4yv2ibvrjykzai";
};
dontUnpack = true;

View File

@ -0,0 +1,60 @@
# birch: simple hierarchical configuration library for Python packages.
{ lib
, buildPythonPackage
, fetchFromGitHub
, setuptools
, strct
, pytestCheckHook
, pyyaml
}:

buildPythonPackage rec {
pname = "birch";
version = "0.0.35";
# PEP 517 build (setuptools listed in nativeBuildInputs below).
pyproject = true;

src = fetchFromGitHub {
owner = "shaypal5";
repo = "birch";
rev = "v${version}";
hash = "sha256-KdQZzQJvJ+logpcLQfaqqEEZJ/9VmNTQX/a4v0oBC98=";
};

# Comment out upstream's --cov flag so the test run does not require
# pytest-cov.
postPatch = ''
substituteInPlace pytest.ini \
--replace \
"--cov" \
"#--cov"
'';

nativeBuildInputs = [
setuptools
];

propagatedBuildInputs = [
strct
];

pythonImportsCheck = [
"birch"
"birch.casters"
"birch.exceptions"
"birch.paths"
];

nativeCheckInputs = [
pytestCheckHook
pyyaml
];

# The test suite writes configuration files under $HOME.
preCheck = ''
export HOME="$(mktemp -d)"
'';

meta = with lib; {
description = "Simple hierarchical configuration for Python packages";
homepage = "https://github.com/shaypal5/birch";
license = licenses.mit;
maintainers = with maintainers; [ pbsds ];
};
}

View File

@ -1,29 +1,31 @@
{ lib
, buildPythonPackage
, pythonOlder
, fetchPypi
, fetchFromGitHub
, pythonRelaxDepsHook
, setuptools
, watchdog
, portalocker
, pathtools
, pytestCheckHook
, pymongo
, dnspython
, pymongo-inmemory
, pandas
, birch
}:
buildPythonPackage rec {
pname = "cachier";
version = "2.2.1";
format = "setuptools";
version = "2.2.2";
pyproject = true;
disabled = pythonOlder "3.8";
src = fetchPypi {
inherit pname version;
hash = "sha256-nm98LT87Z7yErKvIqMp93OEX9TDojqqtItgryHgSQJQ=";
src = fetchFromGitHub {
owner = "python-cachier";
repo = "cachier";
rev = "v${version}";
hash = "sha256-zUZqT4SIwZRqhRS/wHIzIYVULnp5aYcytCQd17T0D/4=";
};
pythonRemoveDeps = [ "setuptools" ];
@ -36,7 +38,6 @@ buildPythonPackage rec {
propagatedBuildInputs = [
watchdog
portalocker
pathtools
];
preCheck = ''
@ -52,6 +53,7 @@ buildPythonPackage rec {
dnspython
pymongo-inmemory
pandas
birch
];
disabledTests = [

View File

@ -9,14 +9,14 @@
buildPythonPackage rec {
pname = "ftputil";
version = "5.0.4";
version = "5.1.0";
format = "setuptools";
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
hash = "sha256-aInbhkndINm21ApsXw+EzPNAp9rB4L/A8AJAkPwq+zM=";
hash = "sha256-6eYtP9MH75xS5Dsz/ZJ1n8lMBNi1F4+F9kGxg5BtQ1M=";
};
nativeCheckInputs = [

View File

@ -15,7 +15,7 @@
, httpx
}:
let
version = "1.15.0";
version = "1.16.19";
in
buildPythonPackage rec {
pname = "litellm";
@ -26,7 +26,7 @@ buildPythonPackage rec {
owner = "BerriAI";
repo = "litellm";
rev = "refs/tags/v${version}";
hash = "sha256-s3Ue/N04YZHEfEnVxPHupRSVDHxWjVse8FDlRF5yKCk=";
hash = "sha256-KNQuTgJj7oLJsxfi8g9ShC5WHyrdpZGI5Nfgxzu/eak=";
};
postPatch = ''

View File

@ -11,14 +11,14 @@
buildPythonPackage rec {
pname = "opencensus-ext-azure";
version = "1.1.12";
version = "1.1.13";
format = "setuptools";
disabled = pythonOlder "3.4";
src = fetchPypi {
inherit pname version;
hash = "sha256-hrseR84dIKytlq08Efjvsvp6tensSJbzBj2F+JlJBGI=";
hash = "sha256-rsMEchdwBTebpWpwKgl9YYxfV1WOG7ZnbsdflIEwaSo=";
};
propagatedBuildInputs = [

View File

@ -16,7 +16,7 @@
, python-docx
}:
let
version = "0.5.6";
version = "0.5.7";
in
buildPythonPackage {
pname = "pdf2docx";
@ -26,8 +26,8 @@ buildPythonPackage {
src = fetchFromGitHub {
owner = "dothinking";
repo = "pdf2docx";
rev = "v${version}";
hash = "sha256-NrT4GURQIJbqnHstfJrPzwLXT9c2oGBi4QJ6eGIFwu4=";
rev = "refs/tags/v${version}";
hash = "sha256-GDftANn+ioaNR28VfRFDuFgdKoy7D4xiy0ezvWJ3zy0=";
};
nativeBuildInputs = [

View File

@ -1,25 +1,27 @@
{ lib
, buildPythonPackage
, isPy3k
, fetchPypi
, pythonOlder
, fetchFromGitHub
, substituteAll
, graphviz
, coreutils
, pkg-config
, setuptools
, pytest
}:
buildPythonPackage rec {
pname = "pygraphviz";
version = "1.11";
format = "setuptools";
version = "1.12";
pyproject = true;
disabled = !isPy3k;
disabled = pythonOlder "3.10";
src = fetchPypi {
inherit pname version;
hash = "sha256-qX61ztJm9FBT67HyxsbSkJFpBQPjpcFL5/kIs3sG8tQ=";
extension = "zip";
src = fetchFromGitHub {
owner = "pygraphviz";
repo = "pygraphviz";
rev = "pygraphviz-${version}";
hash = "sha256-XDP77H724eiMa/V18OtLxpUpxlIVDmcFLMYOAbazquo=";
};
patches = [
@ -30,7 +32,10 @@ buildPythonPackage rec {
})
];
nativeBuildInputs = [ pkg-config ];
nativeBuildInputs = [
pkg-config
setuptools
];
buildInputs = [ graphviz ];

View File

@ -7,14 +7,14 @@
buildPythonPackage rec {
pname = "rapidgzip";
version = "0.11.1";
version = "0.12.1";
format = "setuptools";
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
hash = "sha256-pcKO9BovkUDlRjE8MZQEfTSutVMB/9beyAyP3vChMUE=";
hash = "sha256-s4MLxhwoGS7Zvx6k5qh1PWpyTRBUBGVIkPW9q94u+2Q=";
};
nativeBuildInputs = [ nasm ];

View File

@ -0,0 +1,52 @@
# Python package definition for `strct`, a pure-python library of
# data-structure utility functions, built from the GitHub sources.
{ lib
, fetchFromGitHub
, buildPythonPackage
, setuptools
, pytestCheckHook
, sortedcontainers
}:
buildPythonPackage rec {
pname = "strct";
version = "0.0.32";
# PEP 517 build (pyproject + setuptools) rather than the legacy setup.py path.
pyproject = true;
src = fetchFromGitHub {
owner = "shaypal5";
repo = "strct";
rev = "v${version}";
hash = "sha256-ctafvdfSOdp7tlCUYg7d5XTXR1qBcWvOVtGtNUnhYIw=";
};
# The upstream pytest.ini hard-codes `--cov` flags; pytest-cov is not a
# declared check input here, so comment the flag out instead of patching
# the coverage plugin in.
postPatch = ''
substituteInPlace pytest.ini \
--replace \
"--cov" \
"#--cov"
'';
nativeBuildInputs = [
setuptools
];
nativeCheckInputs = [
pytestCheckHook
sortedcontainers
];
# Import each public submodule so missing runtime deps fail the build early.
pythonImportsCheck = [
"strct"
"strct.dicts"
"strct.hash"
"strct.lists"
"strct.sets"
"strct.sortedlists"
];
meta = with lib; {
description = "A small pure-python package for data structure related utility functions";
homepage = "https://github.com/shaypal5/strct";
license = licenses.mit;
maintainers = with maintainers; [ pbsds ];
};
}

View File

@ -1,5 +1,6 @@
{ lib
, stdenv
, linkFarm
, buildPythonPackage
, cargo
, datasets
@ -21,41 +22,43 @@
let
# See https://github.com/huggingface/tokenizers/blob/main/bindings/python/tests/utils.py for details
# about URLs and file names
robertaVocab = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/roberta-base-vocab.json";
sha256 = "0m86wpkfb2gdh9x9i9ng2fvwk1rva4p0s98xw996nrjxs7166zwy";
};
robertaMerges = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/roberta-base-merges.txt";
sha256 = "1idd4rvkpqqbks51i2vjbd928inw7slij9l4r063w3y5fd3ndq8w";
};
albertVocab = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/albert-base-v1-tokenizer.json";
sha256 = "1hra9pn8rczx7378z88zjclw2qsdrdwq20m56sy42s2crbas6akf";
};
bertVocab = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt";
sha256 = "18rq42cmqa8zanydsbzrb34xwy4l6cz1y900r4kls57cbhvyvv07";
};
norvigBig = fetchurl {
url = "https://norvig.com/big.txt";
sha256 = "0yz80icdly7na03cfpl0nfk5h3j3cam55rj486n03wph81ynq1ps";
};
docPipelineTokenizer = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/anthony/doc-pipeline/tokenizer.json";
hash = "sha256-i533xC8J5CDMNxBjo+p6avIM8UOcui8RmGAmK0GmfBc=";
};
docQuicktourTokenizer = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/anthony/doc-quicktour/tokenizer.json";
hash = "sha256-ipY9d5DR5nxoO6kj7rItueZ9AO5wq9+Nzr6GuEIfIBI=";
};
openaiVocab = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/openai-gpt-vocab.json";
sha256 = "0y40gc9bixj5rxv674br1rxmxkd3ly29p80x1596h8yywwcrpx7x";
};
openaiMerges = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/openai-gpt-merges.txt";
sha256 = "09a754pm4djjglv3x5pkgwd6f79i2rq8ydg0f7c3q1wmwqdbba8f";
test-data = linkFarm "tokenizers-test-data" {
"roberta-base-vocab.json" = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/roberta-base-vocab.json";
sha256 = "0m86wpkfb2gdh9x9i9ng2fvwk1rva4p0s98xw996nrjxs7166zwy";
};
"roberta-base-merges.txt" = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/roberta-base-merges.txt";
sha256 = "1idd4rvkpqqbks51i2vjbd928inw7slij9l4r063w3y5fd3ndq8w";
};
"albert-base-v1-tokenizer.json" = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/albert-base-v1-tokenizer.json";
sha256 = "1hra9pn8rczx7378z88zjclw2qsdrdwq20m56sy42s2crbas6akf";
};
"bert-base-uncased-vocab.txt" = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt";
sha256 = "18rq42cmqa8zanydsbzrb34xwy4l6cz1y900r4kls57cbhvyvv07";
};
"big.txt" = fetchurl {
url = "https://norvig.com/big.txt";
sha256 = "0yz80icdly7na03cfpl0nfk5h3j3cam55rj486n03wph81ynq1ps";
};
"bert-wiki.json" = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/anthony/doc-pipeline/tokenizer.json";
hash = "sha256-i533xC8J5CDMNxBjo+p6avIM8UOcui8RmGAmK0GmfBc=";
};
"tokenizer-wiki.json" = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/anthony/doc-quicktour/tokenizer.json";
hash = "sha256-ipY9d5DR5nxoO6kj7rItueZ9AO5wq9+Nzr6GuEIfIBI=";
};
"openai-gpt-vocab.json" = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/openai-gpt-vocab.json";
sha256 = "0y40gc9bixj5rxv674br1rxmxkd3ly29p80x1596h8yywwcrpx7x";
};
"openai-gpt-merges.txt" = fetchurl {
url = "https://s3.amazonaws.com/models.huggingface.co/bert/openai-gpt-merges.txt";
sha256 = "09a754pm4djjglv3x5pkgwd6f79i2rq8ydg0f7c3q1wmwqdbba8f";
};
};
in
buildPythonPackage rec {
@ -107,16 +110,7 @@ buildPythonPackage rec {
postUnpack = ''
# Add data files for tests, otherwise tests attempt network access
mkdir $sourceRoot/tests/data
( cd $sourceRoot/tests/data
ln -s ${robertaVocab} roberta-base-vocab.json
ln -s ${robertaMerges} roberta-base-merges.txt
ln -s ${albertVocab} albert-base-v1-tokenizer.json
ln -s ${bertVocab} bert-base-uncased-vocab.txt
ln -s ${docPipelineTokenizer} bert-wiki.json
ln -s ${docQuicktourTokenizer} tokenizer-wiki.json
ln -s ${norvigBig} big.txt
ln -s ${openaiVocab} openai-gpt-vocab.json
ln -s ${openaiMerges} openai-gpt-merges.txt )
ln -s ${test-data}/* $sourceRoot/tests/data/
'';
preCheck = ''

View File

@ -68,7 +68,7 @@ in buildPythonPackage {
jinja2
networkx
filelock
] ++ lib.optionals stdenv.isx86_64 [
] ++ lib.optionals (stdenv.isLinux && stdenv.isx86_64) [
openai-triton
];

View File

@ -7,16 +7,16 @@
buildGoModule rec {
pname = "bearer";
version = "1.33.1";
version = "1.34.0";
src = fetchFromGitHub {
owner = "bearer";
repo = "bearer";
rev = "refs/tags/v${version}";
hash = "sha256-cdD4LYQZwkS5dRhmvyHkio7TXPDgfDo7kutVAGJCitc=";
hash = "sha256-JNYjBcuA2KDdhd1yF0E7mEhNJ7xQRT+wFlnAnal/P9I=";
};
vendorHash = "sha256-nh2hkwscb4EYEfumBXPFrLgxIxRlkVqBCnQZ4eMZbgg=";
vendorHash = "sha256-DykY1PFKsJ++F8ToAhyss5nAmsTOfXQXJpSo21oEhYc=";
subPackages = [
"cmd/bearer"

View File

@ -20,12 +20,12 @@
"hash": "sha256:0ns8qxcrxj9i76b93xcghl002l8vbkg7ksd435sikig62qr62gf4"
},
"5.4": {
"version": "5.4.265",
"hash": "sha256:05cvvwjiznn7hfd02qklklalg0chahvh5v18w64lcva6kzj9kbjd"
"version": "5.4.266",
"hash": "sha256:1dmcn9i3nvf1gldm1a32gnl5ybwbk2lizb3wa4gc06g7dxz2y1ys"
},
"4.19": {
"version": "4.19.303",
"hash": "sha256:0dlbl47xs7z4yf9cxbxqzd7zs1f9070jr6ck231wgppa6lwwwb82"
"version": "4.19.304",
"hash": "sha256:165mljr8v1cf4vf4a4b44hx089rprkssvi2azq5wbxxg3basbind"
},
"6.6": {
"version": "6.6.10",

View File

@ -1,8 +1,8 @@
{ stdenv, lib, fetchsvn, linux
, scripts ? fetchsvn {
url = "https://www.fsfla.org/svn/fsfla/software/linux-libre/releases/branches/";
rev = "19459";
sha256 = "12qx165i6dp9mrsbmizw6ynyxwvq11dmwz00xgy5qgr4ag3y4z4c";
rev = "19473";
sha256 = "0k9pgjg6k9j00x4m3g6chnhgznr5r1yyqd9x8q7a9q9j88vygszs";
}
, ...
}:

View File

@ -18,11 +18,11 @@ let
'';
in stdenv.mkDerivation rec {
pname = "keycloak";
version = "23.0.3";
version = "23.0.4";
src = fetchzip {
url = "https://github.com/keycloak/keycloak/releases/download/${version}/keycloak-${version}.zip";
hash = "sha256-5K8+pfn1zoXzBWJevZBx+9kZmefs1AvPoshOKP/dkNY=";
hash = "sha256-qvgYH/e+V++Tk39sgELTiUqyoEbBuUoCRNaCiM8ZuoA=";
};
nativeBuildInputs = [ makeWrapper jre ];

View File

@ -11,7 +11,7 @@ maven.buildMavenPackage rec {
hash = "sha256-pacmx5w1VVWz3HmHO6sc2friNUpzo4zyJI1/TQgCXlc=";
};
mvnHash = "sha256-rwAc2KtKo4vJ0JWwPquMyt+FHVNTmMpzBPbo8lWDN/A=";
mvnHash = "sha256-RjERY434UL9z/gNZFV+wMTITCmTPGanwu61L8sEGaKY=";
installPhase = ''
runHook preInstall

View File

@ -8,7 +8,7 @@
buildGoModule rec {
pname = "telegraf";
version = "1.29.1";
version = "1.29.2";
subPackages = [ "cmd/telegraf" ];
@ -16,10 +16,10 @@ buildGoModule rec {
owner = "influxdata";
repo = "telegraf";
rev = "v${version}";
hash = "sha256-iEVVMARdt3gibahxU9snwo13yi6gINWWdhFkTHLYAuU=";
hash = "sha256-Z2+G4H1O4e77V9jfW+REK4PGdJgoPz+JgLxX/WqBoaY=";
};
vendorHash = "sha256-R6+GKyGD7tUulOA6qEPUlSMj2/zXdLmmrX1HubLNCEc=";
vendorHash = "sha256-mPw3KfQy9DRqv8E6zzYAbeUaLaNfiNPU77ic+JqqBuM=";
proxyVendor = true;
ldflags = [

View File

@ -10,7 +10,7 @@ let
patch -p1 < ${./hello.patch}
'';
});
checkpointBuiltHello = checkpointBuildTools.mkCheckpointedBuild patchedHello baseHelloArtifacts;
checkpointBuiltHello = checkpointBuildTools.mkCheckpointBuild patchedHello baseHelloArtifacts;
checkpointBuiltHelloWithCheck = checkpointBuiltHello.overrideAttrs (old: {
doCheck = true;
@ -41,7 +41,7 @@ let
'';
});
checkpointBuiltHelloWithRemovedFile = checkpointBuildTools.mkCheckpointedBuild patchedHelloRemoveFile baseHelloRemoveFileArtifacts;
checkpointBuiltHelloWithRemovedFile = checkpointBuildTools.mkCheckpointBuild patchedHelloRemoveFile baseHelloRemoveFileArtifacts;
in
stdenv.mkDerivation {
name = "patched-hello-returns-correct-output";

View File

@ -113,7 +113,7 @@ with pkgs;
install-shell-files = callPackage ./install-shell-files {};
checkpoint-build = callPackage ./checkpointBuild {};
checkpointBuildTools = callPackage ./checkpointBuild {};
kernel-config = callPackage ./kernel.nix {};

View File

@ -28,6 +28,8 @@ These checks are performed by this tool:
- Each package directory must not refer to files outside itself using symlinks or Nix path expressions.
### Nix evaluation checks
Evaluate Nixpkgs with `system` set to `x86_64-linux` and check that:
- For each package directory, the `pkgs.${name}` attribute must be defined as `callPackage pkgs/by-name/${shard}/${name}/package.nix args` for some `args`.
- For each package directory, `pkgs.lib.isDerivation pkgs.${name}` must be `true`.

View File

@ -1,11 +1,7 @@
# Takes a path to nixpkgs and a path to the json-encoded list of attributes to check.
# Returns an attribute set containing information on each requested attribute.
# If the attribute is missing from Nixpkgs it's also missing from the result.
#
# The returned information is an attribute set with:
# - call_package_path: The <path> from `<attr> = callPackage <path> { ... }`,
# or null if it's not defined as with callPackage, or if the <path> is not a path
# - is_derivation: The result of `lib.isDerivation <attr>`
# Returns a value containing information on each requested attribute,
# which is decoded on the Rust side.
# See ./eval.rs for the meaning of the returned values
{
attrsPath,
nixpkgsPath,
@ -13,70 +9,85 @@
let
attrs = builtins.fromJSON (builtins.readFile attrsPath);
# This overlay mocks callPackage to persist the path of the first argument
callPackageOverlay = self: super: {
nixpkgsPathLength = builtins.stringLength (toString nixpkgsPath) + 1;
removeNixpkgsPrefix = builtins.substring nixpkgsPathLength (-1);
# We need access to the `callPackage` arguments of each attribute.
# The only way to do so is to override `callPackage` with our own version that adds this information to the result,
# and then try to access this information.
overlay = final: prev: {
# Information for attributes defined using `callPackage`
callPackage = fn: args:
let
result = super.callPackage fn args;
variantInfo._attributeVariant = {
# These names are used by the deserializer on the Rust side
CallPackage.path =
addVariantInfo (prev.callPackage fn args) {
Manual = {
path =
if builtins.isPath fn then
toString fn
removeNixpkgsPrefix (toString fn)
else
null;
CallPackage.empty_arg =
empty_arg =
args == { };
};
in
if builtins.isAttrs result then
# If this was the last overlay to be applied, we could just only return the `_callPackagePath`,
# but that's not the case because stdenv has another overlays on top of user-provided ones.
# So to not break the stdenv build we need to return the mostly proper result here
result // variantInfo
else
# It's very rare that callPackage doesn't return an attribute set, but it can occur.
variantInfo;
};
# Information for attributes that are auto-called from pkgs/by-name.
# This internal attribute is only used by pkgs/by-name
_internalCallByNamePackageFile = file:
let
result = super._internalCallByNamePackageFile file;
variantInfo._attributeVariant = {
# This name is used by the deserializer on the Rust side
AutoCalled = null;
};
in
if builtins.isAttrs result then
# If this was the last overlay to be applied, we could just only return the `_callPackagePath`,
# but that's not the case because stdenv has another overlays on top of user-provided ones.
# So to not break the stdenv build we need to return the mostly proper result here
result // variantInfo
else
# It's very rare that callPackage doesn't return an attribute set, but it can occur.
variantInfo;
addVariantInfo (prev._internalCallByNamePackageFile file) {
Auto = null;
};
};
# We can't just replace attribute values with their info in the overlay,
# because attributes can depend on other attributes, so this would break evaluation.
addVariantInfo = value: variant:
if builtins.isAttrs value then
value // {
_callPackageVariant = variant;
}
else
# It's very rare that callPackage doesn't return an attribute set, but it can occur.
# In such a case we can't really return anything sensible that would include the info,
# so just don't return the info and let the consumer handle it.
value;
pkgs = import nixpkgsPath {
# Don't let the users home directory influence this result
config = { };
overlays = [ callPackageOverlay ];
overlays = [ overlay ];
# We check evaluation and callPackage only for x86_64-linux.
# Not ideal, but hard to fix
system = "x86_64-linux";
};
attrInfo = attr:
let
value = pkgs.${attr};
in
{
# These names are used by the deserializer on the Rust side
variant = value._attributeVariant or { Other = null; };
is_derivation = pkgs.lib.isDerivation value;
};
attrInfo = name: value:
if ! builtins.isAttrs value then
{
NonAttributeSet = null;
}
else if ! value ? _callPackageVariant then
{
NonCallPackage = null;
}
else
{
CallPackage = {
call_package_variant = value._callPackageVariant;
is_derivation = pkgs.lib.isDerivation value;
};
};
attrInfos = builtins.listToAttrs (map (name: {
inherit name;
value = attrInfo name;
}) attrs);
attrInfos = map (name: [
name
(
if ! pkgs ? ${name} then
{ Missing = null; }
else
{ Existing = attrInfo name pkgs.${name}; }
)
]) attrs;
in
# Filter out attributes not in Nixpkgs
builtins.intersectAttrs pkgs attrInfos
attrInfos

View File

@ -6,33 +6,48 @@ use std::path::Path;
use anyhow::Context;
use serde::Deserialize;
use std::collections::HashMap;
use std::path::PathBuf;
use std::process;
use tempfile::NamedTempFile;
/// Attribute set of this structure is returned by eval.nix
#[derive(Deserialize)]
struct AttributeInfo {
variant: AttributeVariant,
enum ByNameAttribute {
/// The attribute doesn't exist at all
Missing,
Existing(AttributeInfo),
}
#[derive(Deserialize)]
enum AttributeInfo {
/// The attribute exists, but its value isn't an attribute set
NonAttributeSet,
/// The attribute exists, but its value isn't defined using callPackage
NonCallPackage,
/// The attribute exists and its value is an attribute set
CallPackage(CallPackageInfo),
}
#[derive(Deserialize)]
struct CallPackageInfo {
call_package_variant: CallPackageVariant,
/// Whether the attribute is a derivation (`lib.isDerivation`)
is_derivation: bool,
}
#[derive(Deserialize)]
enum AttributeVariant {
enum CallPackageVariant {
/// The attribute is auto-called as pkgs.callPackage using pkgs/by-name,
/// and it is not overridden by a definition in all-packages.nix
AutoCalled,
Auto,
/// The attribute is defined as a pkgs.callPackage <path> <args>,
/// and overridden by all-packages.nix
CallPackage {
Manual {
/// The <path> argument or None if it's not a path
path: Option<PathBuf>,
/// true if <args> is { }
empty_arg: bool,
},
/// The attribute is not defined as pkgs.callPackage
Other,
}
/// Check that the Nixpkgs attribute values corresponding to the packages in pkgs/by-name are
@ -45,20 +60,22 @@ pub fn check_values(
) -> validation::Result<ratchet::Nixpkgs> {
// Write the list of packages we need to check into a temporary JSON file.
// This can then get read by the Nix evaluation.
let attrs_file = NamedTempFile::new().context("Failed to create a temporary file")?;
let attrs_file = NamedTempFile::new().with_context(|| "Failed to create a temporary file")?;
// We need to canonicalise this path because if it's a symlink (which can be the case on
// Darwin), Nix would need to read both the symlink and the target path, therefore need 2
// NIX_PATH entries for restrict-eval. But if we resolve the symlinks then only one predictable
// entry is needed.
let attrs_file_path = attrs_file.path().canonicalize()?;
serde_json::to_writer(&attrs_file, &package_names).context(format!(
"Failed to serialise the package names to the temporary path {}",
attrs_file_path.display()
))?;
serde_json::to_writer(&attrs_file, &package_names).with_context(|| {
format!(
"Failed to serialise the package names to the temporary path {}",
attrs_file_path.display()
)
})?;
let expr_path = std::env::var("NIX_CHECK_BY_NAME_EXPR_PATH")
.context("Could not get environment variable NIX_CHECK_BY_NAME_EXPR_PATH")?;
.with_context(|| "Could not get environment variable NIX_CHECK_BY_NAME_EXPR_PATH")?;
// With restrict-eval, only paths in NIX_PATH can be accessed, so we explicitly specify the
# ones needed
let mut command = process::Command::new("nix-instantiate");
@ -97,80 +114,96 @@ pub fn check_values(
let result = command
.output()
.context(format!("Failed to run command {command:?}"))?;
.with_context(|| format!("Failed to run command {command:?}"))?;
if !result.status.success() {
anyhow::bail!("Failed to run command {command:?}");
}
// Parse the resulting JSON value
let actual_files: HashMap<String, AttributeInfo> = serde_json::from_slice(&result.stdout)
.context(format!(
"Failed to deserialise {}",
String::from_utf8_lossy(&result.stdout)
))?;
let attributes: Vec<(String, ByNameAttribute)> = serde_json::from_slice(&result.stdout)
.with_context(|| {
format!(
"Failed to deserialise {}",
String::from_utf8_lossy(&result.stdout)
)
})?;
Ok(
validation::sequence(package_names.into_iter().map(|package_name| {
let relative_package_file = structure::relative_file_for_package(&package_name);
let absolute_package_file = nixpkgs_path.join(&relative_package_file);
let check_result = validation::sequence(attributes.into_iter().map(
|(attribute_name, attribute_value)| {
let relative_package_file = structure::relative_file_for_package(&attribute_name);
if let Some(attribute_info) = actual_files.get(&package_name) {
let check_result = if !attribute_info.is_derivation {
NixpkgsProblem::NonDerivation {
relative_package_file: relative_package_file.clone(),
package_name: package_name.clone(),
}
.into()
} else {
Success(())
};
use ratchet::RatchetState::*;
use AttributeInfo::*;
use ByNameAttribute::*;
use CallPackageVariant::*;
let check_result = check_result.and(match &attribute_info.variant {
AttributeVariant::AutoCalled => Success(ratchet::Package {
empty_non_auto_called: ratchet::EmptyNonAutoCalled::Valid,
}),
AttributeVariant::CallPackage { path, empty_arg } => {
let correct_file = if let Some(call_package_path) = path {
absolute_package_file == *call_package_path
} else {
false
};
if correct_file {
Success(ratchet::Package {
// Empty arguments for non-auto-called packages are not allowed anymore.
empty_non_auto_called: if *empty_arg {
ratchet::EmptyNonAutoCalled::Invalid
} else {
ratchet::EmptyNonAutoCalled::Valid
},
})
} else {
NixpkgsProblem::WrongCallPackage {
relative_package_file: relative_package_file.clone(),
package_name: package_name.clone(),
}
.into()
}
}
AttributeVariant::Other => NixpkgsProblem::WrongCallPackage {
relative_package_file: relative_package_file.clone(),
package_name: package_name.clone(),
}
.into(),
});
check_result.map(|value| (package_name.clone(), value))
} else {
NixpkgsProblem::UndefinedAttr {
let check_result = match attribute_value {
Missing => NixpkgsProblem::UndefinedAttr {
relative_package_file: relative_package_file.clone(),
package_name: package_name.clone(),
package_name: attribute_name.clone(),
}
.into()
}
}))
.map(|elems| ratchet::Nixpkgs {
packages: elems.into_iter().collect(),
}),
)
.into(),
Existing(NonAttributeSet) => NixpkgsProblem::NonDerivation {
relative_package_file: relative_package_file.clone(),
package_name: attribute_name.clone(),
}
.into(),
Existing(NonCallPackage) => NixpkgsProblem::WrongCallPackage {
relative_package_file: relative_package_file.clone(),
package_name: attribute_name.clone(),
}
.into(),
Existing(CallPackage(CallPackageInfo {
is_derivation,
call_package_variant,
})) => {
let check_result = if !is_derivation {
NixpkgsProblem::NonDerivation {
relative_package_file: relative_package_file.clone(),
package_name: attribute_name.clone(),
}
.into()
} else {
Success(())
};
check_result.and(match &call_package_variant {
Auto => Success(ratchet::Package {
empty_non_auto_called: Tight,
}),
Manual { path, empty_arg } => {
let correct_file = if let Some(call_package_path) = path {
relative_package_file == *call_package_path
} else {
false
};
if correct_file {
Success(ratchet::Package {
// Empty arguments for non-auto-called packages are not allowed anymore.
empty_non_auto_called: if *empty_arg {
Loose(ratchet::EmptyNonAutoCalled)
} else {
Tight
},
})
} else {
NixpkgsProblem::WrongCallPackage {
relative_package_file: relative_package_file.clone(),
package_name: attribute_name.clone(),
}
.into()
}
}
})
}
};
check_result.map(|value| (attribute_name.clone(), value))
},
));
Ok(check_result.map(|elems| ratchet::Nixpkgs {
package_names,
package_map: elems.into_iter().collect(),
}))
}

View File

@ -38,15 +38,13 @@ pub struct Args {
/// Path to the base Nixpkgs to run ratchet checks against.
/// For PRs, this should be set to a checkout of the PRs base branch.
/// If not specified, no ratchet checks will be performed.
/// However, this flag will become required once CI uses it.
#[arg(long)]
base: Option<PathBuf>,
base: PathBuf,
}
fn main() -> ExitCode {
let args = Args::parse();
match process(args.base.as_deref(), &args.nixpkgs, &[], &mut io::stderr()) {
match process(&args.base, &args.nixpkgs, &[], &mut io::stderr()) {
Ok(true) => {
eprintln!("{}", "Validated successfully".green());
ExitCode::SUCCESS
@ -77,7 +75,7 @@ fn main() -> ExitCode {
/// - `Ok(false)` if there are problems, all of which will be written to `error_writer`.
/// - `Ok(true)` if there are no problems
pub fn process<W: io::Write>(
base_nixpkgs: Option<&Path>,
base_nixpkgs: &Path,
main_nixpkgs: &Path,
eval_accessible_paths: &[&Path],
error_writer: &mut W,
@ -87,18 +85,14 @@ pub fn process<W: io::Write>(
let check_result = main_result.result_map(|nixpkgs_version| {
// If the main Nixpkgs doesn't have any problems, run the ratchet checks against the base
// Nixpkgs
if let Some(base) = base_nixpkgs {
check_nixpkgs(base, eval_accessible_paths, error_writer)?.result_map(
|base_nixpkgs_version| {
Ok(ratchet::Nixpkgs::compare(
Some(base_nixpkgs_version),
nixpkgs_version,
))
},
)
} else {
Ok(ratchet::Nixpkgs::compare(None, nixpkgs_version))
}
check_nixpkgs(base_nixpkgs, eval_accessible_paths, error_writer)?.result_map(
|base_nixpkgs_version| {
Ok(ratchet::Nixpkgs::compare(
base_nixpkgs_version,
nixpkgs_version,
))
},
)
})?;
match check_result {
@ -123,10 +117,12 @@ pub fn check_nixpkgs<W: io::Write>(
error_writer: &mut W,
) -> validation::Result<ratchet::Nixpkgs> {
Ok({
let nixpkgs_path = nixpkgs_path.canonicalize().context(format!(
"Nixpkgs path {} could not be resolved",
nixpkgs_path.display()
))?;
let nixpkgs_path = nixpkgs_path.canonicalize().with_context(|| {
format!(
"Nixpkgs path {} could not be resolved",
nixpkgs_path.display()
)
})?;
if !nixpkgs_path.join(utils::BASE_SUBPATH).exists() {
writeln!(
@ -234,16 +230,16 @@ mod tests {
let base_path = path.join("base");
let base_nixpkgs = if base_path.exists() {
Some(base_path.as_path())
base_path.as_path()
} else {
None
Path::new("tests/empty-base")
};
// We don't want coloring to mess up the tests
let writer = temp_env::with_var("NO_COLOR", Some("1"), || -> anyhow::Result<_> {
let mut writer = vec![];
process(base_nixpkgs, &path, &[&extra_nix_path], &mut writer)
.context(format!("Failed test case {name}"))?;
.with_context(|| format!("Failed test case {name}"))?;
Ok(writer)
})?;

View File

@ -10,31 +10,20 @@ use std::collections::HashMap;
/// The ratchet value for the entirety of Nixpkgs.
#[derive(Default)]
pub struct Nixpkgs {
/// The ratchet values for each package in `pkgs/by-name`
pub packages: HashMap<String, Package>,
/// Sorted list of attributes in package_map
pub package_names: Vec<String>,
/// The ratchet values for all packages
pub package_map: HashMap<String, Package>,
}
impl Nixpkgs {
/// Validates the ratchet checks for Nixpkgs
pub fn compare(optional_from: Option<Self>, to: Self) -> Validation<()> {
pub fn compare(from: Self, to: Self) -> Validation<()> {
validation::sequence_(
// We only loop over the current attributes,
// we don't need to check ones that were removed
to.packages.into_iter().map(|(name, attr_to)| {
let attr_from = if let Some(from) = &optional_from {
from.packages.get(&name)
} else {
// This pretends that if there's no base version to compare against, all
// attributes existed without conforming to the new strictness check for
// backwards compatibility.
// TODO: Remove this case. This is only needed because the `--base`
// argument is still optional, which doesn't need to be once CI is updated
// to pass it.
Some(&Package {
empty_non_auto_called: EmptyNonAutoCalled::Invalid,
})
};
Package::compare(&name, attr_from, &attr_to)
to.package_names.into_iter().map(|name| {
Package::compare(&name, from.package_map.get(&name), &to.package_map[&name])
}),
)
}
@ -43,13 +32,13 @@ impl Nixpkgs {
/// The ratchet value for a single package in `pkgs/by-name`
pub struct Package {
/// The ratchet value for the check for non-auto-called empty arguments
pub empty_non_auto_called: EmptyNonAutoCalled,
pub empty_non_auto_called: RatchetState<EmptyNonAutoCalled>,
}
impl Package {
/// Validates the ratchet checks for a single package defined in `pkgs/by-name`
pub fn compare(name: &str, optional_from: Option<&Self>, to: &Self) -> Validation<()> {
EmptyNonAutoCalled::compare(
RatchetState::<EmptyNonAutoCalled>::compare(
name,
optional_from.map(|x| &x.empty_non_auto_called),
&to.empty_non_auto_called,
@ -57,29 +46,59 @@ impl Package {
}
}
/// The ratchet value of a single package in `pkgs/by-name`
/// The ratchet state of a generic ratchet check.
pub enum RatchetState<Context> {
/// The ratchet is loose, it can be tightened more.
/// In other words, this is the legacy state we're trying to move away from.
/// Introducing new instances is not allowed but previous instances will continue to be allowed.
/// The `Context` is context for error messages in case a new instance of this state is
/// introduced
Loose(Context),
/// The ratchet is tight, it can't be tightened any further.
/// This is either because we already use the latest state, or because the ratchet isn't
/// relevant.
Tight,
}
/// A trait that can convert an attribute-specific error context into a NixpkgsProblem
pub trait ToNixpkgsProblem {
/// How to convert an attribute-specific error context into a NixpkgsProblem
fn to_nixpkgs_problem(name: &str, context: &Self, existed_before: bool) -> NixpkgsProblem;
}
impl<Context: ToNixpkgsProblem> RatchetState<Context> {
/// Compare the previous ratchet state of an attribute to the new state.
/// The previous state may be `None` in case the attribute is new.
fn compare(name: &str, optional_from: Option<&Self>, to: &Self) -> Validation<()> {
// If we don't have a previous state, enforce a tight ratchet
let from = optional_from.unwrap_or(&RatchetState::Tight);
match (from, to) {
// Always okay to keep it tight or tighten the ratchet
(_, RatchetState::Tight) => Success(()),
// Grandfathering policy for a loose ratchet
(RatchetState::Loose { .. }, RatchetState::Loose { .. }) => Success(()),
// Loosening a ratchet is not allowed
(RatchetState::Tight, RatchetState::Loose(context)) => {
Context::to_nixpkgs_problem(name, context, optional_from.is_some()).into()
}
}
}
}
/// The ratchet value of an attribute
/// for the non-auto-called empty argument check of a single attribute.
///
/// This checks that packages defined in `pkgs/by-name` cannot be overridden
/// with an empty second argument like `callPackage ... { }`.
#[derive(PartialEq, PartialOrd)]
pub enum EmptyNonAutoCalled {
Invalid,
Valid,
}
pub struct EmptyNonAutoCalled;
impl EmptyNonAutoCalled {
/// Validates the non-auto-called empty argument ratchet check for a single package defined in `pkgs/by-name`
fn compare(name: &str, optional_from: Option<&Self>, to: &Self) -> Validation<()> {
let from = optional_from.unwrap_or(&Self::Valid);
if to >= from {
Success(())
} else {
NixpkgsProblem::WrongCallPackage {
relative_package_file: structure::relative_file_for_package(name),
package_name: name.to_owned(),
}
.into()
impl ToNixpkgsProblem for EmptyNonAutoCalled {
fn to_nixpkgs_problem(name: &str, _context: &Self, _existed_before: bool) -> NixpkgsProblem {
NixpkgsProblem::WrongCallPackage {
relative_package_file: structure::relative_file_for_package(name),
package_name: name.to_owned(),
}
}
}

View File

@ -17,10 +17,12 @@ pub fn check_references(
) -> validation::Result<()> {
// The empty argument here is the subpath under the package directory to check
// An empty one means the package directory itself
check_path(relative_package_dir, absolute_package_dir, Path::new("")).context(format!(
"While checking the references in package directory {}",
relative_package_dir.display()
))
check_path(relative_package_dir, absolute_package_dir, Path::new("")).with_context(|| {
format!(
"While checking the references in package directory {}",
relative_package_dir.display()
)
})
}
/// Checks for a specific path to not have references outside
@ -62,7 +64,9 @@ fn check_path(
.map(|entry| {
let entry_subpath = subpath.join(entry.file_name());
check_path(relative_package_dir, absolute_package_dir, &entry_subpath)
.context(format!("Error while recursing into {}", subpath.display()))
.with_context(|| {
format!("Error while recursing into {}", subpath.display())
})
})
.collect_vec()?,
)
@ -70,8 +74,8 @@ fn check_path(
// Only check Nix files
if let Some(ext) = path.extension() {
if ext == OsStr::new("nix") {
check_nix_file(relative_package_dir, absolute_package_dir, subpath).context(
format!("Error while checking Nix file {}", subpath.display()),
check_nix_file(relative_package_dir, absolute_package_dir, subpath).with_context(
|| format!("Error while checking Nix file {}", subpath.display()),
)?
} else {
Success(())
@ -93,13 +97,12 @@ fn check_nix_file(
subpath: &Path,
) -> validation::Result<()> {
let path = absolute_package_dir.join(subpath);
let parent_dir = path.parent().context(format!(
"Could not get parent of path {}",
subpath.display()
))?;
let parent_dir = path
.parent()
.with_context(|| format!("Could not get parent of path {}", subpath.display()))?;
let contents =
read_to_string(&path).context(format!("Could not read file {}", subpath.display()))?;
let contents = read_to_string(&path)
.with_context(|| format!("Could not read file {}", subpath.display()))?;
let root = Root::parse(&contents);
if let Some(error) = root.errors().first() {

View File

@ -10,10 +10,10 @@ pub const PACKAGE_NIX_FILENAME: &str = "package.nix";
pub fn read_dir_sorted(base_dir: &Path) -> anyhow::Result<Vec<fs::DirEntry>> {
let listing = base_dir
.read_dir()
.context(format!("Could not list directory {}", base_dir.display()))?;
.with_context(|| format!("Could not list directory {}", base_dir.display()))?;
let mut shard_entries = listing
.collect::<io::Result<Vec<_>>>()
.context(format!("Could not list directory {}", base_dir.display()))?;
.with_context(|| format!("Could not list directory {}", base_dir.display()))?;
shard_entries.sort_by_key(|entry| entry.file_name());
Ok(shard_entries)
}

View File

@ -0,0 +1 @@
import ../mock-nixpkgs.nix { root = ./.; }

View File

@ -19,6 +19,8 @@ It returns a Nixpkgs-like function that can be auto-called and evaluates to an a
overlays ? [],
# Passed by the checker to make sure a real Nixpkgs isn't influenced by impurities
config ? {},
# Passed by the checker to make sure a real Nixpkgs isn't influenced by impurities
system ? null,
}:
let

View File

@ -5,13 +5,13 @@
buildGoModule rec {
pname = "ddns-go";
version = "5.7.0";
version = "5.7.1";
src = fetchFromGitHub {
owner = "jeessy2";
repo = pname;
rev = "v${version}";
hash = "sha256-/GZxPM0f1W72OtpEknw0TLQ1eFDF5C98umX0Q8MX46s=";
hash = "sha256-PKshYKywqL706pVgruWQ9M0QbK2btKu28+wmnlFdDgE=";
};
vendorHash = "sha256-/kKFMo4PRWwXUuurNHMG36TV3EpcEikgf03/y/aKpXo=";

View File

@ -14,17 +14,17 @@ in
assert stdenv.isLinux; # better than `called with unexpected argument 'enableJavaFX'`
mavenJdk.buildMavenPackage rec {
pname = "cryptomator";
version = "1.11.0";
version = "1.11.1";
src = fetchFromGitHub {
owner = "cryptomator";
repo = "cryptomator";
rev = version;
hash = "sha256-NMNlDEUpwKUywzhXhxlNX7NiE+6wOov2Yt8nTfbKTNI=";
hash = "sha256-Y+oG2NF4Vsklp1W22Xv+XrkY6vwn23FkzAXG/5828Og=";
};
mvnParameters = "-Dmaven.test.skip=true -Plinux";
mvnHash = "sha256-cmwU9k7TRRJ07bT1EmY3pIBkvvqmFyE7WJeVL7VFDyc=";
mvnHash = "sha256-cXmnJHgKW6SGnhHFuFJP/DKNmFacfHbC3nQ2uVdIvUE=";
preBuild = ''
VERSION=${version}

View File

@ -41457,6 +41457,8 @@ with pkgs;
xpad = callPackage ../applications/misc/xpad { };
xpipe = callPackage ../applications/networking/xpipe { };
xsane = callPackage ../applications/graphics/sane/xsane.nix { };
xsser = python3Packages.callPackage ../tools/security/xsser { };

View File

@ -342,6 +342,7 @@ mapAliases ({
pymyq = python-myq; # added 2023-10-20
python-myq = throw "python-myq has been removed, as the service provider has decided to block its API requests"; # added 2023-12-07
pyqt4 = throw "pyqt4 has been removed, because it depended on the long EOL qt4"; # added 2022-06-09
pyqt5_with_qtwebkit = pyqt5-webkit; # added 2024-01-07
pyramid_beaker = pyramid-beaker; # added 2023-08-23
pyramid_chameleon = pyramid-chameleon; # added 2023-08-23
pyramid_exclog = pyramid-exclog; # added 2023-08-24

View File

@ -1487,6 +1487,8 @@ self: super: with self; {
bip32 = callPackage ../development/python-modules/bip32 { };
birch = callPackage ../development/python-modules/birch { };
bitarray = callPackage ../development/python-modules/bitarray { };
bitbox02 = callPackage ../development/python-modules/bitbox02 { };
@ -11008,11 +11010,11 @@ self: super: with self; {
};
/*
`pyqt5_with_qtwebkit` should not be used by python libraries in
`pyqt5-webkit` should not be used by python libraries in
pkgs/development/python-modules/*. Putting this attribute in
`propagatedBuildInputs` may cause collisions.
*/
pyqt5_with_qtwebkit = self.pyqt5.override {
pyqt5-webkit = self.pyqt5.override {
withWebKit = true;
};
@ -13814,6 +13816,8 @@ self: super: with self; {
strawberry-graphql = callPackage ../development/python-modules/strawberry-graphql { };
strct = callPackage ../development/python-modules/strct { };
streamdeck = callPackage ../development/python-modules/streamdeck { };
streaming-form-data = callPackage ../development/python-modules/streaming-form-data { };