gpt4all-nix/gpt4all-chat.nix
Commit 47f7ae407d by polygon: Build 2.5.4 release by default, add nightly option
Running the flake as before will now result in the latest release
being run (currently 2.5.4). To keep using the nightly version (that
is, the one pinned in the flake lock), use one of these outputs (a
sketch of the corresponding flake wiring follows below):
* gpt4all-chat-nightly
* gpt4all-chat-avx-nightly
2023-12-29 14:55:04 +01:00
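
For illustration, a minimal flake.nix sketch that would match the behaviour
described above. It is not the actual contents of this repository: the input
names, the nixpkgs branch, the x86_64-linux restriction, the v2.5.4 tag, and
the use of qt6Packages.callPackage are all assumptions.

{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    # Nightly: whatever revision the flake lock currently pins.
    gpt4all = { url = "github:nomic-ai/gpt4all"; flake = false; };
    # Release: assumed to be tagged v2.5.4 upstream.
    gpt4all-release = { url = "github:nomic-ai/gpt4all/v2.5.4"; flake = false; };
  };

  outputs = { self, nixpkgs, gpt4all, gpt4all-release }:
    let
      pkgs = nixpkgs.legacyPackages.x86_64-linux;
    in {
      packages.x86_64-linux = rec {
        # Pinned 2.5.4 release build.
        gpt4all-chat = pkgs.qt6Packages.callPackage ./gpt4all-chat.nix {
          src = gpt4all-release;
          version = "2.5.4";
        };
        # Nightly build from the locked input; version defaults to "nightly".
        gpt4all-chat-nightly = pkgs.qt6Packages.callPackage ./gpt4all-chat.nix {
          src = gpt4all;
        };
        # AVX-only variant for CPUs without AVX2.
        gpt4all-chat-avx-nightly = gpt4all-chat-nightly.override { withAvx2 = false; };
        # Running the flake "as before" now gets the release.
        default = gpt4all-chat;
      };
    };
}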

{ src
, lib
, stdenv
, cmake
, fmt
, qtwayland
, qtquicktimeline
, qtsvg
, qthttpserver
, qtwebengine
, qt5compat
, shaderc
, vulkan-headers
, wayland
, wrapQtAppsHook
, version ? "nightly"
, withAvx2 ? true
}:
stdenv.mkDerivation {
  pname = "gpt4all-chat";
  inherit src version;
  # Upstream hardcodes CMAKE_INSTALL_PREFIX to a directory inside the build
  # tree; strip that so the standard Nix install prefix is used.
  postPatch = ''
    substituteInPlace CMakeLists.txt \
      --replace 'set(CMAKE_INSTALL_PREFIX ''${CMAKE_BINARY_DIR}/install)' ""
  '';
  nativeBuildInputs = [
    wrapQtAppsHook
    cmake
  ];
  patches = [ ];
  buildInputs = [
    fmt
    qtwayland
    qtquicktimeline
    qtsvg
    qthttpserver
    qtwebengine
    qt5compat
    shaderc
    vulkan-headers
    wayland
  ];
  # Use the packaged vulkan-headers and fmt instead of Kompute's vendored
  # copies, and skip its Vulkan version check.
  cmakeFlags = [
    "-DKOMPUTE_OPT_USE_BUILT_IN_VULKAN_HEADER=OFF"
    "-DKOMPUTE_OPT_DISABLE_VULKAN_VERSION_CHECK=ON"
    "-DKOMPUTE_OPT_USE_BUILT_IN_FMT=OFF"
  ]
  # Only target plain AVX (no AVX2) for the non-AVX2 variant.
  ++ lib.optionals (!withAvx2) [ "-DGPT4ALL_AVX_ONLY=ON" ];
  # The chat client lives in the gpt4all-chat subdirectory of the source tree.
  setSourceRoot = "sourceRoot=`pwd`/source/gpt4all-chat";
  meta = with lib; {
    description = "GPT4All chat client";
    homepage = "https://github.com/nomic-ai/gpt4all-chat";
    license = licenses.mit;
    maintainers = with maintainers; [ ];
  };
}
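
For reference, a minimal sketch of using this file outside the flake. The
repository layout (chat client in the gpt4all-chat subdirectory of
nomic-ai/gpt4all) follows from setSourceRoot above; the release tag and the
placeholder hash are assumptions that would need to be filled in for a real
build.

let
  pkgs = import <nixpkgs> { };
  # The derivation expects the gpt4all monorepo as `src` and descends into the
  # gpt4all-chat subdirectory itself; rev and hash below are placeholders.
  src = pkgs.fetchFromGitHub {
    owner = "nomic-ai";
    repo = "gpt4all";
    rev = "v2.5.4";            # assumed release tag
    hash = pkgs.lib.fakeHash;  # replace with the real hash on first build
  };
in
pkgs.qt6Packages.callPackage ./gpt4all-chat.nix {
  inherit src;
  version = "2.5.4";
  withAvx2 = false;  # example: AVX-only build for CPUs without AVX2
}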