python312Packages.openllm: 0.5.7 -> 0.6.10

Diff: https://github.com/bentoml/OpenLLM/compare/refs/tags/v0.5.7...v0.6.10

Changelog: https://github.com/bentoml/OpenLLM/releases/tag/v0.6.10
natsukium 2024-08-24 09:24:23 +09:00
parent 875233a798
commit 033c7e703c
2 changed files with 39 additions and 164 deletions

pkgs/development/python-modules/openllm/default.nix
@@ -1,200 +1,77 @@
 {
   lib,
   buildPythonPackage,
   hatch-fancy-pypi-readme,
+  fetchFromGitHub,
   hatch-vcs,
   hatchling,
-  pytestCheckHook,
   pythonOlder,
   accelerate,
   bentoml,
-  bitsandbytes,
-  build,
-  click,
-  ctranslate2,
-  datasets,
-  docker,
-  einops,
-  ghapi,
-  huggingface-hub,
-  hypothesis,
-  ipython,
-  jupyter,
-  jupytext,
-  nbformat,
-  notebook,
+  dulwich,
+  nvidia-ml-py,
   openai,
-  openllm-client,
-  openllm-core,
-  optimum,
-  peft,
-  pytest-mock,
-  pytest-randomly,
-  pytest-rerunfailures,
-  pytest-xdist,
-  safetensors,
-  scipy,
-  sentencepiece,
-  soundfile,
-  syrupy,
+  psutil,
+  pyaml,
+  questionary,
   tabulate,
-  tiktoken,
-  transformers,
-  triton,
-  xformers,
+  typer,
+  uv,
 }:
 
 buildPythonPackage rec {
-  inherit (openllm-core) src version;
   pname = "openllm";
+  version = "0.6.10";
   pyproject = true;
 
-  disabled = pythonOlder "3.8";
-
-  sourceRoot = "${src.name}/openllm-python";
+  disabled = pythonOlder "3.9";
+
+  src = fetchFromGitHub {
+    owner = "bentoml";
+    repo = "openllm";
+    rev = "refs/tags/v${version}";
+    hash = "sha256-4KIpe6KjbBDDUj0IjzSccxjgZyBoaUVIQJYk1+W01Vo=";
+  };
 
-  pythonRemoveDeps = [
-    # remove cuda-python as it has an unfree license
-    "cuda-python"
-    "pathlib"
-    "pip-requirements-parser"
-  ];
-
   pythonRelaxDeps = [ "openai" ];
 
   build-system = [
     hatch-fancy-pypi-readme
     hatch-vcs
     hatchling
   ];
 
-  dependencies =
-    [
-      accelerate
-      bentoml
-      bitsandbytes
-      build
-      click
-      einops
-      ghapi
-      openllm-client
-      openllm-core
-      optimum
-      safetensors
-      scipy
-      sentencepiece
-      transformers
-    ]
-    ++ bentoml.optional-dependencies.io
-    ++ tabulate.optional-dependencies.widechars
-    ++ transformers.optional-dependencies.tokenizers
-    ++ transformers.optional-dependencies.torch;
-
-  optional-dependencies = {
-    agents = [
-      # diffusers
-      soundfile
-      transformers
-    ] ++ transformers.optional-dependencies.agents;
-    awq = [
-      # autoawq
-    ];
-    baichuan = [
-      # cpm-kernels
-    ];
-    chatglm = [
-      # cpm-kernels
-    ];
-    ctranslate = [ ctranslate2 ];
-    falcon = [ xformers ];
-    fine-tune = [
-      datasets
-      huggingface-hub
-      peft
-      # trl
-    ];
-    ggml = [
-      # ctransformers
-    ];
-    gptq = [
-      # auto-gptq
-    ]; # ++ autogptq.optional-dependencies.triton;
-    grpc = [ bentoml ] ++ bentoml.optional-dependencies.grpc;
-    mpt = [ triton ];
-    openai = [
-      openai
-      tiktoken
-    ] ++ openai.optional-dependencies.datalib;
-    playground = [
-      ipython
-      jupyter
-      jupytext
-      nbformat
-      notebook
-    ];
-    starcoder = [ bitsandbytes ];
-    vllm = [
-      # vllm
-    ];
-    full =
-      with optional-dependencies;
-      (
-        agents
-        ++ awq
-        ++ baichuan
-        ++ chatglm
-        ++ ctranslate
-        ++ falcon
-        ++ fine-tune
-        ++ ggml
-        ++ gptq
-        ++ mpt
-        # disambiguate between derivation input and passthru field
-        ++ optional-dependencies.openai
-        ++ playground
-        ++ starcoder
-        ++ vllm
-      );
-    all = optional-dependencies.full;
-  };
-
-  nativeCheckInputs = [
-    docker
-    hypothesis
-    pytest-mock
-    pytest-randomly
-    pytest-rerunfailures
-    pytest-xdist
-    pytestCheckHook
-    syrupy
+  dependencies = [
+    accelerate
+    bentoml
+    dulwich
+    nvidia-ml-py
+    openai
+    psutil
+    pyaml
+    questionary
+    tabulate
+    typer
+    uv
   ];
 
-  preCheck = ''
-    export HOME=$TMPDIR
-    # skip GPUs test on CI
-    export GITHUB_ACTIONS=1
-    # disable hypothesis' deadline
-    export CI=1
-  '';
-
-  disabledTestPaths = [
-    # require network access
-    "tests/models"
-  ];
-
-  disabledTests = [
-    # incompatible with recent TypedDict
-    # https://github.com/bentoml/OpenLLM/blob/f3fd32d596253ae34c68e2e9655f19f40e05f666/openllm-python/tests/configuration_test.py#L18-L21
-    "test_missing_default"
-  ];
+  # no tests
+  doCheck = false;
 
   pythonImportsCheck = [ "openllm" ];
 
   meta = with lib; {
-    description = "Operating LLMs in production";
-    homepage = "https://github.com/bentoml/OpenLLM/tree/main/openllm-python";
-    changelog = "https://github.com/bentoml/OpenLLM/blob/${src.rev}/CHANGELOG.md";
+    description = "Run any open-source LLMs, such as Llama 3.1, Gemma, as OpenAI compatible API endpoint in the cloud";
+    homepage = "https://github.com/bentoml/OpenLLM";
+    changelog = "https://github.com/bentoml/OpenLLM/releases/tag/v${version}";
    license = licenses.asl20;
     maintainers = with maintainers; [
       happysalada
       natsukium
     ];
     mainProgram = "openllm";
   };
 }
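For a quick smoke test of the bumped package, a throwaway shell is enough; since the derivation sets mainProgram = "openllm", the CLI lands on PATH. A minimal sketch, assuming <nixpkgs> resolves to a checkout that already contains this commit (the file name shell.nix is just illustrative):

# shell.nix — illustrative smoke-test environment, not part of this commit;
# assumes <nixpkgs> points at a nixpkgs checkout containing the 0.6.10 bump.
{
  pkgs ? import <nixpkgs> { },
}:
pkgs.mkShell {
  packages = [
    # provides the `openllm` executable via meta.mainProgram
    pkgs.python312Packages.openllm
  ];
}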

pkgs/top-level/python-packages.nix
@@ -4651,9 +4651,7 @@ self: super: with self; {
   oelint-parser = callPackage ../development/python-modules/oelint-parser { };
 
-  openllm = callPackage ../development/python-modules/openllm {
-    triton = self.triton-cuda;
-  };
+  openllm = callPackage ../development/python-modules/openllm { };
 
   openllm-client = callPackage ../development/python-modules/openllm-client { };
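
The triton = self.triton-cuda; override goes away because the new expression no longer takes a triton argument. Any argument it still takes remains overridable through the function that callPackage attaches. A minimal sketch of that mechanism with a deliberately no-op override (purely illustrative, not part of this commit):

# Illustrative only: callPackage-built derivations expose .override,
# so a consumer can swap any remaining input of the new expression.
let
  pkgs = import <nixpkgs> { };
in
pkgs.python312Packages.openllm.override {
  # re-supplying the default is a no-op; a real use would pass a
  # patched or pinned openai here instead
  openai = pkgs.python312Packages.openai;
}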