Mirror of https://github.com/ilyakooo0/nixpkgs.git, synced 2024-11-19 02:44:17 +03:00
Merge pull request #254431 from happysalada/litellm_init

open-interpreter: init at 0.1.2

Commit: cf2cab4374
pkgs/development/python-modules/litellm/default.nix (new file)

@@ -0,0 +1,53 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, poetry-core
, importlib-metadata
, openai
, python-dotenv
, tiktoken
}:
let
  version = "0.1.590";
in
buildPythonPackage rec {
  pname = "litellm";
  format = "pyproject";
  inherit version;

  src = fetchFromGitHub {
    owner = "BerriAI";
    repo = "litellm";
    rev = "7cb96e86b4753008cbf8d116aca514750e98d360";
    hash = "sha256-ITMcwGjelNfNGnfBmmdu0Xwph4u0mxiFSfHnysUxWCQ=";
  };

  postPatch = ''
    rm -rf dist
  '';

  nativeBuildInputs = [
    poetry-core
  ];

  propagatedBuildInputs = [
    importlib-metadata
    openai
    python-dotenv
    tiktoken
  ];

  # the import check phase fails trying to do a network request to openai
  # pythonImportsCheck = [ "litellm" ];

  # no tests
  doCheck = false;

  meta = with lib; {
    description = "Use any LLM as a drop in replacement for gpt-3.5-turbo. Use Azure, OpenAI, Cohere, Anthropic, Ollama, VLLM, Sagemaker, HuggingFace, Replicate (100+ LLMs)";
    homepage = "https://github.com/BerriAI/litellm";
    license = licenses.mit;
    changelog = "https://github.com/BerriAI/litellm/releases/tag/v${version}";
    maintainers = with maintainers; [ happysalada ];
  };
}
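As a quick sanity check of the new module, one option is a throwaway dev shell built from this nixpkgs checkout. The sketch below is not part of the commit; it assumes the working directory is the repo root and that the default python3 interpreter is used.

# shell.nix - minimal sketch, not part of this commit: expose the new litellm
# module in a Python environment built from this checkout of nixpkgs.
{ pkgs ? import ./. { } }:

pkgs.mkShell {
  packages = [
    # python3.withPackages picks up litellm via the python-packages.nix entry below
    (pkgs.python3.withPackages (ps: [ ps.litellm ]))
  ];
}

From the repo root, `nix-shell shell.nix` followed by `python -c "import litellm"` exercises roughly what the disabled pythonImportsCheck would have done; per the comment above, expect the import itself to attempt a network request to openai.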
pkgs/development/python-modules/tokentrim/default.nix (new file)

@@ -0,0 +1,39 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, poetry-core
, tiktoken
}:

buildPythonPackage {
  pname = "tokentrim";
  version = "unstable-2023-09-07";
  format = "pyproject";

  src = fetchFromGitHub {
    owner = "KillianLucas";
    repo = "tokentrim";
    rev = "e98ad3a2ca0e321a7347f76c30be584175495139";
    hash = "sha256-95xitHnbFFaj0xPuLMWvIvuJzoCO3VSd592X1RI9h3A=";
  };

  nativeBuildInputs = [
    poetry-core
  ];

  propagatedBuildInputs = [
    tiktoken
  ];

  pythonImportsCheck = [ "tokentrim" ];

  # tests connect to openai
  doCheck = false;

  meta = with lib; {
    description = "Easily trim 'messages' arrays for use with GPTs";
    homepage = "https://github.com/KillianLucas/tokentrim";
    license = licenses.mit;
    maintainers = with maintainers; [ happysalada ];
  };
}
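Because tokentrim is pinned to an unstable snapshot, a consumer may want to bump it without patching the tree. The overlay below is a hedged sketch using the standard packageOverrides mechanism; the newer version string and rev are placeholders, not real values from upstream.

# overlay.nix - hypothetical sketch: bump tokentrim to another commit out-of-tree.
self: super: {
  python3 = super.python3.override {
    packageOverrides = pyself: pysuper: {
      tokentrim = pysuper.tokentrim.overridePythonAttrs (old: {
        version = "unstable-2023-10-01";  # placeholder date
        src = super.fetchFromGitHub {
          owner = "KillianLucas";
          repo = "tokentrim";
          rev = "0000000000000000000000000000000000000000";  # placeholder rev
          hash = super.lib.fakeHash;  # let the first build report the real hash
        };
      });
    };
  };
}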
pkgs/tools/llm/open-interpreter/default.nix (new file)

@@ -0,0 +1,52 @@
{ lib
, python3
, fetchFromGitHub
}:
let
  version = "0.1.3";
in
python3.pkgs.buildPythonApplication {
  pname = "open-interpreter";
  format = "pyproject";
  inherit version;

  src = fetchFromGitHub {
    owner = "KillianLucas";
    repo = "open-interpreter";
    rev = "v${version}";
    hash = "sha256-xmmyDIshEYql41k/7gF+ay7s3mI+iGCjr5gDfLkqLU0=";
  };

  nativeBuildInputs = [
    python3.pkgs.poetry-core
  ];

  propagatedBuildInputs = with python3.pkgs; [
    appdirs
    astor
    gitpython
    huggingface-hub
    inquirer
    litellm
    openai
    # pyreadline3 # Windows-only dependency
    python-dotenv
    rich
    six
    tiktoken
    tokentrim
    wget
  ];

  # the import check phase fails trying to do a network request to openai
  # (pulled in via litellm)
  # pythonImportsCheck = [ "interpreter" ];

  meta = with lib; {
    description = "OpenAI's Code Interpreter in your terminal, running locally";
    homepage = "https://github.com/KillianLucas/open-interpreter";
    license = licenses.mit;
    changelog = "https://github.com/KillianLucas/open-interpreter/releases/tag/v${version}";
    maintainers = with maintainers; [ happysalada ];
  };
}
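Since the application takes python3 as a function argument and is wired up with callPackage (see the all-packages.nix hunk below), the interpreter can be swapped at call time. A hedged sketch, assuming python311 provides all of the Python dependencies listed above:

# overlay sketch, not part of this commit: build open-interpreter against a
# specific CPython instead of the default python3.
final: prev: {
  open-interpreter = prev.open-interpreter.override {
    python3 = final.python311;  # assumption: every dep above is available for 3.11
  };
}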
pkgs/top-level/all-packages.nix

@@ -11542,6 +11542,8 @@ with pkgs;

   open-ecard = callPackage ../tools/security/open-ecard { };

+  open-interpreter = callPackage ../tools/llm/open-interpreter { };
+
   openjade = callPackage ../tools/text/sgml/openjade { };

   openhantek6022 = libsForQt5.callPackage ../applications/science/electronics/openhantek6022 { };
pkgs/top-level/python-packages.nix

@@ -6198,6 +6198,8 @@ self: super: with self; {

   lit = callPackage ../development/python-modules/lit { };

+  litellm = callPackage ../development/python-modules/litellm { };
+
   litemapy = callPackage ../development/python-modules/litemapy { };

   littleutils = callPackage ../development/python-modules/littleutils { };

@@ -12957,6 +12959,8 @@ self: super: with self; {

   tokenlib = callPackage ../development/python-modules/tokenlib { };

+  tokentrim = callPackage ../development/python-modules/tokentrim { };
+
   tololib = callPackage ../development/python-modules/tololib { };

   toml = callPackage ../development/python-modules/toml { };
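With the three registrations above in place, the new attributes evaluate like any other member of the package set. A minimal sketch of referencing them from an ad-hoc expression, assuming a checkout of this branch at ./.:

# check.nix - hypothetical helper: evaluate/build the three new attributes.
{ pkgs ? import ./. { } }:

{
  inherit (pkgs) open-interpreter;
  inherit (pkgs.python3Packages) litellm tokentrim;
}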