mirror of
https://github.com/juspay/services-flake.git
synced 2024-10-05 16:37:38 +03:00
chore(example/llm): Reintroduce models
See README
This commit is contained in:
parent
2e60588ffd
commit
9b88034e0a
@ -4,3 +4,4 @@ While `services-flake` is generally used for running services in a *development*
`example/llm` runs two processes ollama and open-webui, while storing the ollama data under `$HOME/.services-flake/ollama`. You can change this path in `flake.nix`.
By default, a single model (`llama2-uncensored`) is downloaded. You can modify this in `flake.nix` as well.
@ -21,12 +21,20 @@
        # Backend service to perform inference on LLM models
        ollama."ollama1" = {
          enable = true;

          # The models are usually huge, downloading them in every project
          # directory can lead to a lot of duplication. Change here to a
          # directory where the Ollama models can be stored and shared across
          # projects.
          dataDir = "$HOME/.services-flake/ollama1";

          # Define the models to download when our app starts
          #
          # You can also initialize this to empty list, and download the
          # models manually in the UI.
          models = [ "llama2-uncensored" ];
        };

        # Get ChatGPT like UI, but open-source, with Open WebUI
        open-webui."open-webui1" = {
          enable = true;
Loading…
Reference in New Issue
Block a user