diff --git a/doc/open-webui.md b/doc/open-webui.md
new file mode 100644
index 0000000..daf7e71
--- /dev/null
+++ b/doc/open-webui.md
@@ -0,0 +1,68 @@
+# Open WebUI
+
+[Open WebUI](https://github.com/open-webui/open-webui) is a user-friendly WebUI for LLMs. It supports various LLM runners, including [[ollama]] and OpenAI-compatible APIs.
+
+{#start}
+## Getting Started
+
+```nix
+# In `perSystem.process-compose.<name>`
+{
+  services.open-webui."open-webui1".enable = true;
+}
+```
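+
+Then start it with `nix run` (the flake app name matches the `process-compose.<name>` key; `default` is assumed here, as in the services-flake templates):
+
+```sh
+nix run .#default
+```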
+
+## Examples
+
+{#ollama}
+### Open WebUI with ollama backend
+
+```nix
+{
+  services = {
+    # Backend service to perform inference on LLM models
+    ollama."ollama1" = {
+      enable = true;
+      # The models are usually huge; downloading them into every project directory can lead to a lot of duplication
+      dataDir = "$HOME/.services-flake/ollama1";
+      models = [ "llama2-uncensored" ];
+    };
+    # Get a ChatGPT-like UI, but open source, with Open WebUI
+    open-webui."open-webui1" = {
+      enable = true;
+      environment =
+        let
+          # `pc` is the process-compose module argument (`process-compose."<name>" = pc: { ... }`)
+          inherit (pc.config.services.ollama.ollama1) host port;
+        in
+        {
+          OLLAMA_API_BASE_URL = "http://${host}:${toString port}";
+          WEBUI_AUTH = "False";
+        };
+    };
+  };
+  # Start the Open WebUI service after the Ollama service has finished initializing and loading the models
+  settings.processes.open-webui1.depends_on.ollama1-models.condition = "process_completed_successfully";
+}
+```
+
+See [[ollama]] for more customisation of the backend.
+
+{#browser}
+## Open browser on startup
+
+```nix
+{
+  services.open-webui."open-webui1".enable = true;
+  # Open the browser after the Open WebUI service has started
+  settings.processes.open-browser = {
+    command =
+      let
+        inherit (pc.config.services.open-webui.open-webui1) host port;
+        opener = if pkgs.stdenv.isDarwin then "open" else lib.getExe' pkgs.xdg-utils "xdg-open";
+        url = "http://${host}:${toString port}";
+      in
+      "${opener} ${url}";
+    depends_on.open-webui1.condition = "process_healthy";
+  };
+}
+```
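+
+{#port}
+## Customising host and port
+
+Open WebUI listens on `127.0.0.1:1111` by default; both are ordinary module options (the values below are the options' documented examples, not requirements):
+
+```nix
+{
+  services.open-webui."open-webui1" = {
+    enable = true;
+    host = "0.0.0.0";
+    port = 11111;
+  };
+}
+```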
-d "''$${stateDir}" ]; then + mkdir -p "''$${stateDir}" + fi + + ${stateDir}=$(readlink -f "''$${stateDir}") + + export ${stateDir} + '') [ "DATA_DIR" "STATIC_DIR" "HF_HOME" "SENTENCE_TRANSFORMERS_HOME" ]; + in + + { + environment = { + DATA_DIR = config.dataDir; + STATIC_DIR = config.dataDir; + HF_HOME = config.dataDir; + SENTENCE_TRANSFORMERS_HOME = config.dataDir; + } // config.environment; + + command = pkgs.writeShellApplication { + name = "open-webui-wrapper"; + text = '' + ${setupStateDirs} + + ${lib.getExe config.package} serve --host ${config.host} --port ${builtins.toString config.port} + ''; + }; + readiness_probe = { + http_get = { + host = config.host; + port = config.port; + }; + initial_delay_seconds = 2; + period_seconds = 10; + timeout_seconds = 4; + success_threshold = 1; + failure_threshold = 5; + }; + namespace = name; + availability.restart = "on_failure"; + }; + }; + }; + }; + }; +} diff --git a/nix/open-webui_test.nix b/nix/open-webui_test.nix new file mode 100644 index 0000000..692b788 --- /dev/null +++ b/nix/open-webui_test.nix @@ -0,0 +1,21 @@ +{ pkgs, ... }: { + services.open-webui."open-webui1" = { + enable = true; + environment = { + # Requires network connection + RAG_EMBEDDING_MODEL = ""; + }; + }; + + settings.processes.test = { + command = pkgs.writeShellApplication { + runtimeInputs = [ pkgs.curl ]; + text = '' + # Avoid printing the entire HTML page on the stdout, we just want to know if the page is active. + curl http://127.0.0.1:1111 > /dev/null + ''; + name = "open-webui-test"; + }; + depends_on."open-webui1".condition = "process_healthy"; + }; +} diff --git a/test/flake.nix b/test/flake.nix index b7faa31..4995218 100644 --- a/test/flake.nix +++ b/test/flake.nix @@ -39,6 +39,7 @@ "${inputs.services-flake}/nix/mysql/mysql_test.nix" "${inputs.services-flake}/nix/nginx/nginx_test.nix" "${inputs.services-flake}/nix/ollama_test.nix" + "${inputs.services-flake}/nix/open-webui_test.nix" "${inputs.services-flake}/nix/postgres/postgres_test.nix" "${inputs.services-flake}/nix/redis_test.nix" "${inputs.services-flake}/nix/redis-cluster_test.nix"