feat(open-webui): init

ported from:
https://github.com/shivaraj-bh/ollama-flake/blob/main/services/open-webui.nix

This was also recently upstreamed to
[nixpkgs](https://github.com/NixOS/nixpkgs/tree/master):
https://github.com/NixOS/nixpkgs/pull/316248

---------

Co-authored-by: Pol Dellaiera <pol.dellaiera@protonmail.com>
Co-authored-by: Sridhar Ratnakumar <3998+srid@users.noreply.github.com>
This commit is contained in:
shivaraj-bh 2024-06-14 02:57:10 +05:30 committed by Shivaraj B H
parent 14a3740710
commit e7eb9dec41
7 changed files with 235 additions and 4 deletions

68
doc/open-webui.md Normal file
View File

@ -0,0 +1,68 @@
# Open WebUI
[Open WebUI](https://github.com/open-webui/open-webui) is a user-friendly WebUI for LLMs. It supports various LLM runners, including [[ollama]] and OpenAI-compatible APIs.
{#start}
## Getting Started
```nix
# In `perSystem.process-compose.<name>`
{
services.open-webui."open-webui1".enable = true;
}
```
## Examples
{#ollama}
### Open WebUI with ollama backend

> **Note:** `pc` in this example refers to the process-compose module's own arguments; it is in scope when the module is written as a function, i.e. `process-compose.<name> = pc: { ... };` in your flake's `perSystem`.

```nix
{
services = {
# Backend service to perform inference on LLM models
ollama."ollama1" = {
enable = true;
# The models are usually huge, downloading them in every project directory can lead to a lot of duplication
dataDir = "$HOME/.services-flake/ollama1";
models = [ "llama2-uncensored" ];
};
# Get ChatGPT like UI, but open-source, with Open WebUI
open-webui."open-webui1" = {
enable = true;
environment =
let
inherit (pc.config.services.ollama.ollama1) host port;
in
{
OLLAMA_API_BASE_URL = "http://${host}:${toString port}";
WEBUI_AUTH = "False";
};
};
};
# Start the Open WebUI service after the Ollama service has finished initializing and loading the models
settings.processes.open-webui1.depends_on.ollama1-models.condition = "process_completed_successfully";
}
```
See [[ollama]] for more customisation of the backend.
{#browser}
## Open browser on startup
```nix
{
services.open-webui."open-webui1".enable = true;
# Open the browser after the Open WebUI service has started
settings.processes.open-browser = {
command =
let
inherit (pc.config.services.open-webui.open-webui1) host port;
opener = if pkgs.stdenv.isDarwin then "open" else lib.getExe' pkgs.xdg-utils "xdg-open";
url = "http://${host}:${toString port}";
in
"${opener} ${url}";
depends_on.open-webui1.condition = "process_healthy";
};
}
```

View File

@ -13,6 +13,7 @@ short-title: Services
- [[mysql]]# - [[mysql]]#
- [[nginx]]# - [[nginx]]#
- [[ollama]]# - [[ollama]]#
- [[open-webui]]#
- [[postgresql]]# - [[postgresql]]#
- [[pgadmin]] - [[pgadmin]]
- [[redis]]# - [[redis]]#

View File

@ -13,14 +13,46 @@
inputs.process-compose-flake.flakeModule inputs.process-compose-flake.flakeModule
]; ];
perSystem = { self', pkgs, lib, ... }: { perSystem = { self', pkgs, lib, ... }: {
process-compose."default" = { process-compose."default" = pc: {
imports = [ imports = [
inputs.services-flake.processComposeModules.default inputs.services-flake.processComposeModules.default
]; ];
services.ollama."ollama1" = { services = {
# Backend service to perform inference on LLM models
ollama."ollama1" = {
enable = true; enable = true;
# The models are usually huge, downloading them in every project directory can lead to a lot of duplication
dataDir = "$HOME/.services-flake/ollama1";
models = [ "llama2-uncensored" ]; models = [ "llama2-uncensored" ];
}; };
# Get ChatGPT like UI, but open-source, with Open WebUI
open-webui."open-webui1" = {
enable = true;
environment =
let
inherit (pc.config.services.ollama.ollama1) host port;
in
{
OLLAMA_API_BASE_URL = "http://${host}:${toString port}";
WEBUI_AUTH = "False";
};
};
};
# Start the Open WebUI service after the Ollama service has finished initializing and loading the models
settings.processes.open-webui1.depends_on.ollama1-models.condition = "process_completed_successfully";
# Open the browser after the Open WebUI service has started
settings.processes.open-browser = {
command =
let
inherit (pc.config.services.open-webui.open-webui1) host port;
opener = if pkgs.stdenv.isDarwin then "open" else lib.getExe' pkgs.xdg-utils "xdg-open";
url = "http://${host}:${toString port}";
in
"${opener} ${url}";
depends_on.open-webui1.condition = "process_healthy";
};
}; };
}; };
}; };

View File

@ -11,6 +11,7 @@ in
./nginx ./nginx
./ollama.nix ./ollama.nix
./postgres ./postgres
./open-webui.nix
./redis-cluster.nix ./redis-cluster.nix
./redis.nix ./redis.nix
./zookeeper.nix ./zookeeper.nix

107
nix/open-webui.nix Normal file
View File

@ -0,0 +1,107 @@
# Based on: https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/open-webui.nix
# Process-compose (services-flake) service module for Open-WebUI.
# `name` is the service instance name (e.g. "open-webui1"); `config` holds this
# module's own evaluated option values.
{ pkgs, lib, name, config, ... }:
let
  inherit (lib) types;
in
{
  options = {
    enable = lib.mkEnableOption "Open-WebUI server";
    package = lib.mkPackageOption pkgs "open-webui" { };
    # Directory holding the service's mutable state; also reused below for
    # static assets and model caches (see the process `environment`).
    dataDir = lib.mkOption {
      type = types.str;
      default = "./data/${name}";
      description = "The Open-WebUI data directory";
    };
    host = lib.mkOption {
      type = types.str;
      default = "127.0.0.1";
      example = "0.0.0.0";
      description = ''
        The host address which the Open-WebUI server HTTP interface listens to.
      '';
    };
    port = lib.mkOption {
      type = types.port;
      default = 1111;
      example = 11111;
      description = ''
        Which port the Open-WebUI server listens to.
      '';
    };
    # Extra environment variables; the defaults opt out of analytics/telemetry.
    environment = lib.mkOption {
      type = types.attrsOf types.str;
      default = {
        SCARF_NO_ANALYTICS = "True";
        DO_NOT_TRACK = "True";
        ANONYMIZED_TELEMETRY = "False";
      };
      example = ''
        {
          OLLAMA_API_BASE_URL = "http://127.0.0.1:11434";
          # Disable authentication
          WEBUI_AUTH = "False";
        }
      '';
      description = "Extra environment variables for Open-WebUI";
    };
    # Internal output: the process-compose settings this service contributes.
    outputs.settings = lib.mkOption {
      type = types.deferredModule;
      internal = true;
      readOnly = true;
      default = {
        processes = {
          "${name}" =
            let
              # Shell snippet that, for each listed environment variable name,
              # creates the directory it points at (if missing) and re-exports
              # it as an absolute path.  In a Nix indented string, `''$`
              # escapes `$`, so `''$${stateDir}` expands to e.g. `$DATA_DIR`
              # in the generated shell code, while bare `${stateDir}` is Nix
              # interpolation of the variable name itself.
              setupStateDirs = lib.concatMapStrings
                (stateDir:
                  ''
                    if [ ! -d "''$${stateDir}" ]; then
                      mkdir -p "''$${stateDir}"
                    fi
                    ${stateDir}=$(readlink -f "''$${stateDir}")
                    export ${stateDir}
                  '') [ "DATA_DIR" "STATIC_DIR" "HF_HOME" "SENTENCE_TRANSFORMERS_HOME" ];
            in
            {
              # All state dirs share `dataDir`; user-supplied `environment`
              # entries win on conflict (`//` is right-biased).
              environment = {
                DATA_DIR = config.dataDir;
                STATIC_DIR = config.dataDir;
                HF_HOME = config.dataDir;
                SENTENCE_TRANSFORMERS_HOME = config.dataDir;
              } // config.environment;
              command = pkgs.writeShellApplication {
                name = "open-webui-wrapper";
                text = ''
                  ${setupStateDirs}
                  ${lib.getExe config.package} serve --host ${config.host} --port ${builtins.toString config.port}
                '';
              };
              # The process is considered healthy once the HTTP interface
              # responds; other processes can gate on "process_healthy".
              readiness_probe = {
                http_get = {
                  host = config.host;
                  port = config.port;
                };
                initial_delay_seconds = 2;
                period_seconds = 10;
                timeout_seconds = 4;
                success_threshold = 1;
                failure_threshold = 5;
              };
              namespace = name;
              availability.restart = "on_failure";
            };
        };
      };
    };
  };
}

21
nix/open-webui_test.nix Normal file
View File

@ -0,0 +1,21 @@
# Smoke test for the open-webui service: start an instance and verify that
# its HTTP interface serves a page.
{ pkgs, ... }:
let
  # Probe the web UI once it is up; the HTML body itself is irrelevant.
  probe = pkgs.writeShellApplication {
    name = "open-webui-test";
    runtimeInputs = [ pkgs.curl ];
    text = ''
      # Avoid printing the entire HTML page on the stdout, we just want to know if the page is active.
      curl http://127.0.0.1:1111 > /dev/null
    '';
  };
in
{
  services.open-webui."open-webui1" = {
    enable = true;
    environment = {
      # Requires network connection
      RAG_EMBEDDING_MODEL = "";
    };
  };
  settings.processes.test = {
    command = probe;
    # Only probe after the readiness check has passed.
    depends_on."open-webui1".condition = "process_healthy";
  };
}

View File

@ -39,6 +39,7 @@
"${inputs.services-flake}/nix/mysql/mysql_test.nix" "${inputs.services-flake}/nix/mysql/mysql_test.nix"
"${inputs.services-flake}/nix/nginx/nginx_test.nix" "${inputs.services-flake}/nix/nginx/nginx_test.nix"
"${inputs.services-flake}/nix/ollama_test.nix" "${inputs.services-flake}/nix/ollama_test.nix"
"${inputs.services-flake}/nix/open-webui_test.nix"
"${inputs.services-flake}/nix/postgres/postgres_test.nix" "${inputs.services-flake}/nix/postgres/postgres_test.nix"
"${inputs.services-flake}/nix/redis_test.nix" "${inputs.services-flake}/nix/redis_test.nix"
"${inputs.services-flake}/nix/redis-cluster_test.nix" "${inputs.services-flake}/nix/redis-cluster_test.nix"