
Commit e7eb9de

shivaraj-bh, drupol, and srid committed
feat(open-webui): init
ported from: https://github.com/shivaraj-bh/ollama-flake/blob/main/services/open-webui.nix

This was also recently upstreamed to [nixpkgs](https://github.com/NixOS/nixpkgs/tree/master): NixOS/nixpkgs#316248

Co-authored-by: Pol Dellaiera <[email protected]>
Co-authored-by: Sridhar Ratnakumar <[email protected]>
1 parent 14a3740 commit e7eb9de

7 files changed (+235, -4)

doc/open-webui.md (+68)

@@ -0,0 +1,68 @@
+# Open WebUI
+
+[Open WebUI](https://github.com/open-webui/open-webui) is a user-friendly WebUI for LLMs. It supports various LLM runners, including [[ollama]] and OpenAI-compatible APIs.
+
+{#start}
+## Getting Started
+
+```nix
+# In `perSystem.process-compose.<name>`
+{
+  services.open-webui."open-webui1".enable = true;
+}
+```
+
+## Examples
+
+{#ollama}
+### Open WebUI with ollama backend
+
+```nix
+{
+  services = {
+    # Backend service to perform inference on LLM models
+    ollama."ollama1" = {
+      enable = true;
+      # The models are usually huge; downloading them in every project directory can lead to a lot of duplication
+      dataDir = "$HOME/.services-flake/ollama1";
+      models = [ "llama2-uncensored" ];
+    };
+    # Get a ChatGPT-like UI, but open-source, with Open WebUI
+    open-webui."open-webui1" = {
+      enable = true;
+      environment =
+        let
+          inherit (pc.config.services.ollama.ollama1) host port;
+        in
+        {
+          OLLAMA_API_BASE_URL = "http://${host}:${toString port}";
+          WEBUI_AUTH = "False";
+        };
+    };
+  };
+  # Start the Open WebUI service after the Ollama service has finished initializing and loading the models
+  settings.processes.open-webui1.depends_on.ollama1-models.condition = "process_completed_successfully";
+}
+```
+
+See [[ollama]] for more customisation of the backend.
+
+{#browser}
+## Open browser on startup
+
+```nix
+{
+  services.open-webui."open-webui1".enable = true;
+  # Open the browser after the Open WebUI service has started
+  settings.processes.open-browser = {
+    command =
+      let
+        inherit (pc.config.services.open-webui.open-webui1) host port;
+        opener = if pkgs.stdenv.isDarwin then "open" else lib.getExe' pkgs.xdg-utils "xdg-open";
+        url = "http://${host}:${toString port}";
+      in
+      "${opener} ${url}";
+    depends_on.open-webui1.condition = "process_healthy";
+  };
+}
+```
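
The module introduced in `nix/open-webui.nix` below also exposes `dataDir`, `host`, and `port` options (defaults: `./data/<name>`, `127.0.0.1`, and `1111`). A minimal sketch of overriding them alongside the examples above; the concrete values here are illustrative, not part of this commit:

```nix
{
  services.open-webui."open-webui1" = {
    enable = true;
    # Illustrative: keep state in a custom project-local directory
    # (the module default is "./data/<name>").
    dataDir = "./state/open-webui1";
    # Illustrative: listen on a non-default port (the module default is 1111).
    port = 8080;
  };
}
```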

doc/services.md (+1)

@@ -13,6 +13,7 @@ short-title: Services
 - [[mysql]]#
 - [[nginx]]#
 - [[ollama]]#
+- [[open-webui]]#
 - [[postgresql]]#
 - [[pgadmin]]
 - [[redis]]#

example/llm/flake.nix (+36, -4)

@@ -13,13 +13,45 @@
         inputs.process-compose-flake.flakeModule
       ];
       perSystem = { self', pkgs, lib, ... }: {
-        process-compose."default" = {
+        process-compose."default" = pc: {
           imports = [
             inputs.services-flake.processComposeModules.default
           ];
-          services.ollama."ollama1" = {
-            enable = true;
-            models = [ "llama2-uncensored" ];
+          services = {
+            # Backend service to perform inference on LLM models
+            ollama."ollama1" = {
+              enable = true;
+              # The models are usually huge; downloading them in every project directory can lead to a lot of duplication
+              dataDir = "$HOME/.services-flake/ollama1";
+              models = [ "llama2-uncensored" ];
+            };
+            # Get a ChatGPT-like UI, but open-source, with Open WebUI
+            open-webui."open-webui1" = {
+              enable = true;
+              environment =
+                let
+                  inherit (pc.config.services.ollama.ollama1) host port;
+                in
+                {
+                  OLLAMA_API_BASE_URL = "http://${host}:${toString port}";
+                  WEBUI_AUTH = "False";
+                };
+            };
+          };
+
+          # Start the Open WebUI service after the Ollama service has finished initializing and loading the models
+          settings.processes.open-webui1.depends_on.ollama1-models.condition = "process_completed_successfully";
+
+          # Open the browser after the Open WebUI service has started
+          settings.processes.open-browser = {
+            command =
+              let
+                inherit (pc.config.services.open-webui.open-webui1) host port;
+                opener = if pkgs.stdenv.isDarwin then "open" else lib.getExe' pkgs.xdg-utils "xdg-open";
+                url = "http://${host}:${toString port}";
+              in
+              "${opener} ${url}";
+            depends_on.open-webui1.condition = "process_healthy";
           };
         };
       };
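
Note that `process-compose."default"` is now defined as a function (`pc: { ... }`): the body is then a module whose `pc.config` argument is the evaluated configuration of this process-compose instance, which is what lets the Open WebUI environment read Ollama's final `host` and `port`. A minimal sketch of just that pattern, reusing the names from this example:

```nix
# Sketch only: the `pc:` module-function pattern used in the diff above.
process-compose."default" = pc: {
  # `pc.config` holds the evaluated settings of this instance, so one
  # service's options can reference another service's final values.
  services.open-webui."open-webui1".environment.OLLAMA_API_BASE_URL =
    let inherit (pc.config.services.ollama.ollama1) host port;
    in "http://${host}:${toString port}";
};
```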

nix/default.nix (+1)

@@ -11,6 +11,7 @@ in
     ./nginx
     ./ollama.nix
     ./postgres
+    ./open-webui.nix
     ./redis-cluster.nix
     ./redis.nix
     ./zookeeper.nix

nix/open-webui.nix (+107)

@@ -0,0 +1,107 @@
+# Based on: https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/open-webui.nix
+{ pkgs, lib, name, config, ... }:
+let
+  inherit (lib) types;
+in
+{
+  options = {
+    enable = lib.mkEnableOption "Open-WebUI server";
+    package = lib.mkPackageOption pkgs "open-webui" { };
+
+    dataDir = lib.mkOption {
+      type = types.str;
+      default = "./data/${name}";
+      description = "The Open-WebUI data directory";
+    };
+
+    host = lib.mkOption {
+      type = types.str;
+      default = "127.0.0.1";
+      example = "0.0.0.0";
+      description = ''
+        The host address which the Open-WebUI server HTTP interface listens to.
+      '';
+    };
+
+    port = lib.mkOption {
+      type = types.port;
+      default = 1111;
+      example = 11111;
+      description = ''
+        Which port the Open-WebUI server listens to.
+      '';
+    };
+
+    environment = lib.mkOption {
+      type = types.attrsOf types.str;
+      default = {
+        SCARF_NO_ANALYTICS = "True";
+        DO_NOT_TRACK = "True";
+        ANONYMIZED_TELEMETRY = "False";
+      };
+      example = ''
+        {
+          OLLAMA_API_BASE_URL = "http://127.0.0.1:11434";
+          # Disable authentication
+          WEBUI_AUTH = "False";
+        }
+      '';
+      description = "Extra environment variables for Open-WebUI";
+    };
+
+    outputs.settings = lib.mkOption {
+      type = types.deferredModule;
+      internal = true;
+      readOnly = true;
+      default = {
+        processes = {
+          "${name}" =
+            let
+              setupStateDirs = lib.concatMapStrings
+                (stateDir:
+                  ''
+                    if [ ! -d "''$${stateDir}" ]; then
+                      mkdir -p "''$${stateDir}"
+                    fi
+
+                    ${stateDir}=$(readlink -f "''$${stateDir}")
+
+                    export ${stateDir}
+                  '') [ "DATA_DIR" "STATIC_DIR" "HF_HOME" "SENTENCE_TRANSFORMERS_HOME" ];
+            in
+
+            {
+              environment = {
+                DATA_DIR = config.dataDir;
+                STATIC_DIR = config.dataDir;
+                HF_HOME = config.dataDir;
+                SENTENCE_TRANSFORMERS_HOME = config.dataDir;
+              } // config.environment;
+
+              command = pkgs.writeShellApplication {
+                name = "open-webui-wrapper";
+                text = ''
+                  ${setupStateDirs}
+
+                  ${lib.getExe config.package} serve --host ${config.host} --port ${builtins.toString config.port}
+                '';
+              };
+              readiness_probe = {
+                http_get = {
+                  host = config.host;
+                  port = config.port;
+                };
+                initial_delay_seconds = 2;
+                period_seconds = 10;
+                timeout_seconds = 4;
+                success_threshold = 1;
+                failure_threshold = 5;
+              };
+              namespace = name;
+              availability.restart = "on_failure";
+            };
+        };
+      };
+    };
+  };
+}
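
A note on the `setupStateDirs` snippet: inside a Nix indented string, `''$` escapes to a literal `$`, while `${stateDir}` is ordinary interpolation, so each iteration emits shell that creates the directory, resolves it with `readlink -f`, and re-exports the variable. A small illustrative sketch of the escaping for a single entry (not part of the module):

```nix
# Illustration only: how the ''$ escape expands for one state directory.
let
  stateDir = "DATA_DIR";
in
''
  ${stateDir}=$(readlink -f "''$${stateDir}")
  export ${stateDir}
''
# Evaluates to the shell text:
#   DATA_DIR=$(readlink -f "$DATA_DIR")
#   export DATA_DIR
```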

nix/open-webui_test.nix (+21)

@@ -0,0 +1,21 @@
+{ pkgs, ... }: {
+  services.open-webui."open-webui1" = {
+    enable = true;
+    environment = {
+      # Requires network connection
+      RAG_EMBEDDING_MODEL = "";
+    };
+  };
+
+  settings.processes.test = {
+    command = pkgs.writeShellApplication {
+      runtimeInputs = [ pkgs.curl ];
+      text = ''
+        # Avoid printing the entire HTML page on stdout; we just want to know if the page is active.
+        curl http://127.0.0.1:1111 > /dev/null
+      '';
+      name = "open-webui-test";
+    };
+    depends_on."open-webui1".condition = "process_healthy";
+  };
+}

test/flake.nix (+1)

@@ -39,6 +39,7 @@
         "${inputs.services-flake}/nix/mysql/mysql_test.nix"
         "${inputs.services-flake}/nix/nginx/nginx_test.nix"
         "${inputs.services-flake}/nix/ollama_test.nix"
+        "${inputs.services-flake}/nix/open-webui_test.nix"
         "${inputs.services-flake}/nix/postgres/postgres_test.nix"
         "${inputs.services-flake}/nix/redis_test.nix"
         "${inputs.services-flake}/nix/redis-cluster_test.nix"

0 commit comments
