feat: add openwebui #8

Merged
ashie merged 2 commits from feat/openwebui into main 2026-03-20 21:48:16 +00:00
4 changed files with 223 additions and 0 deletions

configuration.nix
View file

@@ -90,6 +90,11 @@
nix.settings.experimental-features = [ "nix-command" "flakes" ];
# Allow unfree packages (for open-webui)
nixpkgs.config.allowUnfreePredicate = pkg: builtins.elem (lib.getName pkg) [
"open-webui"
];
# === SearXNG ===
myModules.searxng = {
enable = true;
@@ -203,6 +208,13 @@
# === Tailscale Client ===
services.tailscale.enable = true;
# === OpenWeb UI ===
myModules.open-webui = {
enable = true;
domain = "ai.ashisgreat.xyz";
ollamaUrl = "http://100.64.0.1:11434";
};
# === Backups (Restic + B2) ===
myModules.backup = {
enable = true;

View file

@@ -0,0 +1,134 @@
# OpenWeb UI Module Design
## Overview
Add OpenWeb UI as a native NixOS service, connecting to Ollama running on the user's PC via Headscale at `http://100.64.0.1:11434`.
## Architecture
A thin wrapper module around the native `services.open-webui` NixOS module that:
1. Configures connection to the remote Ollama instance
2. Integrates with the existing nginx module for HTTPS
3. Follows established module patterns (vaultwarden, forgejo)
## Module Options
```nix
myModules.open-webui = {
enable = true;
port = 8080;
domain = "ai.ashisgreat.xyz";
ollamaUrl = "http://100.64.0.1:11434";
};
```
## Components
### 1. Module File: `modules/open-webui.nix`
```nix
{
config,
lib,
...
}:
let
cfg = config.myModules.open-webui;
in
{
options.myModules.open-webui = {
enable = lib.mkEnableOption "OpenWeb UI for LLMs";
port = lib.mkOption {
type = lib.types.port;
default = 8080;
description = "Port to expose OpenWeb UI on localhost";
};
domain = lib.mkOption {
type = lib.types.str;
example = "ai.example.com";
description = "Public domain name for OpenWeb UI";
};
ollamaUrl = lib.mkOption {
type = lib.types.str;
default = "http://127.0.0.1:11434";
example = "http://100.64.0.1:11434";
description = "URL of the Ollama API endpoint";
};
};
config = lib.mkIf cfg.enable {
services.open-webui = {
enable = true;
port = cfg.port;
host = "127.0.0.1";
environment = {
OLLAMA_API_BASE_URL = cfg.ollamaUrl;
WEBUI_URL = "https://${cfg.domain}";
};
environmentFile = config.sops.templates."openwebui.env".path;
};
# SOPS template for secrets
sops.templates."openwebui.env" = {
content = ''
WEBUI_SECRET_KEY=${config.sops.placeholder.openwebui_secret_key}
'';
};
sops.secrets.openwebui_secret_key = { };
# Nginx configuration
myModules.nginx.domains.${cfg.domain} = {
port = cfg.port;
extraConfig = ''
client_max_body_size 100M;
'';
contentSecurityPolicy = "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; connect-src 'self' wss: https:; frame-ancestors 'self'";
};
};
}
```
### 2. Update `modules/default.nix`
Add `./open-webui.nix` to imports.
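A minimal sketch of the resulting import list (existing entries abbreviated; the full change appears in the `modules/default.nix` diff further down):
```nix
{
  imports = [
    # ...existing module imports (adguard, forgejo, headscale, ...)...
    ./open-webui.nix
  ];
}
```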
### 3. Update `configuration.nix`
```nix
myModules.open-webui = {
enable = true;
domain = "ai.ashisgreat.xyz";
ollamaUrl = "http://100.64.0.1:11434";
};
```
### 4. Add Secret to `secrets/secrets.yaml`
```yaml
openwebui_secret_key: <generated-secret>
```
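Any sufficiently long random string works as the value; `openssl rand -hex 32` is one way to generate it, and the key can be added by editing the file with `sops secrets/secrets.yaml` (assuming the repository's existing SOPS setup already covers this file).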
## Files Changed
| File | Change |
|------|--------|
| `modules/open-webui.nix` | New module |
| `modules/default.nix` | Add import |
| `configuration.nix` | Enable and configure |
| `secrets/secrets.yaml` | Add `openwebui_secret_key` |
## Security Considerations
- Service binds to `127.0.0.1` only (nginx handles external access)
- `WEBUI_SECRET_KEY` stored encrypted in SOPS
- CSP headers configured for WebSocket support
- Large upload limit (100M) for model files if needed
## Dependencies
- `services.open-webui` (available in nixpkgs unstable)
- Existing nginx and SOPS infrastructure
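Because the design relies on `services.open-webui` from nixpkgs unstable, the flake input is presumably tracking an unstable channel already. A minimal sketch of such an input, where the input name `nixpkgs` and the channel are assumptions (the actual `flake.nix` is not part of this PR):
```nix
{
  # Assumed input name and channel; the repository's real flake.nix is not shown here.
  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
}
```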

modules/default.nix
View file

@@ -12,5 +12,6 @@
./adguard.nix
./forgejo.nix
./headscale.nix
./open-webui.nix
];
}

modules/open-webui.nix Normal file (76 additions)
View file

@@ -0,0 +1,76 @@
# OpenWeb UI Module
# Provides: Web interface for LLMs (Ollama, OpenAI-compatible APIs)
#
# Usage:
# myModules.open-webui = {
# enable = true;
# port = 8080;
# domain = "ai.example.com";
# ollamaUrl = "http://100.64.0.1:11434"; # Remote Ollama via Tailscale/Headscale
# };
{
config,
lib,
...
}:
let
cfg = config.myModules.open-webui;
in
{
options.myModules.open-webui = {
enable = lib.mkEnableOption "OpenWeb UI for LLMs";
port = lib.mkOption {
type = lib.types.port;
default = 8080;
description = "Port to expose OpenWeb UI on localhost";
};
domain = lib.mkOption {
type = lib.types.str;
example = "ai.example.com";
description = "Public domain name for OpenWeb UI";
};
ollamaUrl = lib.mkOption {
type = lib.types.str;
default = "http://127.0.0.1:11434";
example = "http://100.64.0.1:11434";
description = "URL of the Ollama API endpoint";
};
};
config = lib.mkIf cfg.enable {
services.open-webui = {
enable = true;
port = cfg.port;
host = "127.0.0.1";
environment = {
OLLAMA_API_BASE_URL = cfg.ollamaUrl;
WEBUI_URL = "https://${cfg.domain}";
};
environmentFile = config.sops.templates."openwebui.env".path;
};
# SOPS template for secrets
sops.templates."openwebui.env" = {
content = ''
WEBUI_SECRET_KEY=${config.sops.placeholder.openwebui_secret_key}
'';
};
sops.secrets.openwebui_secret_key = { };
# Nginx configuration
myModules.nginx.domains.${cfg.domain} = {
port = cfg.port;
extraConfig = ''
client_max_body_size 100M;
'';
# Relaxed CSP for OpenWeb UI — needs unsafe-eval for some JS, WebSockets, external images
contentSecurityPolicy = "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; connect-src 'self' wss: https:; frame-ancestors 'self'";
};
};
}