feat(open-webui): add OpenWeb UI module for LLM interface
Add native NixOS module wrapping services.open-webui with:

- Configurable Ollama endpoint (set here to 100.64.0.1:11434 via Headscale)
- SOPS secret management for WEBUI_SECRET_KEY
- Nginx reverse proxy integration with relaxed CSP for WebSockets
- Exposed at ai.ashisgreat.xyz

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
parent be7c2404c3
commit 68da82c26b
3 changed files with 89 additions and 0 deletions
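The module below only declares sops.secrets.openwebui_secret_key = { }; plus a sops template, so it leans on the host's existing sops-nix defaults to locate and decrypt the key. A minimal sketch of that host-level wiring, assuming sops-nix is already imported and the key sits in a secrets.yaml next to the host config (the file name and key path here are illustrative, not part of this commit):

  # Host-level sops-nix defaults the module relies on (illustrative paths).
  {
    sops.defaultSopsFile = ./secrets.yaml;           # must contain an openwebui_secret_key entry
    sops.age.keyFile = "/var/lib/sops-nix/key.txt";  # host key used to decrypt secrets at activation
  }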
Host configuration:

@@ -90,6 +90,11 @@
   nix.settings.experimental-features = [ "nix-command" "flakes" ];
 
+  # Allow unfree packages (for open-webui)
+  nixpkgs.config.allowUnfreePredicate = pkg: builtins.elem (lib.getName pkg) [
+    "open-webui"
+  ];
+
   # === SearXNG ===
   myModules.searxng = {
     enable = true;
@@ -203,6 +208,13 @@
   # === Tailscale Client ===
   services.tailscale.enable = true;
 
+  # === OpenWeb UI ===
+  myModules.open-webui = {
+    enable = true;
+    domain = "ai.ashisgreat.xyz";
+    ollamaUrl = "http://100.64.0.1:11434";
+  };
+
   # === Backups (Restic + B2) ===
   myModules.backup = {
     enable = true;
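The host points ollamaUrl at 100.64.0.1:11434, an Ollama instance reached over the Headscale mesh rather than on localhost. For context, a minimal sketch of the serving side, assuming the model host is also NixOS and sits at that Headscale address; the options come from the stock nixpkgs services.ollama module and the interface name is Tailscale's default, neither of which is part of this commit:

  # On the machine that actually runs the models (hypothetical peer at 100.64.0.1).
  {
    services.ollama = {
      enable = true;
      host = "0.0.0.0";  # listen beyond loopback so tailnet peers can reach the API
      port = 11434;
    };
    # Expose the API only on the tailnet interface, never on the public internet.
    networking.firewall.interfaces."tailscale0".allowedTCPPorts = [ 11434 ];
  }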
Module imports list:

@@ -12,5 +12,6 @@
     ./adguard.nix
     ./forgejo.nix
     ./headscale.nix
+    ./open-webui.nix
   ];
 }
modules/open-webui.nix (new file, 76 lines):

# OpenWeb UI Module
# Provides: Web interface for LLMs (Ollama, OpenAI-compatible APIs)
#
# Usage:
#   myModules.open-webui = {
#     enable = true;
#     port = 8080;
#     domain = "ai.example.com";
#     ollamaUrl = "http://100.64.0.1:11434"; # Remote Ollama via Tailscale/Headscale
#   };

{
  config,
  lib,
  ...
}:

let
  cfg = config.myModules.open-webui;
in
{
  options.myModules.open-webui = {
    enable = lib.mkEnableOption "OpenWeb UI for LLMs";

    port = lib.mkOption {
      type = lib.types.port;
      default = 8080;
      description = "Port to expose OpenWeb UI on localhost";
    };

    domain = lib.mkOption {
      type = lib.types.str;
      example = "ai.example.com";
      description = "Public domain name for OpenWeb UI";
    };

    ollamaUrl = lib.mkOption {
      type = lib.types.str;
      default = "http://127.0.0.1:11434";
      example = "http://100.64.0.1:11434";
      description = "URL of the Ollama API endpoint";
    };
  };

  config = lib.mkIf cfg.enable {
    services.open-webui = {
      enable = true;
      port = cfg.port;
      host = "127.0.0.1";
      environment = {
        OLLAMA_API_BASE_URL = cfg.ollamaUrl;
        WEBUI_URL = "https://${cfg.domain}";
      };
      environmentFile = config.sops.templates."openwebui.env".path;
    };

    # SOPS template for secrets
    sops.templates."openwebui.env" = {
      content = ''
        WEBUI_SECRET_KEY=${config.sops.placeholder.openwebui_secret_key}
      '';
    };

    sops.secrets.openwebui_secret_key = { };

    # Nginx configuration
    myModules.nginx.domains.${cfg.domain} = {
      port = cfg.port;
      extraConfig = ''
        client_max_body_size 100M;
      '';
      # Relaxed CSP for OpenWeb UI: needs unsafe-eval for some JS, WebSockets, external images
      contentSecurityPolicy = "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; connect-src 'self' wss: https:; frame-ancestors 'self'";
    };
  };
}
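myModules.nginx.domains is this repo's own nginx wrapper, and its implementation is not part of this diff; from the options used above (port, extraConfig, contentSecurityPolicy) it presumably expands to a stock virtual host roughly like the sketch below. This is an assumption about the wrapper, not its actual code; proxyWebsockets is the piece that matters for Open WebUI's chat streaming:

  # Assumed rough equivalent of the wrapper's output for ai.ashisgreat.xyz.
  {
    services.nginx.virtualHosts."ai.ashisgreat.xyz" = {
      forceSSL = true;
      enableACME = true;
      locations."/" = {
        proxyPass = "http://127.0.0.1:8080";
        proxyWebsockets = true;  # adds the Upgrade/Connection headers WebSockets need
      };
      extraConfig = ''
        client_max_body_size 100M;
        # plus an add_header Content-Security-Policy line carrying the relaxed policy above
      '';
    };
  }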