feat(open-webui): add OpenWeb UI module for LLM interface

Add native NixOS module wrapping services.open-webui with:
- Configurable Ollama endpoint (defaults to 100.64.0.1:11434 via Headscale)
- SOPS secret management for WEBUI_SECRET_KEY
- Nginx reverse proxy integration with relaxed CSP for WebSockets
- Exposed at ai.ashisgreat.xyz

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
ashisgreat22 2026-03-20 22:47:05 +01:00
parent be7c2404c3
commit 68da82c26b
3 changed files with 89 additions and 0 deletions

View file

@@ -90,6 +90,11 @@
# Enable the modern `nix` CLI and flakes (both still gated as experimental).
nix.settings.experimental-features = [ "nix-command" "flakes" ];
# Allow unfree packages (for open-webui)
# Whitelist individual unfree packages by name rather than setting
# allowUnfree = true globally; only open-webui is permitted here.
nixpkgs.config.allowUnfreePredicate = pkg: builtins.elem (lib.getName pkg) [
"open-webui"
];
# === SearXNG ===
myModules.searxng = {
enable = true;
@@ -203,6 +208,13 @@
# === Tailscale Client ===
# Run the Tailscale daemon on this host (coordination server presumably the
# Headscale instance mentioned in the commit description — confirm).
services.tailscale.enable = true;
# === Open WebUI ===
myModules.open-webui = {
enable = true;
# Public hostname; the module sets up an nginx reverse proxy for it.
domain = "ai.ashisgreat.xyz";
# Ollama API endpoint, reached via the Headscale tailnet address.
ollamaUrl = "http://100.64.0.1:11434";
};
# === Backups (Restic + B2) ===
myModules.backup = {
enable = true;