From 68da82c26b88a64828c267f7a24672622453664e Mon Sep 17 00:00:00 2001
From: ashisgreat22
Date: Fri, 20 Mar 2026 22:47:05 +0100
Subject: [PATCH] feat(open-webui): add OpenWeb UI module for LLM interface

Add native NixOS module wrapping services.open-webui with:
- Configurable Ollama endpoint (defaults to 100.64.0.1:11434 via Headscale)
- SOPS secret management for WEBUI_SECRET_KEY
- Nginx reverse proxy integration with relaxed CSP for WebSockets
- Exposed at ai.ashisgreat.xyz

Co-Authored-By: Claude Opus 4.6
---
 configuration.nix      | 12 +++++++
 modules/default.nix    |  1 +
 modules/open-webui.nix | 76 ++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 89 insertions(+)
 create mode 100644 modules/open-webui.nix

diff --git a/configuration.nix b/configuration.nix
index 785fe16..254f638 100644
--- a/configuration.nix
+++ b/configuration.nix
@@ -90,6 +90,11 @@
 
   nix.settings.experimental-features = [ "nix-command" "flakes" ];
 
+  # Allow unfree packages (for open-webui)
+  nixpkgs.config.allowUnfreePredicate = pkg: builtins.elem (lib.getName pkg) [
+    "open-webui"
+  ];
+
   # === SearXNG ===
   myModules.searxng = {
     enable = true;
@@ -203,6 +208,13 @@
   # === Tailscale Client ===
   services.tailscale.enable = true;
 
+  # === OpenWeb UI ===
+  myModules.open-webui = {
+    enable = true;
+    domain = "ai.ashisgreat.xyz";
+    ollamaUrl = "http://100.64.0.1:11434";
+  };
+
   # === Backups (Restic + B2) ===
   myModules.backup = {
     enable = true;
diff --git a/modules/default.nix b/modules/default.nix
index 32fc5dd..a743700 100644
--- a/modules/default.nix
+++ b/modules/default.nix
@@ -12,5 +12,6 @@
     ./adguard.nix
     ./forgejo.nix
     ./headscale.nix
+    ./open-webui.nix
   ];
 }
diff --git a/modules/open-webui.nix b/modules/open-webui.nix
new file mode 100644
index 0000000..769b5df
--- /dev/null
+++ b/modules/open-webui.nix
@@ -0,0 +1,76 @@
+# OpenWeb UI Module
+# Provides: Web interface for LLMs (Ollama, OpenAI-compatible APIs)
+#
+# Usage:
+#   myModules.open-webui = {
+#     enable = true;
+#     port = 8080;
+#     domain = "ai.example.com";
+#     ollamaUrl = "http://100.64.0.1:11434"; # Remote Ollama via Tailscale/Headscale
+#   };
+
+{
+  config,
+  lib,
+  ...
+}:
+
+let
+  cfg = config.myModules.open-webui;
+in
+{
+  options.myModules.open-webui = {
+    enable = lib.mkEnableOption "OpenWeb UI for LLMs";
+
+    port = lib.mkOption {
+      type = lib.types.port;
+      default = 8080;
+      description = "Port to expose OpenWeb UI on localhost";
+    };
+
+    domain = lib.mkOption {
+      type = lib.types.str;
+      example = "ai.example.com";
+      description = "Public domain name for OpenWeb UI";
+    };
+
+    ollamaUrl = lib.mkOption {
+      type = lib.types.str;
+      default = "http://127.0.0.1:11434";
+      example = "http://100.64.0.1:11434";
+      description = "URL of the Ollama API endpoint";
+    };
+  };
+
+  config = lib.mkIf cfg.enable {
+    services.open-webui = {
+      enable = true;
+      port = cfg.port;
+      host = "127.0.0.1";
+      environment = {
+        OLLAMA_BASE_URL = cfg.ollamaUrl;
+        WEBUI_URL = "https://${cfg.domain}";
+      };
+      environmentFile = config.sops.templates."openwebui.env".path;
+    };
+
+    # SOPS template for secrets
+    sops.templates."openwebui.env" = {
+      content = ''
+        WEBUI_SECRET_KEY=${config.sops.placeholder.openwebui_secret_key}
+      '';
+    };
+
+    sops.secrets.openwebui_secret_key = { };
+
+    # Nginx configuration
+    myModules.nginx.domains.${cfg.domain} = {
+      port = cfg.port;
+      extraConfig = ''
+        client_max_body_size 100M;
+      '';
+      # Relaxed CSP for OpenWeb UI — needs unsafe-eval for some JS, WebSockets, external images
+      contentSecurityPolicy = "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; connect-src 'self' wss: https:; frame-ancestors 'self'";
+    };
+  };
+}