feat: add LM Studio headless LMS server module

- Custom NixOS module wrapping the lms CLI (daemon + server)
- Supports auto-loading models on startup
- Optional nginx reverse proxy via domain option
- Security hardening (NoNewPrivileges, ProtectSystem, PrivateTmp)
- Follows existing myModules.* convention
- Enabled on this host on port 1234 (no models loaded yet)
This commit is contained in:
Franz Kafka 2026-03-21 20:25:13 +00:00
parent 10d8924106
commit 9f82a7b00e
3 changed files with 185 additions and 0 deletions

View file

@@ -215,6 +215,14 @@
ollamaUrl = "http://100.64.0.1:11434";
};
  # === LM Studio Headless LMS Server ===
  # Enables the custom myModules wrapper around the `lms` CLI (daemon + API
  # server) on this host. Only `enable` and `port` are set; the optional
  # settings below are left commented out as documented examples.
  myModules.lmstudio-server = {
    enable = true;
    port = 1234; # TCP port the LMS API server listens on
    # domain = "llm.ashisgreat.xyz"; # Uncomment to put an nginx reverse proxy in front of the server
    # models = [ "openai/gpt-oss-20b" ]; # Uncomment to auto-load these models when the service starts
  };
# === Backups (Restic + B2) ===
myModules.backup = {
enable = true;