feat: add LM Studio headless LMS server module
- Custom NixOS module wrapping the lms CLI (daemon + server)
- Supports auto-loading models on startup
- Optional nginx reverse proxy via domain option
- Security hardening (NoNewPrivileges, ProtectSystem, PrivateTmp)
- Follows existing myModules.* convention
- Enabled on this host on port 1234 (no models loaded yet)
This commit is contained in:
parent
10d8924106
commit
9f82a7b00e
3 changed files with 185 additions and 0 deletions
|
|
@@ -215,6 +215,14 @@
     ollamaUrl = "http://100.64.0.1:11434";
   };
 
+  # === LM Studio Headless LMS Server ===
+  myModules.lmstudio-server = {
+    enable = true;
+    port = 1234;
+    # domain = "llm.ashisgreat.xyz"; # Uncomment to add nginx reverse proxy
+    # models = [ "openai/gpt-oss-20b" ]; # Uncomment to auto-load models
+  };
+
   # === Backups (Restic + B2) ===
   myModules.backup = {
     enable = true;
Loading…
Add table
Add a link
Reference in a new issue