feat: add gemini-cli to OpenClaw

This commit is contained in:
ashisgreat22 2026-03-06 23:12:55 +01:00
parent 509c824941
commit 74d98c0e52
2 changed files with 55 additions and 19 deletions

View file

@ -29,7 +29,12 @@
gateway = {
port = 18789;
bind = "loopback";
trustedProxies = [ "::1" "127.0.0.1" "10.88.0.0/16" "10.89.0.0/16" ];
trustedProxies = [
"::1"
"127.0.0.1"
"10.88.0.0/16"
"10.89.0.0/16"
];
auth = {
mode = "none";
};
@ -42,7 +47,10 @@
discord = {
enabled = true;
token = "/run/secrets/openclaw-discord-token";
allowFrom = [ "1178286690750693419" "*" ];
allowFrom = [
"1178286690750693419"
"*"
];
groupPolicy = "open";
dmPolicy = "open";
};
@ -71,7 +79,7 @@
mode = "merge";
providers.zai = {
baseUrl = "https://api.z.ai/api/coding/paas/v4";
apiKey = "e77f2c392cb942eca9d0407eebc75549.XG7ikxT2kBEQUPYx";
apiKey = config.sops.secrets."openclaw/glm_api_key".path;
models = [
{
id = "glm-4.7";
@ -89,6 +97,25 @@
}
];
};
providers.cli = {
api = "openai-completions";
baseUrl = "http://localhost:8045/cli/v1";
apiKey = config.sops.secrets."openclaw/master_api_key".path;
models = [
{
id = "gemini-3.1-pro-preview";
name = "gemini-3.1-pro-preview";
contextWindow = 1000000;
maxTokens = 65536;
}
{
id = "gemini-3-flash-preview";
name = "gemini-3-flash-preview";
contextWindow = 128000;
maxTokens = 65536;
}
];
};
};
skills.entries.mcporter.enabled = true;
};

View file

@ -1,4 +1,10 @@
{ config, lib, pkgs, inputs, ... }:
{
config,
lib,
pkgs,
inputs,
...
}:
with lib;
@ -68,9 +74,11 @@ in
key = "searxng_brave_api_key";
};
# Ensure the secrets exist in the sops config; if they don't, the user needs to add them.
# We assume secrets.yaml has these keys, or that the user will map them.
# The user previously had /run/secrets/openclaw-discord-token.
sops.secrets."openclaw/master_api_key" = {
owner = cfg.user;
group = cfg.group;
key = "master_api_key";
};
systemd.services.openclaw = {
description = "OpenClaw AI Agent";
@ -85,14 +93,6 @@ in
Restart = "always";
RestartSec = "10s";
# Environment variables or config file generation
# OpenClaw seems to take config via a file or env vars.
# Based on previous flake, it used a config file.
# We can generate the config file in the ExecStartPre or rely on env vars if supported.
# The previous flake copied a config file.
# Let's verify how openclaw takes config.
# It used OPENCLAW_CONFIG_DIR, OPENCLAW_DATA_DIR, OPENCLAW_WORKSPACE_DIR env vars.
};
environment = {
@ -175,6 +175,15 @@ in
{ "id": "glm-4.7", "name": "GLM 4.7", "reasoning": true, "contextWindow": 128000, "maxTokens": 128000 },
{ "id": "glm-5", "name": "GLM 5", "reasoning": true, "contextWindow": 128000, "maxTokens": 128000 }
]
},
"cli": {
"api": "openai-completions",
"baseUrl": "http://localhost:8045/cli/v1",
"apiKey": "$(cat ${config.sops.secrets."openclaw/master_api_key".path})",
"models": [
{ "id": "gemini-3.1-pro-preview", "name": "gemini-3.1-pro-preview", "contextWindow": 1000000, "maxTokens": 65536 },
{ "id": "gemini-3-flash-preview", "name": "gemini-3-flash-preview", "contextWindow": 128000, "maxTokens": 65536 }
]
}
}
},