Add temperature override option in config
- Add temperature field to Config struct (optional override)
- Pass tempOverride to ConvertOpenAIRequest
- Use override temperature if set, otherwise use client's temperature
- Document option in config.yaml with example
- Update README with temperature override documentation
This commit is contained in:
parent
cea246da83
commit
29292addac
5 changed files with 19 additions and 8 deletions
|
|
@ -28,6 +28,7 @@ upstream_url: "https://api.z.ai/api/anthropic"
|
|||
|
||||
- `port`: Port to listen on (default: 8080)
|
||||
- `upstream_url`: Base URL for the Anthropic-compatible upstream API
|
||||
- `temperature` (optional): Override temperature for all requests. If set, this value is used instead of client-specified temperatures. Remove this line (or set it to null) to respect client temperatures.
|
||||
|
||||
## Building
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,11 @@
|
|||
port: 8080
|
||||
upstream_url: "https://api.z.ai/api/anthropic"
|
||||
|
||||
# Temperature override for all requests (optional)
|
||||
# If set, this temperature will be used instead of what clients request
|
||||
# Remove this line or set to null to use client-specified temperatures
|
||||
# temperature: 0.7
|
||||
|
||||
models:
|
||||
- id: "glm-4.7"
|
||||
owned_by: "zhipu"
|
||||
|
|
|
|||
|
|
@ -7,7 +7,8 @@ import (
|
|||
)
|
||||
|
||||
// ConvertOpenAIRequest converts an OpenAI ChatCompletionRequest to Anthropic format
|
||||
func ConvertOpenAIRequest(req *ChatCompletionRequest) *AnthropicRequest {
|
||||
// tempOverride, if provided, overrides any temperature from the request
|
||||
func ConvertOpenAIRequest(req *ChatCompletionRequest, tempOverride *float64) *AnthropicRequest {
|
||||
system, remainingMessages := extractSystemMessage(req.Messages)
|
||||
|
||||
anthropicReq := &AnthropicRequest{
|
||||
|
|
@ -27,7 +28,10 @@ func ConvertOpenAIRequest(req *ChatCompletionRequest) *AnthropicRequest {
|
|||
if req.Stream != nil {
|
||||
anthropicReq.Stream = *req.Stream
|
||||
}
|
||||
if req.Temperature != nil {
|
||||
// Use temperature override if configured, otherwise use request temperature
|
||||
if tempOverride != nil {
|
||||
anthropicReq.Temperature = tempOverride
|
||||
} else if req.Temperature != nil {
|
||||
anthropicReq.Temperature = req.Temperature
|
||||
}
|
||||
if req.TopP != nil {
|
||||
|
|
|
|||
|
|
@ -21,6 +21,7 @@ type Config struct {
|
|||
Port int `yaml:"port"`
|
||||
UpstreamURL string `yaml:"upstream_url"`
|
||||
Models []ModelConfig `yaml:"models"`
|
||||
Temperature *float64 `yaml:"temperature,omitempty"`
|
||||
}
|
||||
|
||||
var config *Config
|
||||
|
|
@ -124,7 +125,7 @@ func handleChatCompletions(w http.ResponseWriter, r *http.Request) {
|
|||
|
||||
// Convert to Anthropic format — always non-streaming to upstream
|
||||
// (ZAI's streaming returns empty for GLM models)
|
||||
anthropicReq := ConvertOpenAIRequest(&req)
|
||||
anthropicReq := ConvertOpenAIRequest(&req, config.Temperature)
|
||||
anthropicReq.Stream = false
|
||||
|
||||
reqBody, _ := json.Marshal(anthropicReq)
|
||||
|
|
|
|||
BIN
proxx
BIN
proxx
Binary file not shown.
Loading…
Add table
Add a link
Reference in a new issue