// Package main (proxx/converter.go) converts between the OpenAI chat
// completion format and the Anthropic Messages API format.
package main
import (
	"encoding/json"
	"strings"
	"time"
)
// ConvertOpenAIRequest converts an OpenAI ChatCompletionRequest to Anthropic format.
//
// System messages are extracted into the top-level System field, extended
// thinking is enabled with a fixed token budget, and the optional sampling
// parameters are copied over only when the client supplied them.
func ConvertOpenAIRequest(req *ChatCompletionRequest) *AnthropicRequest {
	system, remainingMessages := extractSystemMessage(req.Messages)
	anthropicReq := &AnthropicRequest{
		Model:     req.Model,
		Messages:  convertMessages(remainingMessages),
		System:    system,
		MaxTokens: 32000, // generous default; overridden below when the client sets max_tokens
		Thinking: &AnthropicThinking{
			Type:         "enabled",
			BudgetTokens: 10000,
		},
	}
	if req.MaxTokens != nil {
		anthropicReq.MaxTokens = *req.MaxTokens
	}
	// Anthropic requires max_tokens to be strictly greater than
	// thinking.budget_tokens; a small client-supplied max_tokens would
	// otherwise make the request fail outright. Drop thinking when the
	// budget no longer fits.
	if anthropicReq.Thinking != nil && anthropicReq.MaxTokens <= anthropicReq.Thinking.BudgetTokens {
		anthropicReq.Thinking = nil
	}
	if req.Stream != nil {
		anthropicReq.Stream = *req.Stream
	}
	// NOTE(review): Anthropic rejects temperature/top_p adjustments while
	// extended thinking is enabled — confirm whether these should be
	// suppressed when Thinking is set.
	if req.Temperature != nil {
		anthropicReq.Temperature = req.Temperature
	}
	if req.TopP != nil {
		anthropicReq.TopP = req.TopP
	}
	if len(req.Stop) > 0 {
		anthropicReq.StopSequences = req.Stop
	}
	if len(req.Tools) > 0 {
		anthropicReq.Tools = convertTools(req.Tools)
	}
	if req.ToolChoices != nil {
		anthropicReq.ToolChoice = convertToolChoice(req.ToolChoices)
	}
	return anthropicReq
}
// extractSystemMessage pulls role="system" messages out of the list, joins
// their text with newlines into a single system prompt, and returns the
// remaining messages in their original order.
//
// Both plain-string content and the OpenAI array-of-parts content form
// ([{"type":"text","text":...}, ...]) are supported; previously array-form
// system content was silently dropped. Non-text parts are ignored.
func extractSystemMessage(messages []Message) (string, []Message) {
	var systemParts []string
	var rest []Message
	for _, msg := range messages {
		if msg.Role != "system" {
			rest = append(rest, msg)
			continue
		}
		switch content := msg.Content.(type) {
		case string:
			systemParts = append(systemParts, content)
		case []interface{}:
			// OpenAI also allows content as a list of typed parts; collect
			// the text of every "text" part.
			for _, part := range content {
				partMap, ok := part.(map[string]interface{})
				if !ok {
					continue
				}
				if partType, _ := partMap["type"].(string); partType == "text" {
					if text, ok := partMap["text"].(string); ok {
						systemParts = append(systemParts, text)
					}
				}
			}
		}
	}
	return strings.Join(systemParts, "\n"), rest
}
// convertMessages converts OpenAI messages into Anthropic messages.
//
// String content is passed through unchanged; array-of-parts content keeps
// only the "text" parts as Anthropic text blocks. Image parts are skipped
// for now, since Anthropic expects a different image format.
func convertMessages(messages []Message) []AnthropicMessage {
	var converted []AnthropicMessage
	for _, src := range messages {
		dst := AnthropicMessage{Role: src.Role}
		switch body := src.Content.(type) {
		case string:
			dst.Content = body
		case []interface{}:
			var blocks []ContentBlock
			for _, raw := range body {
				part, isMap := raw.(map[string]interface{})
				if !isMap {
					continue
				}
				kind, _ := part["type"].(string)
				if kind != "text" {
					// Image and other part types: skipped for now.
					continue
				}
				text, _ := part["text"].(string)
				blocks = append(blocks, ContentBlock{Type: "text", Text: text})
			}
			dst.Content = blocks
		}
		converted = append(converted, dst)
	}
	return converted
}
// convertTools converts OpenAI function tools into Anthropic tools, mapping
// each function's name, description, and JSON-schema parameters across.
func convertTools(tools []Tool) []AnthropicTool {
	var converted []AnthropicTool
	for _, t := range tools {
		converted = append(converted, AnthropicTool{
			Name:        t.Function.Name,
			Description: t.Function.Description,
			InputSchema: t.Function.Parameters,
		})
	}
	return converted
}
// convertToolChoice converts an OpenAI tool_choice to the Anthropic format.
//
// OpenAI "required" (the model must call some tool) maps to Anthropic "any",
// and a specific-function selection ({"type":"function","function":{"name":...}})
// maps to Anthropic's {"type":"tool","name":...} — previously the invalid
// type "function" was passed through verbatim. Other values ("auto", "none")
// pass through unchanged.
func convertToolChoice(tc *ToolChoice) *AnthropicToolChoice {
	if tc == nil {
		return nil
	}
	result := &AnthropicToolChoice{}
	switch tc.Type {
	case "required":
		result.Type = "any"
	case "function":
		// Anthropic names a specific tool with type "tool", not "function".
		result.Type = "tool"
	default:
		// Includes "auto", "none", and the empty string (left unset, as before).
		result.Type = tc.Type
	}
	if tc.Function != nil {
		result.Name = tc.Function.Name
	}
	return result
}
// ConvertAnthropicResponse converts an Anthropic response to OpenAI format.
//
// All text blocks are concatenated into the assistant message content,
// tool_use blocks become OpenAI tool calls, token usage is copied across,
// and the Anthropic stop reason is mapped onto the OpenAI finish reason.
func ConvertAnthropicResponse(resp *AnthropicResponse, model string) *ChatCompletionResponse {
	response := &ChatCompletionResponse{
		ID:     resp.Id,
		Object: "chat.completion",
		// Real wall-clock time, not the former placeholder 1234567890.
		Created: time.Now().Unix(),
		Model:   model,
		Choices: make([]Choice, 0),
		Usage: Usage{
			PromptTokens:     resp.Usage.InputTokens,
			CompletionTokens: resp.Usage.OutputTokens,
			TotalTokens:      resp.Usage.InputTokens + resp.Usage.OutputTokens,
		},
	}

	// Concatenate the text blocks; tool_use blocks go through the shared
	// buildToolCalls helper instead of a duplicated inline loop.
	var textContent string
	for _, block := range resp.Content {
		if block.Type == "text" {
			textContent += block.Text
		}
	}
	toolCalls := buildToolCalls(resp.Content)

	// A single choice covers every case: with no content blocks this yields
	// an empty assistant message, and with no tool_use blocks ToolCalls is
	// nil — exactly what the previous separate branches produced.
	response.Choices = append(response.Choices, Choice{
		Index: 0,
		Message: Message{
			Role:      "assistant",
			Content:   textContent,
			ToolCalls: toolCalls,
		},
		FinishReason: mapStopReason(resp.StopReason),
	})
	return response
}
// mapStopReason maps an Anthropic stop reason onto the equivalent OpenAI
// finish reason. Unrecognized reasons (and "end_turn") report "stop".
func mapStopReason(reason string) string {
	switch reason {
	case "tool_use":
		return "tool_calls"
	case "max_tokens":
		return "length"
	default:
		// "end_turn" and anything unrecognized both finish as a plain stop.
		return "stop"
	}
}
// buildToolCalls builds an OpenAI ToolCall slice from Anthropic content
// blocks, converting every tool_use block and ignoring all other types.
func buildToolCalls(content []ContentBlock) []ToolCall {
	var calls []ToolCall
	for _, b := range content {
		if b.Type != "tool_use" {
			continue
		}
		// Re-serialize the structured input as the JSON arguments string
		// that OpenAI clients expect; marshaling an already-decoded value
		// is not expected to fail, so the error is deliberately dropped.
		args, _ := json.Marshal(b.Input)
		calls = append(calls, ToolCall{
			ID:   b.Id,
			Type: "function",
			Function: FunctionCall{
				Name:      b.Name,
				Arguments: string(args),
			},
		})
	}
	return calls
}