parallel optimization groundwork

This commit is contained in:
LPF
2026-05-10 17:27:06 +08:00
parent ce2263ac8c
commit 7b07bb270b
37 changed files with 6896 additions and 3481 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,368 @@
package providers
import (
"encoding/json"
"fmt"
"regexp"
"strings"
)
// parseOpenAICompatResponse decodes an OpenAI-compatible /chat/completions
// JSON response body into an LLMResponse.
//
// Only the first choice is consumed; an empty choices array yields an
// empty (non-nil) response rather than an error. Usage info is attached
// only when the upstream reported at least one non-zero token counter.
// Tool-call arguments are preserved as the raw JSON string exactly as
// returned by the upstream.
func parseOpenAICompatResponse(body []byte) (*LLMResponse, error) {
	// Anonymous struct mirroring only the subset of the OpenAI
	// chat-completion schema this parser reads.
	var payload struct {
		Choices []struct {
			Message struct {
				Content   string `json:"content"`
				ToolCalls []struct {
					ID       string `json:"id"`
					Type     string `json:"type"`
					Function struct {
						Name      string `json:"name"`
						Arguments string `json:"arguments"`
					} `json:"function"`
				} `json:"tool_calls"`
			} `json:"message"`
			FinishReason string `json:"finish_reason"`
		} `json:"choices"`
		Usage struct {
			PromptTokens     int `json:"prompt_tokens"`
			CompletionTokens int `json:"completion_tokens"`
			TotalTokens      int `json:"total_tokens"`
		} `json:"usage"`
	}
	if err := json.Unmarshal(body, &payload); err != nil {
		return nil, err
	}
	// No choices: return an empty (non-nil) response rather than an error.
	if len(payload.Choices) == 0 {
		return &LLMResponse{}, nil
	}
	// Only the first choice is used.
	choice := payload.Choices[0]
	resp := &LLMResponse{
		Content:      choice.Message.Content,
		FinishReason: choice.FinishReason,
	}
	// Attach usage only when at least one counter is non-zero.
	if payload.Usage.TotalTokens > 0 || payload.Usage.PromptTokens > 0 || payload.Usage.CompletionTokens > 0 {
		resp.Usage = &UsageInfo{
			PromptTokens:     payload.Usage.PromptTokens,
			CompletionTokens: payload.Usage.CompletionTokens,
			TotalTokens:      payload.Usage.TotalTokens,
		}
	}
	if len(choice.Message.ToolCalls) > 0 {
		resp.ToolCalls = make([]ToolCall, 0, len(choice.Message.ToolCalls))
		for _, tc := range choice.Message.ToolCalls {
			// The function name is mirrored at the top level (Name)
			// alongside Function for callers that read either field;
			// Arguments stay as the raw JSON string.
			resp.ToolCalls = append(resp.ToolCalls, ToolCall{
				ID:   tc.ID,
				Type: tc.Type,
				Function: &FunctionCall{
					Name:      tc.Function.Name,
					Arguments: tc.Function.Arguments,
				},
				Name: tc.Function.Name,
			})
		}
	}
	return resp, nil
}
// useCodexCompat reports whether requests should be routed through the
// Codex OAuth compatibility path: an OAuth manager must be present and
// configured for the Codex provider, and the API base must be either
// unset or point at a known Codex-capable endpoint.
func (p *HTTPProvider) useCodexCompat() bool {
	if p == nil || p.oauth == nil {
		return false
	}
	provider := strings.TrimSpace(p.oauth.cfg.Provider)
	if !strings.EqualFold(provider, defaultCodexOAuthProvider) {
		return false
	}
	switch base := strings.ToLower(strings.TrimSpace(p.apiBase)); {
	case base == "":
		return true
	case strings.Contains(base, "api.openai.com"):
		return true
	case strings.Contains(base, "chatgpt.com/backend-api/codex"):
		return true
	default:
		return false
	}
}
// codexCompatBase resolves the base URL for Codex-compatible requests.
// A configured ChatGPT backend base, or any custom base other than
// api.openai.com, is normalized and used as-is; otherwise the default
// Codex compat base URL applies.
func (p *HTTPProvider) codexCompatBase() string {
	if p == nil {
		return codexCompatBaseURL
	}
	lowered := strings.ToLower(strings.TrimSpace(p.apiBase))
	switch {
	case strings.Contains(lowered, "chatgpt.com/backend-api/codex"):
		return normalizeAPIBase(p.apiBase)
	case lowered != "" && !strings.Contains(lowered, "api.openai.com"):
		return normalizeAPIBase(p.apiBase)
	default:
		return codexCompatBaseURL
	}
}
// codexCompatRequestBody applies the Codex compatibility rewrites to a
// Responses API request body by delegating to the package-level helper
// of the same name.
func (p *HTTPProvider) codexCompatRequestBody(body map[string]interface{}) map[string]interface{} {
	return codexCompatRequestBody(body)
}
// oauthProvider returns the configured OAuth provider name, trimmed and
// lowercased; it is empty when no OAuth manager is attached.
func (p *HTTPProvider) oauthProvider() string {
	if p == nil || p.oauth == nil {
		return ""
	}
	name := strings.TrimSpace(p.oauth.cfg.Provider)
	return strings.ToLower(name)
}
// useOpenAICompatChatUpstream reports whether this provider should talk
// to an OpenAI-style /chat/completions upstream instead of the
// Responses API; this is the case for the Qwen and Kimi OAuth providers.
func (p *HTTPProvider) useOpenAICompatChatUpstream() bool {
	provider := p.oauthProvider()
	return provider == defaultQwenOAuthProvider || provider == defaultKimiOAuthProvider
}
// compatBase resolves the base URL for chat-compat upstreams. For Qwen
// and Kimi, a non-empty custom base that is not api.openai.com wins;
// otherwise the provider-specific default applies. Any other provider
// just uses its normalized configured base.
func (p *HTTPProvider) compatBase() string {
	var fallback string
	switch p.oauthProvider() {
	case defaultQwenOAuthProvider:
		fallback = qwenCompatBaseURL
	case defaultKimiOAuthProvider:
		fallback = kimiCompatBaseURL
	default:
		return normalizeAPIBase(p.apiBase)
	}
	if strings.TrimSpace(p.apiBase) != "" && !strings.Contains(strings.ToLower(p.apiBase), "api.openai.com") {
		return normalizeAPIBase(p.apiBase)
	}
	return fallback
}
// compatModel maps a configured model name onto the identifier expected
// by the compat upstream: the Qwen thinking-suffix is stripped first,
// and for the Kimi provider a leading "kimi-" prefix is removed.
func (p *HTTPProvider) compatModel(model string) string {
	base := strings.TrimSpace(qwenBaseModel(model))
	if p.oauthProvider() != defaultKimiOAuthProvider {
		return base
	}
	const prefix = "kimi-"
	if strings.HasPrefix(strings.ToLower(base), prefix) {
		return base[len(prefix):]
	}
	return base
}
// buildOpenAICompatChatRequest assembles an OpenAI-style
// /chat/completions request body: the compat model name, converted
// messages, optional tools (tool_choice defaults to "auto" unless the
// options override it), and max_tokens / temperature when supplied.
func (p *HTTPProvider) buildOpenAICompatChatRequest(messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) map[string]interface{} {
	body := map[string]interface{}{
		"model":    p.compatModel(model),
		"messages": openAICompatMessages(messages),
	}
	// Re-attach the Qwen thinking suffix as a request option when present.
	if suffix := qwenModelSuffix(model); suffix != "" {
		applyOpenAICompatThinkingSuffix(body, suffix)
	}
	if len(tools) > 0 {
		body["tools"] = openAICompatTools(tools)
		if tc, ok := rawOption(options, "tool_choice"); ok {
			body["tool_choice"] = tc
		} else {
			body["tool_choice"] = "auto"
		}
	}
	if v, ok := int64FromOption(options, "max_tokens"); ok {
		body["max_tokens"] = v
	}
	if v, ok := float64FromOption(options, "temperature"); ok {
		body["temperature"] = v
	}
	return body
}
// openAICompatMessages converts internal Message values into the
// OpenAI chat-completion message format.
//
// Role handling: "system" passes through; "developer" is downgraded to
// "user"; "assistant" carries tool calls when present; "tool" carries
// its tool_call_id; any other role is sent as "user".
func openAICompatMessages(messages []Message) []map[string]interface{} {
	out := make([]map[string]interface{}, 0, len(messages))
	for _, msg := range messages {
		role := strings.ToLower(strings.TrimSpace(msg.Role))
		content := openAICompatMessageContent(msg)
		switch role {
		case "system":
			out = append(out, map[string]interface{}{"role": "system", "content": content})
		case "developer":
			// Chat-completions upstreams here take no "developer" role.
			out = append(out, map[string]interface{}{"role": "user", "content": content})
		case "assistant":
			item := map[string]interface{}{"role": "assistant", "content": content}
			if len(msg.ToolCalls) > 0 {
				toolCalls := make([]map[string]interface{}, 0, len(msg.ToolCalls))
				for _, tc := range msg.ToolCalls {
					// Prefer the raw argument string from Function;
					// fall back to re-marshalling the structured
					// Arguments value.
					args := ""
					if tc.Function != nil {
						args = tc.Function.Arguments
					}
					if args == "" {
						raw, _ := json.Marshal(tc.Arguments)
						args = string(raw)
					}
					// Prefer the function-level name when set.
					name := tc.Name
					if tc.Function != nil && strings.TrimSpace(tc.Function.Name) != "" {
						name = tc.Function.Name
					}
					toolCalls = append(toolCalls, map[string]interface{}{
						"id":   tc.ID,
						"type": "function",
						"function": map[string]interface{}{
							"name":      name,
							"arguments": args,
						},
					})
				}
				item["tool_calls"] = toolCalls
			}
			out = append(out, item)
		case "tool":
			out = append(out, map[string]interface{}{
				"role":         "tool",
				"tool_call_id": msg.ToolCallID,
				"content":      content,
			})
		default:
			out = append(out, map[string]interface{}{"role": "user", "content": content})
		}
	}
	return out
}
// openAICompatMessageContent renders a message's content for the OpenAI
// chat format: the plain Content string when there are no structured
// parts, otherwise a list of text / image_url part objects. Unknown
// part types degrade to text. A single surviving text part (with no
// tool calls on the message) collapses back to a bare string.
func openAICompatMessageContent(msg Message) interface{} {
	if len(msg.ContentParts) == 0 {
		return msg.Content
	}
	parts := make([]map[string]interface{}, 0, len(msg.ContentParts))
	for _, part := range msg.ContentParts {
		switch strings.ToLower(strings.TrimSpace(part.Type)) {
		case "text", "input_text":
			// Empty text parts are skipped entirely.
			if strings.TrimSpace(part.Text) == "" {
				continue
			}
			parts = append(parts, map[string]interface{}{
				"type": "text",
				"text": part.Text,
			})
		case "input_image", "image_url":
			imageURL := strings.TrimSpace(part.ImageURL)
			if imageURL == "" {
				continue
			}
			payload := map[string]interface{}{
				"type": "image_url",
				"image_url": map[string]interface{}{
					"url": imageURL,
				},
			}
			// Optional detail hint is forwarded verbatim when present.
			if detail := strings.TrimSpace(part.Detail); detail != "" {
				payload["image_url"].(map[string]interface{})["detail"] = detail
			}
			parts = append(parts, payload)
		default:
			// Unknown part types degrade to plain text if they carry any.
			if strings.TrimSpace(part.Text) == "" {
				continue
			}
			parts = append(parts, map[string]interface{}{
				"type": "text",
				"text": part.Text,
			})
		}
	}
	// Everything filtered out: fall back to the flat Content string.
	if len(parts) == 0 {
		return msg.Content
	}
	// Collapse a lone text part to a bare string for wider upstream
	// compatibility — but not when tool calls accompany the message.
	if len(parts) == 1 && parts[0]["type"] == "text" && len(msg.ToolCalls) == 0 {
		if text, _ := parts[0]["text"].(string); text != "" {
			return text
		}
	}
	return parts
}
// openAICompatTools converts tool definitions into the OpenAI
// chat-completion "function" tool format, copying each tool's name,
// description, and JSON-schema parameters.
func openAICompatTools(tools []ToolDefinition) []map[string]interface{} {
	converted := make([]map[string]interface{}, 0, len(tools))
	for _, t := range tools {
		fn := map[string]interface{}{
			"name":        t.Function.Name,
			"description": t.Function.Description,
			"parameters":  t.Function.Parameters,
		}
		converted = append(converted, map[string]interface{}{
			"type":     "function",
			"function": fn,
		})
	}
	return converted
}
// codexCompatRequestBody rewrites a Responses API request body for the
// Codex-compat upstream: streaming is forced on, storage and several
// sampling parameters are stripped, encrypted reasoning content is
// requested by default, and "system" input roles become "developer".
// The map is mutated in place and returned (a nil input yields a fresh
// map).
func codexCompatRequestBody(requestBody map[string]interface{}) map[string]interface{} {
	if requestBody == nil {
		requestBody = make(map[string]interface{})
	}
	requestBody["stream"] = true
	requestBody["store"] = false
	requestBody["parallel_tool_calls"] = true
	if _, present := requestBody["include"]; !present {
		requestBody["include"] = []string{"reasoning.encrypted_content"}
	}
	// Parameters the Codex upstream rejects or ignores.
	for _, key := range []string{"max_output_tokens", "max_completion_tokens", "temperature", "top_p", "truncation", "user"} {
		delete(requestBody, key)
	}
	if items, ok := requestBody["input"].([]map[string]interface{}); ok {
		for _, item := range items {
			role := strings.TrimSpace(fmt.Sprintf("%v", item["role"]))
			if strings.EqualFold(role, "system") {
				item["role"] = "developer"
			}
		}
		requestBody["input"] = items
	}
	return requestBody
}
// parseCompatFunctionCalls extracts pseudo-XML <function_call> blocks
// that some compat upstreams emit inline in assistant text instead of
// structured tool calls. It returns the parsed ToolCalls plus the
// content with those blocks stripped; when no blocks are found the
// original content is returned unchanged with nil calls.
func parseCompatFunctionCalls(content string) ([]ToolCall, string) {
	// Fast path: nothing to parse.
	if strings.TrimSpace(content) == "" || !strings.Contains(content, "<function_call>") {
		return nil, content
	}
	blockRe := regexp.MustCompile(`(?is)<function_call>\s*(.*?)\s*</function_call>`)
	blocks := blockRe.FindAllStringSubmatch(content, -1)
	if len(blocks) == 0 {
		return nil, content
	}
	toolCalls := make([]ToolCall, 0, len(blocks))
	for i, block := range blocks {
		raw := block[1]
		// An optional <invoke> wrapper narrows the payload.
		invoke := extractTag(raw, "invoke")
		if invoke != "" {
			raw = invoke
		}
		// The tool name may appear as <toolname> or <tool_name>.
		name := extractTag(raw, "toolname")
		if strings.TrimSpace(name) == "" {
			name = extractTag(raw, "tool_name")
		}
		name = strings.TrimSpace(name)
		if name == "" {
			// Unnamed blocks are unusable and dropped.
			continue
		}
		args := map[string]interface{}{}
		paramsRaw := strings.TrimSpace(extractTag(raw, "parameters"))
		if paramsRaw != "" {
			// First attempt: parameters as a JSON object.
			if strings.HasPrefix(paramsRaw, "{") && strings.HasSuffix(paramsRaw, "}") {
				_ = json.Unmarshal([]byte(paramsRaw), &args)
			}
			// Fallback: parameters as <key>value</key> tag pairs.
			if len(args) == 0 {
				paramTagRe := regexp.MustCompile(`(?is)<([a-zA-Z0-9_:-]+)>\s*(.*?)\s*</([a-zA-Z0-9_:-]+)>`)
				matches := paramTagRe.FindAllStringSubmatch(paramsRaw, -1)
				for _, m := range matches {
					// Only accept properly matched open/close tag names.
					if len(m) < 4 || !strings.EqualFold(strings.TrimSpace(m[1]), strings.TrimSpace(m[3])) {
						continue
					}
					k := strings.TrimSpace(m[1])
					v := strings.TrimSpace(m[2])
					if k == "" || v == "" {
						continue
					}
					args[k] = v
				}
			}
		}
		// Synthesized IDs are 1-based to read naturally in traces.
		toolCalls = append(toolCalls, ToolCall{ID: fmt.Sprintf("compat_call_%d", i+1), Name: name, Arguments: args})
	}
	cleaned := strings.TrimSpace(blockRe.ReplaceAllString(content, ""))
	return toolCalls, cleaned
}
// extractTag returns the trimmed inner text of the first <tag>...</tag>
// pair in src (case-insensitive, dot matches newline), or "" when the
// tag is absent.
func extractTag(src string, tag string) string {
	quoted := regexp.QuoteMeta(tag)
	pattern := fmt.Sprintf(`(?is)<%s>\s*(.*?)\s*</%s>`, quoted, quoted)
	match := regexp.MustCompile(pattern).FindStringSubmatch(src)
	if len(match) >= 2 {
		return strings.TrimSpace(match[1])
	}
	return ""
}

View File

@@ -0,0 +1,139 @@
package providers
import (
"fmt"
"github.com/YspCoder/clawgo/pkg/config"
"strings"
"time"
)
// normalizeProviderRouteName maps the many accepted spellings of a
// provider name onto its canonical route identifier. Unrecognized names
// are returned trimmed but otherwise unchanged.
func normalizeProviderRouteName(name string) string {
	trimmed := strings.TrimSpace(name)
	switch strings.ToLower(trimmed) {
	case "geminicli", "gemini_cli":
		return "gemini-cli"
	case "aistudio", "ai-studio", "ai_studio", "google-ai-studio", "google_ai_studio", "googleaistudio":
		return "aistudio"
	case "google", "gemini-api-key", "gemini_api_key":
		return "gemini"
	case "anthropic", "claude-code", "claude_code", "claude-api-key", "claude_api_key":
		return "claude"
	case "openai-compatibility", "openai_compatibility", "openai-compat", "openai_compat":
		return "openai-compatibility"
	case "vertex-api-key", "vertex_api_key", "vertex-compat", "vertex_compat", "vertex-compatibility", "vertex_compatibility":
		return "vertex"
	case "codex-api-key", "codex_api_key":
		return "codex"
	case "i-flow", "i_flow":
		return "iflow"
	}
	return trimmed
}
// CreateProvider builds the primary LLM provider from configuration
// and, when the primary model reference names a model, pins it as the
// HTTP provider's default model.
func CreateProvider(cfg *config.Config) (LLMProvider, error) {
	primary := config.PrimaryProviderName(cfg)
	provider, err := CreateProviderByName(cfg, primary)
	if err != nil {
		return nil, err
	}
	_, modelRef := config.ParseProviderModelRef(cfg.Agents.Defaults.Model.Primary)
	modelRef = strings.TrimSpace(modelRef)
	if hp, ok := provider.(*HTTPProvider); ok && modelRef != "" {
		hp.defaultModel = modelRef
	}
	return provider, nil
}
// CreateProviderByName constructs the LLM provider registered under
// name: the name is normalized to its canonical route, the matching
// configuration is loaded and validated, an OAuth manager is attached
// when auth is "oauth" or "hybrid", and the route / OAuth-provider pair
// selects the concrete implementation (generic HTTPProvider as the
// fallback).
func CreateProviderByName(cfg *config.Config, name string) (LLMProvider, error) {
	routeName := normalizeProviderRouteName(name)
	pc, err := getProviderConfigByName(cfg, routeName)
	if err != nil {
		return nil, err
	}
	ConfigureProviderRuntime(routeName, pc)
	oauthProvider := normalizeOAuthProvider(pc.OAuth.Provider)
	// An empty API base is acceptable only for providers with built-in
	// endpoints, matched either by OAuth provider or by route name;
	// everything else must configure one explicitly.
	if pc.APIBase == "" &&
		oauthProvider != defaultAntigravityOAuthProvider &&
		oauthProvider != defaultGeminiOAuthProvider &&
		oauthProvider != "aistudio" &&
		oauthProvider != defaultCodexOAuthProvider &&
		oauthProvider != defaultClaudeOAuthProvider &&
		oauthProvider != defaultQwenOAuthProvider &&
		oauthProvider != defaultKimiOAuthProvider &&
		oauthProvider != defaultIFlowOAuthProvider &&
		!strings.EqualFold(routeName, "gemini-cli") &&
		!strings.EqualFold(routeName, "aistudio") &&
		!strings.EqualFold(routeName, "vertex") &&
		!strings.EqualFold(routeName, defaultAntigravityOAuthProvider) &&
		!strings.EqualFold(routeName, defaultGeminiOAuthProvider) &&
		!strings.EqualFold(routeName, defaultCodexOAuthProvider) &&
		!strings.EqualFold(routeName, defaultClaudeOAuthProvider) &&
		!strings.EqualFold(routeName, defaultQwenOAuthProvider) &&
		!strings.EqualFold(routeName, defaultKimiOAuthProvider) &&
		!strings.EqualFold(routeName, defaultIFlowOAuthProvider) {
		return nil, fmt.Errorf("no API base configured for provider %q", name)
	}
	if pc.TimeoutSec <= 0 {
		return nil, fmt.Errorf("invalid timeout_sec for provider %q: %d", name, pc.TimeoutSec)
	}
	// The first configured model doubles as the default model.
	defaultModel := ""
	if len(pc.Models) > 0 {
		defaultModel = pc.Models[0]
	}
	var oauth *oauthManager
	if strings.EqualFold(strings.TrimSpace(pc.Auth), "oauth") || strings.EqualFold(strings.TrimSpace(pc.Auth), "hybrid") {
		oauth, err = newOAuthManager(pc, time.Duration(pc.TimeoutSec)*time.Second)
		if err != nil {
			return nil, err
		}
	}
	// Dispatch to the specialized implementation; match order matters
	// (e.g. gemini-cli must be checked before the generic gemini route).
	if oauthProvider == defaultAntigravityOAuthProvider || strings.EqualFold(routeName, defaultAntigravityOAuthProvider) {
		return NewAntigravityProvider(routeName, pc.APIKey, pc.APIBase, defaultModel, pc.SupportsResponsesCompact, pc.Auth, time.Duration(pc.TimeoutSec)*time.Second, oauth), nil
	}
	if oauthProvider == "aistudio" || strings.EqualFold(routeName, "aistudio") {
		return NewAistudioProvider(routeName, pc.APIKey, pc.APIBase, defaultModel, pc.SupportsResponsesCompact, pc.Auth, time.Duration(pc.TimeoutSec)*time.Second, oauth), nil
	}
	if strings.EqualFold(routeName, "gemini-cli") {
		return NewGeminiCLIProvider(routeName, pc.APIKey, pc.APIBase, defaultModel, pc.SupportsResponsesCompact, pc.Auth, time.Duration(pc.TimeoutSec)*time.Second, oauth), nil
	}
	// NOTE(review): the "aistudio" route is already consumed above, so
	// the extra routeName check on this condition looks unreachable —
	// confirm whether it is intentional.
	if oauthProvider == defaultGeminiOAuthProvider || strings.EqualFold(routeName, defaultGeminiOAuthProvider) || strings.EqualFold(routeName, "aistudio") {
		return NewGeminiProvider(routeName, pc.APIKey, pc.APIBase, defaultModel, pc.SupportsResponsesCompact, pc.Auth, time.Duration(pc.TimeoutSec)*time.Second, oauth), nil
	}
	if strings.EqualFold(routeName, "vertex") {
		return NewVertexProvider(routeName, pc.APIKey, pc.APIBase, defaultModel, pc.SupportsResponsesCompact, pc.Auth, time.Duration(pc.TimeoutSec)*time.Second, oauth), nil
	}
	if oauthProvider == defaultCodexOAuthProvider || strings.EqualFold(routeName, defaultCodexOAuthProvider) {
		return NewCodexProvider(routeName, pc.APIKey, pc.APIBase, defaultModel, pc.SupportsResponsesCompact, pc.Auth, time.Duration(pc.TimeoutSec)*time.Second, oauth), nil
	}
	if oauthProvider == defaultClaudeOAuthProvider || strings.EqualFold(routeName, defaultClaudeOAuthProvider) {
		return NewClaudeProvider(routeName, pc.APIKey, pc.APIBase, defaultModel, pc.SupportsResponsesCompact, pc.Auth, time.Duration(pc.TimeoutSec)*time.Second, oauth), nil
	}
	if oauthProvider == defaultQwenOAuthProvider || strings.EqualFold(routeName, defaultQwenOAuthProvider) {
		return NewQwenProvider(routeName, pc.APIKey, pc.APIBase, defaultModel, pc.SupportsResponsesCompact, pc.Auth, time.Duration(pc.TimeoutSec)*time.Second, oauth), nil
	}
	if oauthProvider == defaultKimiOAuthProvider || strings.EqualFold(routeName, defaultKimiOAuthProvider) {
		return NewKimiProvider(routeName, pc.APIKey, pc.APIBase, defaultModel, pc.SupportsResponsesCompact, pc.Auth, time.Duration(pc.TimeoutSec)*time.Second, oauth), nil
	}
	if oauthProvider == defaultIFlowOAuthProvider || strings.EqualFold(routeName, defaultIFlowOAuthProvider) {
		return NewIFlowProvider(routeName, pc.APIKey, pc.APIBase, defaultModel, pc.SupportsResponsesCompact, pc.Auth, time.Duration(pc.TimeoutSec)*time.Second, oauth), nil
	}
	// Fallback: generic HTTP provider.
	return NewHTTPProvider(routeName, pc.APIKey, pc.APIBase, defaultModel, pc.SupportsResponsesCompact, pc.Auth, time.Duration(pc.TimeoutSec)*time.Second, oauth), nil
}
// ProviderSupportsResponsesCompact reports whether the named provider
// is configured to support the /responses/compact endpoint; unknown
// providers report false.
//
// NOTE(review): name is looked up as-is here, without the route
// normalization CreateProviderByName applies — confirm aliases are not
// expected by callers.
func ProviderSupportsResponsesCompact(cfg *config.Config, name string) bool {
	pc, err := getProviderConfigByName(cfg, name)
	return err == nil && pc.SupportsResponsesCompact
}
// getAllProviderConfigs returns every configured provider, keyed by
// provider name, as resolved by the config package.
func getAllProviderConfigs(cfg *config.Config) map[string]config.ProviderConfig {
	return config.AllProviderConfigs(cfg)
}
// getProviderConfigByName looks up a provider configuration by name,
// returning an error that names the (trimmed) provider when absent.
func getProviderConfigByName(cfg *config.Config, name string) (config.ProviderConfig, error) {
	pc, ok := config.ProviderConfigByName(cfg, name)
	if !ok {
		return config.ProviderConfig{}, fmt.Errorf("provider %q not found", strings.TrimSpace(name))
	}
	return pc, nil
}

View File

@@ -0,0 +1,171 @@
package providers

import (
	"fmt"
	"net/url"
	"strings"
	"unicode/utf8"
)
// rawOption fetches options[key], treating a missing map, missing key,
// or explicit nil value uniformly as "not set".
func rawOption(options map[string]interface{}, key string) (interface{}, bool) {
	v, ok := options[key] // reading a nil map is safe in Go
	if !ok || v == nil {
		return nil, false
	}
	return v, true
}
// stringOption fetches options[key] as a whitespace-trimmed string;
// missing keys, nil values, and non-string values report false.
func stringOption(options map[string]interface{}, key string) (string, bool) {
	v, ok := options[key]
	if !ok || v == nil {
		return "", false
	}
	s, isString := v.(string)
	if !isString {
		return "", false
	}
	return strings.TrimSpace(s), true
}
// mapOption fetches options[key] as a map[string]interface{}; missing
// keys, nil values, and other types report false.
func mapOption(options map[string]interface{}, key string) (map[string]interface{}, bool) {
	v, ok := options[key]
	if !ok || v == nil {
		return nil, false
	}
	m, isMap := v.(map[string]interface{})
	return m, isMap
}
// stringSliceOption fetches options[key] as a []string. Both []string
// and []interface{} sources are accepted; interface elements are
// stringified with %v. Elements are trimmed and empties dropped.
// Missing keys, nil values, and other types report false.
func stringSliceOption(options map[string]interface{}, key string) ([]string, bool) {
	v, ok := options[key]
	if !ok || v == nil {
		return nil, false
	}
	appendTrimmed := func(dst []string, s string) []string {
		if s = strings.TrimSpace(s); s != "" {
			dst = append(dst, s)
		}
		return dst
	}
	switch items := v.(type) {
	case []string:
		out := make([]string, 0, len(items))
		for _, item := range items {
			out = appendTrimmed(out, item)
		}
		return out, true
	case []interface{}:
		out := make([]string, 0, len(items))
		for _, item := range items {
			out = appendTrimmed(out, fmt.Sprintf("%v", item))
		}
		return out, true
	default:
		return nil, false
	}
}
// mapSliceOption fetches options[key] as a []map[string]interface{}.
// A typed slice passes through unchanged; a []interface{} is filtered
// to its map elements (non-maps dropped). Missing keys, nil values,
// and other types report false.
func mapSliceOption(options map[string]interface{}, key string) ([]map[string]interface{}, bool) {
	v, ok := options[key]
	if !ok || v == nil {
		return nil, false
	}
	switch items := v.(type) {
	case []map[string]interface{}:
		return items, true
	case []interface{}:
		out := make([]map[string]interface{}, 0, len(items))
		for _, item := range items {
			if m, isMap := item.(map[string]interface{}); isMap {
				out = append(out, m)
			}
		}
		return out, true
	default:
		return nil, false
	}
}
func previewResponseBody(body []byte) string {
preview := strings.TrimSpace(string(body))
preview = strings.ReplaceAll(preview, "\n", " ")
preview = strings.ReplaceAll(preview, "\r", " ")
if preview == "" {
return "<empty body>"
}
const maxLen = 600
if len(preview) > maxLen {
return preview[:maxLen] + "..."
}
return preview
}
// int64FromOption extracts options[key] as an int64, accepting the
// numeric representations that survive JSON decoding or direct
// construction (int, int64, float32, float64). Floats are truncated
// toward zero. Returns false when the map is nil, the key is absent,
// or the value is non-numeric.
//
// Kept symmetric with float64FromOption, which accepts the same set of
// source types.
func int64FromOption(options map[string]interface{}, key string) (int64, bool) {
	if options == nil {
		return 0, false
	}
	v, ok := options[key]
	if !ok {
		return 0, false
	}
	switch t := v.(type) {
	case int:
		return int64(t), true
	case int64:
		return t, true
	case float32:
		// Accepted for symmetry with float64FromOption.
		return int64(t), true
	case float64:
		return int64(t), true
	default:
		return 0, false
	}
}
// float64FromOption extracts options[key] as a float64, accepting the
// numeric representations that survive JSON decoding or direct
// construction (float32, float64, int, int64). Returns false when the
// map is nil, the key is absent, or the value is non-numeric.
//
// Kept symmetric with int64FromOption, which accepts the same set of
// source types.
func float64FromOption(options map[string]interface{}, key string) (float64, bool) {
	if options == nil {
		return 0, false
	}
	v, ok := options[key]
	if !ok {
		return 0, false
	}
	switch t := v.(type) {
	case float32:
		return float64(t), true
	case float64:
		return t, true
	case int:
		return float64(t), true
	case int64:
		// Accepted for symmetry with int64FromOption.
		return float64(t), true
	default:
		return 0, false
	}
}
// normalizeAPIBase canonicalizes a configured API base URL: surrounding
// whitespace is trimmed and trailing slashes are stripped from both the
// parsed path and the final string. Unparseable values fall back to
// simple slash-trimming; an empty value stays empty.
func normalizeAPIBase(raw string) string {
	base := strings.TrimSpace(raw)
	if base == "" {
		return ""
	}
	parsed, err := url.Parse(base)
	if err != nil {
		return strings.TrimRight(base, "/")
	}
	parsed.Path = strings.TrimRight(parsed.Path, "/")
	return strings.TrimRight(parsed.String(), "/")
}
// endpointFor joins a base URL with a relative endpoint path while
// avoiding duplicated suffixes, and special-cases the /responses vs.
// /responses/compact pair so that a base ending in either form can
// serve both endpoints. An empty base yields the bare relative path.
func endpointFor(base, relative string) string {
	b := strings.TrimRight(strings.TrimSpace(base), "/")
	switch {
	case b == "":
		return relative
	case strings.HasSuffix(b, relative):
		return b
	case relative == "/responses/compact" && strings.HasSuffix(b, "/responses"):
		return b + "/compact"
	case relative == "/responses" && strings.HasSuffix(b, "/responses/compact"):
		return strings.TrimSuffix(b, "/compact")
	default:
		return b + relative
	}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,546 @@
package providers
import (
"context"
"encoding/json"
"fmt"
"net/http"
"strings"
)
// callResponses issues a non-streaming model call. Depending on the
// provider it targets an OpenAI-compatible /chat/completions upstream,
// the Codex-compat /responses endpoint (which must stream), or the
// plain Responses API. It returns the raw body, HTTP status, content
// type, and error from the underlying POST helper.
func (p *HTTPProvider) callResponses(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) ([]byte, int, string, error) {
	// Chat-completions upstreams use an entirely different request
	// shape, so branch before doing any Responses-API conversion work
	// (previously the full input/tool conversion was built and then
	// discarded on this path).
	if p.useOpenAICompatChatUpstream() {
		chatBody := p.buildOpenAICompatChatRequest(messages, tools, model, options)
		return p.postJSON(ctx, endpointFor(p.compatBase(), "/chat/completions"), chatBody)
	}
	// Convert messages to Responses-API input items, tracking pending
	// function-call IDs so orphan tool outputs are dropped.
	input := make([]map[string]interface{}, 0, len(messages))
	pendingCalls := map[string]struct{}{}
	for _, msg := range messages {
		input = append(input, toResponsesInputItemsWithState(msg, pendingCalls)...)
	}
	requestBody := map[string]interface{}{
		"model": model,
		"input": input,
	}
	responseTools := buildResponsesTools(tools, options)
	if len(responseTools) > 0 {
		requestBody["tools"] = responseTools
		// Default tool_choice, overridable by the generic option and
		// then the responses-specific option (later wins).
		requestBody["tool_choice"] = "auto"
		if tc, ok := rawOption(options, "tool_choice"); ok {
			requestBody["tool_choice"] = tc
		}
		if tc, ok := rawOption(options, "responses_tool_choice"); ok {
			requestBody["tool_choice"] = tc
		}
	}
	if maxTokens, ok := int64FromOption(options, "max_tokens"); ok {
		requestBody["max_output_tokens"] = maxTokens
	}
	if temperature, ok := float64FromOption(options, "temperature"); ok {
		requestBody["temperature"] = temperature
	}
	if include, ok := stringSliceOption(options, "responses_include"); ok && len(include) > 0 {
		requestBody["include"] = include
	}
	if metadata, ok := mapOption(options, "responses_metadata"); ok && len(metadata) > 0 {
		requestBody["metadata"] = metadata
	}
	if prevID, ok := stringOption(options, "responses_previous_response_id"); ok && prevID != "" {
		requestBody["previous_response_id"] = prevID
	}
	if p.useCodexCompat() {
		// Codex requires streaming; deltas are discarded (nil handler)
		// and only the final aggregated body is returned.
		requestBody = p.codexCompatRequestBody(requestBody)
		return p.postJSONStream(ctx, endpointFor(p.codexCompatBase(), "/responses"), requestBody, nil)
	}
	return p.postJSON(ctx, endpointFor(p.apiBase, "/responses"), requestBody)
}
// toResponsesInputItemsWithState converts one chat message into zero or
// more Responses-API input items.
//
// pendingCalls tracks function-call IDs emitted by assistant messages:
// a subsequent "tool" message is only converted to a
// function_call_output when its ToolCallID is pending (strict
// call/output pairing), and the ID is consumed on use. A nil map
// disables the bookkeeping.
func toResponsesInputItemsWithState(msg Message, pendingCalls map[string]struct{}) []map[string]interface{} {
	role := strings.ToLower(strings.TrimSpace(msg.Role))
	switch role {
	case "system", "developer", "user":
		// Prefer structured content parts; fall back to the flat string.
		if content := responsesMessageContent(msg); len(content) > 0 {
			return []map[string]interface{}{{
				"type":    "message",
				"role":    role,
				"content": content,
			}}
		}
		return []map[string]interface{}{responsesMessageItem(role, msg.Content, "input_text")}
	case "assistant":
		items := make([]map[string]interface{}, 0, 1+len(msg.ToolCalls))
		// Emit a text item when there is content, or when there are no
		// tool calls at all (so the turn is never empty).
		if msg.Content != "" || len(msg.ToolCalls) == 0 {
			items = append(items, responsesMessageItem(role, msg.Content, "output_text"))
		}
		for _, tc := range msg.ToolCalls {
			callID := tc.ID
			if callID == "" {
				// Without an ID the later output cannot be paired.
				continue
			}
			// Function-level name/arguments win over the top-level fields.
			name := tc.Name
			argsRaw := ""
			if tc.Function != nil {
				if tc.Function.Name != "" {
					name = tc.Function.Name
				}
				argsRaw = tc.Function.Arguments
			}
			if name == "" {
				continue
			}
			if argsRaw == "" {
				argsJSON, err := json.Marshal(tc.Arguments)
				if err != nil {
					argsRaw = "{}"
				} else {
					argsRaw = string(argsJSON)
				}
			}
			if pendingCalls != nil {
				pendingCalls[callID] = struct{}{}
			}
			items = append(items, map[string]interface{}{
				"type":      "function_call",
				"call_id":   callID,
				"name":      name,
				"arguments": argsRaw,
			})
		}
		// Every tool call was skipped: still emit the text item.
		if len(items) == 0 {
			return []map[string]interface{}{responsesMessageItem(role, msg.Content, "output_text")}
		}
		return items
	case "tool":
		callID := msg.ToolCallID
		if callID == "" {
			return nil
		}
		if pendingCalls != nil {
			if _, ok := pendingCalls[callID]; !ok {
				// Strict pairing: drop orphan/duplicate tool outputs instead of degrading role.
				return nil
			}
			delete(pendingCalls, callID)
		}
		return []map[string]interface{}{map[string]interface{}{
			"type":    "function_call_output",
			"call_id": callID,
			"output":  msg.Content,
		}}
	default:
		// Unknown roles degrade to user input.
		return []map[string]interface{}{responsesMessageItem("user", msg.Content, "input_text")}
	}
}
// responsesMessageContent converts a message's structured ContentParts
// into Responses-API content entries (input_text / input_image /
// input_file). Parts missing their required payload are skipped, as
// are unrecognized part types; the result may therefore be empty.
func responsesMessageContent(msg Message) []map[string]interface{} {
	content := make([]map[string]interface{}, 0, len(msg.ContentParts))
	for _, part := range msg.ContentParts {
		switch strings.ToLower(strings.TrimSpace(part.Type)) {
		case "input_text", "text":
			if part.Text == "" {
				continue
			}
			content = append(content, map[string]interface{}{
				"type": "input_text",
				"text": part.Text,
			})
		case "input_image", "image":
			entry := map[string]interface{}{
				"type": "input_image",
			}
			if part.ImageURL != "" {
				entry["image_url"] = part.ImageURL
			}
			if part.FileID != "" {
				entry["file_id"] = part.FileID
			}
			if detail := strings.TrimSpace(part.Detail); detail != "" {
				entry["detail"] = detail
			}
			// An image requires at least one of image_url / file_id.
			if _, ok := entry["image_url"]; !ok {
				if _, ok := entry["file_id"]; !ok {
					continue
				}
			}
			content = append(content, entry)
		case "input_file", "file":
			entry := map[string]interface{}{
				"type": "input_file",
			}
			if part.FileData != "" {
				entry["file_data"] = part.FileData
			}
			if part.FileID != "" {
				entry["file_id"] = part.FileID
			}
			if part.FileURL != "" {
				entry["file_url"] = part.FileURL
			}
			if part.Filename != "" {
				entry["filename"] = part.Filename
			}
			// A file requires at least one of file_data / file_id / file_url.
			if _, ok := entry["file_data"]; !ok {
				if _, ok := entry["file_id"]; !ok {
					if _, ok := entry["file_url"]; !ok {
						continue
					}
				}
			}
			content = append(content, entry)
		}
	}
	return content
}
// buildResponsesTools converts tool definitions into Responses-API tool
// entries. Function tools get the canonical {type,name,parameters,...}
// shape, with nested Function fields taking precedence over top-level
// ones; other types pass through as built-in tools with their
// Parameters merged directly onto the entry. Pre-shaped extra tools may
// be appended via the "responses_tools" option.
func buildResponsesTools(tools []ToolDefinition, options map[string]interface{}) []map[string]interface{} {
	responseTools := make([]map[string]interface{}, 0, len(tools)+2)
	for _, t := range tools {
		typ := strings.ToLower(strings.TrimSpace(t.Type))
		if typ == "" {
			typ = "function"
		}
		if typ == "function" {
			// Name resolution: Function.Name wins, then top-level Name;
			// nameless tools are dropped.
			name := strings.TrimSpace(t.Function.Name)
			if name == "" {
				name = strings.TrimSpace(t.Name)
			}
			if name == "" {
				continue
			}
			// Parameters default to an empty schema object.
			entry := map[string]interface{}{
				"type":       "function",
				"name":       name,
				"parameters": map[string]interface{}{},
			}
			if t.Function.Parameters != nil {
				entry["parameters"] = t.Function.Parameters
			} else if t.Parameters != nil {
				entry["parameters"] = t.Parameters
			}
			desc := strings.TrimSpace(t.Function.Description)
			if desc == "" {
				desc = strings.TrimSpace(t.Description)
			}
			if desc != "" {
				entry["description"] = desc
			}
			if t.Function.Strict != nil {
				entry["strict"] = *t.Function.Strict
			} else if t.Strict != nil {
				entry["strict"] = *t.Strict
			}
			responseTools = append(responseTools, entry)
			continue
		}
		// Built-in tool types (web_search, file_search, code_interpreter, etc.).
		entry := map[string]interface{}{
			"type": typ,
		}
		if name := strings.TrimSpace(t.Name); name != "" {
			entry["name"] = name
		}
		if desc := strings.TrimSpace(t.Description); desc != "" {
			entry["description"] = desc
		}
		if t.Strict != nil {
			entry["strict"] = *t.Strict
		}
		// Arbitrary extra fields for built-in tools ride along in
		// Parameters and are merged into the entry directly.
		for k, v := range t.Parameters {
			entry[k] = v
		}
		responseTools = append(responseTools, entry)
	}
	if extraTools, ok := mapSliceOption(options, "responses_tools"); ok {
		responseTools = append(responseTools, extraTools...)
	}
	return responseTools
}
// responsesMessageItem builds a single-part Responses-API "message"
// input item with the given role and text; contentType selects the
// part type and defaults to "input_text" when empty.
func responsesMessageItem(role, text, contentType string) map[string]interface{} {
	partType := contentType
	if partType == "" {
		partType = "input_text"
	}
	part := map[string]interface{}{"type": partType, "text": text}
	return map[string]interface{}{
		"type":    "message",
		"role":    role,
		"content": []map[string]interface{}{part},
	}
}
// callResponsesStream issues a streaming model call, invoking onDelta
// with each text fragment as it arrives, and returns the final
// aggregated body/status/content-type from the underlying stream
// helper.
//
// Fix: delta payloads are now extracted with string type assertions.
// The previous fmt.Sprintf("%v", ...) approach emitted a literal
// "<nil>" for events without a delta (e.g. role-only or finish chunks)
// and the Go map representation ("map[text:…]") for object deltas,
// which also shadowed the object-delta branch entirely.
func (p *HTTPProvider) callResponsesStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}, onDelta func(string)) ([]byte, int, string, error) {
	// emitText forwards a trimmed, non-empty string fragment; it
	// reports whether v was a string at all.
	emitText := func(v interface{}) bool {
		s, ok := v.(string)
		if !ok {
			return false
		}
		if txt := strings.TrimSpace(s); txt != "" {
			onDelta(txt)
		}
		return true
	}
	// Chat-completions upstreams use a different request and event shape.
	if p.useOpenAICompatChatUpstream() {
		chatBody := p.buildOpenAICompatChatRequest(messages, tools, model, options)
		chatBody["stream"] = true
		chatBody["stream_options"] = map[string]interface{}{"include_usage": true}
		return p.postJSONStream(ctx, endpointFor(p.compatBase(), "/chat/completions"), chatBody, func(event string) {
			var obj map[string]interface{}
			if err := json.Unmarshal([]byte(event), &obj); err != nil {
				return
			}
			choices, _ := obj["choices"].([]interface{})
			for _, choice := range choices {
				item, _ := choice.(map[string]interface{})
				delta, _ := item["delta"].(map[string]interface{})
				emitText(delta["content"]) // nil-safe: non-strings are ignored
			}
		})
	}
	// Convert messages to Responses-API input items, tracking pending
	// function-call IDs so orphan tool outputs are dropped.
	input := make([]map[string]interface{}, 0, len(messages))
	pendingCalls := map[string]struct{}{}
	for _, msg := range messages {
		input = append(input, toResponsesInputItemsWithState(msg, pendingCalls)...)
	}
	requestBody := map[string]interface{}{
		"model":  model,
		"input":  input,
		"stream": true,
	}
	responseTools := buildResponsesTools(tools, options)
	if len(responseTools) > 0 {
		requestBody["tools"] = responseTools
		// Default tool_choice, overridable by the generic option and
		// then the responses-specific option (later wins).
		requestBody["tool_choice"] = "auto"
		if tc, ok := rawOption(options, "tool_choice"); ok {
			requestBody["tool_choice"] = tc
		}
		if tc, ok := rawOption(options, "responses_tool_choice"); ok {
			requestBody["tool_choice"] = tc
		}
	}
	if maxTokens, ok := int64FromOption(options, "max_tokens"); ok {
		requestBody["max_output_tokens"] = maxTokens
	}
	if temperature, ok := float64FromOption(options, "temperature"); ok {
		requestBody["temperature"] = temperature
	}
	if include, ok := stringSliceOption(options, "responses_include"); ok && len(include) > 0 {
		requestBody["include"] = include
	}
	if streamOpts, ok := mapOption(options, "responses_stream_options"); ok && len(streamOpts) > 0 {
		requestBody["stream_options"] = streamOpts
	}
	// handleDelta processes a Responses-API event's "delta" payload,
	// which may be a plain string or an object carrying "text".
	handleDelta := func(obj map[string]interface{}) {
		if emitText(obj["delta"]) {
			return
		}
		if delta, ok := obj["delta"].(map[string]interface{}); ok {
			emitText(delta["text"])
		}
	}
	if p.useCodexCompat() {
		requestBody = p.codexCompatRequestBody(requestBody)
		return p.postJSONStream(ctx, endpointFor(p.codexCompatBase(), "/responses"), requestBody, func(event string) {
			var obj map[string]interface{}
			if err := json.Unmarshal([]byte(event), &obj); err != nil {
				return
			}
			handleDelta(obj)
		})
	}
	return p.postJSONStream(ctx, endpointFor(p.apiBase, "/responses"), requestBody, func(event string) {
		var obj map[string]interface{}
		if err := json.Unmarshal([]byte(event), &obj); err != nil {
			return
		}
		// Explicit output-text deltas are forwarded directly; all other
		// event types fall through to the object-delta "text" field.
		if typ, _ := obj["type"].(string); typ == "response.output_text.delta" {
			emitText(obj["delta"])
			return
		}
		handleDelta(obj)
	})
}
// parseResponsesAPIResponse decodes a (non-streaming) Responses API
// body into an LLMResponse: output_text (or, failing that, the first
// message items' output_text parts) becomes Content, function_call
// items become ToolCalls, and when no structured calls are present the
// text is additionally scanned for inline <function_call> compat
// blocks. A status of "" or "completed" is normalized to finish reason
// "stop"; other statuses pass through unchanged.
func parseResponsesAPIResponse(body []byte) (*LLMResponse, error) {
	// Anonymous struct covering only the consumed response fields.
	var resp struct {
		Status string `json:"status"`
		Output []struct {
			ID      string `json:"id"`
			Type    string `json:"type"`
			CallID  string `json:"call_id"`
			Name    string `json:"name"`
			ArgsRaw string `json:"arguments"`
			Role    string `json:"role"`
			Content []struct {
				Type string `json:"type"`
				Text string `json:"text"`
			} `json:"content"`
		} `json:"output"`
		OutputText string `json:"output_text"`
		Usage      struct {
			InputTokens  int `json:"input_tokens"`
			OutputTokens int `json:"output_tokens"`
			TotalTokens  int `json:"total_tokens"`
		} `json:"usage"`
	}
	if err := json.Unmarshal(body, &resp); err != nil {
		return nil, fmt.Errorf("failed to unmarshal response: %w", err)
	}
	toolCalls := make([]ToolCall, 0)
	outputText := strings.TrimSpace(resp.OutputText)
	for _, item := range resp.Output {
		switch strings.TrimSpace(item.Type) {
		case "function_call":
			name := strings.TrimSpace(item.Name)
			if name == "" {
				// Nameless calls are unusable and dropped.
				continue
			}
			args := map[string]interface{}{}
			if strings.TrimSpace(item.ArgsRaw) != "" {
				// Non-JSON argument payloads are preserved under "raw"
				// rather than being silently dropped.
				if err := json.Unmarshal([]byte(item.ArgsRaw), &args); err != nil {
					args["raw"] = item.ArgsRaw
				}
			}
			// ID preference: call_id, then item id, then a synthesized
			// 1-based fallback.
			id := strings.TrimSpace(item.CallID)
			if id == "" {
				id = strings.TrimSpace(item.ID)
			}
			if id == "" {
				id = fmt.Sprintf("call_%d", len(toolCalls)+1)
			}
			toolCalls = append(toolCalls, ToolCall{ID: id, Name: name, Arguments: args})
		case "message":
			// Message items only contribute text when output_text was absent.
			if outputText == "" {
				texts := make([]string, 0, len(item.Content))
				for _, c := range item.Content {
					if strings.TrimSpace(c.Type) == "output_text" && strings.TrimSpace(c.Text) != "" {
						texts = append(texts, c.Text)
					}
				}
				if len(texts) > 0 {
					outputText = strings.Join(texts, "\n")
				}
			}
		}
	}
	// Fallback: some upstreams embed tool calls as inline pseudo-XML.
	if len(toolCalls) == 0 {
		compatCalls, cleanedContent := parseCompatFunctionCalls(outputText)
		if len(compatCalls) > 0 {
			toolCalls = compatCalls
			outputText = cleanedContent
		}
	}
	finishReason := strings.TrimSpace(resp.Status)
	if finishReason == "" || finishReason == "completed" {
		finishReason = "stop"
	}
	// Usage is attached only when at least one counter is non-zero.
	var usage *UsageInfo
	if resp.Usage.TotalTokens > 0 || resp.Usage.InputTokens > 0 || resp.Usage.OutputTokens > 0 {
		usage = &UsageInfo{PromptTokens: resp.Usage.InputTokens, CompletionTokens: resp.Usage.OutputTokens, TotalTokens: resp.Usage.TotalTokens}
	}
	return &LLMResponse{Content: strings.TrimSpace(outputText), ToolCalls: toolCalls, FinishReason: finishReason, Usage: usage}, nil
}
// BuildSummaryViaResponsesCompact summarizes a conversation in two steps:
// it first POSTs the message history to the provider's /responses/compact
// endpoint to obtain a compacted representation, then asks /responses for a
// concise markdown summary of that compacted JSON.
//
// existingSummary, when non-empty, is prepended as a system input item so the
// model can fold it into the new summary. maxSummaryChars caps the returned
// summary length in bytes (0 or negative disables the cap) and is also used
// to estimate max_output_tokens for the summary request. The returned error
// carries the HTTP status and a body preview for non-200 or non-JSON replies.
func (p *HTTPProvider) BuildSummaryViaResponsesCompact(ctx context.Context, model string, existingSummary string, messages []Message, maxSummaryChars int) (string, error) {
	if !p.SupportsResponsesCompact() {
		return "", fmt.Errorf("responses compact is not enabled for this provider")
	}

	// Assemble the Responses API input: optional prior summary first, then
	// the converted message history (tool-call state tracked via pendingCalls).
	input := make([]map[string]interface{}, 0, len(messages)+1)
	if strings.TrimSpace(existingSummary) != "" {
		input = append(input, responsesMessageItem("system", "Existing summary:\n"+strings.TrimSpace(existingSummary), "input_text"))
	}
	pendingCalls := map[string]struct{}{}
	for _, msg := range messages {
		input = append(input, toResponsesInputItemsWithState(msg, pendingCalls)...)
	}
	if len(input) == 0 {
		// Nothing to compact; the existing summary is already the answer.
		return strings.TrimSpace(existingSummary), nil
	}

	// Step 1: compact the conversation server-side.
	compactReq := map[string]interface{}{"model": model, "input": input}
	compactBody, statusCode, contentType, err := p.postJSON(ctx, endpointFor(p.apiBase, "/responses/compact"), compactReq)
	if err != nil {
		return "", fmt.Errorf("responses compact request failed: %w", err)
	}
	if statusCode != http.StatusOK {
		return "", fmt.Errorf("responses compact request failed (status %d, content-type %q): %s", statusCode, contentType, previewResponseBody(compactBody))
	}
	if !json.Valid(compactBody) {
		return "", fmt.Errorf("responses compact request failed (status %d, content-type %q): non-JSON response: %s", statusCode, contentType, previewResponseBody(compactBody))
	}
	var compactResp struct {
		Output         interface{} `json:"output"`
		CompactedInput interface{} `json:"compacted_input"`
		Compacted      interface{} `json:"compacted"`
	}
	if err := json.Unmarshal(compactBody, &compactResp); err != nil {
		return "", fmt.Errorf("responses compact request failed: invalid JSON: %w", err)
	}
	// Providers disagree on the field name; accept the first non-nil variant.
	compactPayload := compactResp.Output
	if compactPayload == nil {
		compactPayload = compactResp.CompactedInput
	}
	if compactPayload == nil {
		compactPayload = compactResp.Compacted
	}
	payloadBytes, err := json.Marshal(compactPayload)
	if err != nil {
		return "", fmt.Errorf("failed to serialize compact output: %w", err)
	}
	compactedPayload := strings.TrimSpace(string(payloadBytes))
	if compactedPayload == "" || compactedPayload == "null" {
		return "", fmt.Errorf("empty compact output")
	}
	// Bound the prompt size; cut on a rune boundary so the JSON excerpt stays
	// valid UTF-8 even when it contains multi-byte characters.
	const maxCompactPayloadBytes = 12000
	if len(compactedPayload) > maxCompactPayloadBytes {
		compactedPayload = truncateOnRuneBoundary(compactedPayload, maxCompactPayloadBytes) + "..."
	}

	// Step 2: ask for a markdown summary of the compacted JSON.
	summaryPrompt := fmt.Sprintf(
		"Compacted conversation JSON:\n%s\n\nReturn a concise markdown summary with sections: Key Facts, Decisions, Open Items, Next Steps.",
		compactedPayload,
	)
	summaryReq := map[string]interface{}{
		"model": model,
		"input": summaryPrompt,
	}
	if maxSummaryChars > 0 {
		// Rough chars-per-token heuristic (~3), floored so tiny caps still
		// leave the model room to produce a usable summary.
		estMaxTokens := maxSummaryChars / 3
		if estMaxTokens < 128 {
			estMaxTokens = 128
		}
		summaryReq["max_output_tokens"] = estMaxTokens
	}
	summaryBody, summaryStatus, summaryType, err := p.postJSON(ctx, endpointFor(p.apiBase, "/responses"), summaryReq)
	if err != nil {
		return "", fmt.Errorf("responses summary request failed: %w", err)
	}
	if summaryStatus != http.StatusOK {
		return "", fmt.Errorf("responses summary request failed (status %d, content-type %q): %s", summaryStatus, summaryType, previewResponseBody(summaryBody))
	}
	if !json.Valid(summaryBody) {
		return "", fmt.Errorf("responses summary request failed (status %d, content-type %q): non-JSON response: %s", summaryStatus, summaryType, previewResponseBody(summaryBody))
	}
	summaryResp, err := parseResponsesAPIResponse(summaryBody)
	if err != nil {
		return "", fmt.Errorf("responses summary request failed: %w", err)
	}
	summary := strings.TrimSpace(summaryResp.Content)
	if summary == "" {
		return "", fmt.Errorf("empty summary after responses compact")
	}
	if maxSummaryChars > 0 && len(summary) > maxSummaryChars {
		// Byte-based cap, but never split a multi-byte rune.
		summary = truncateOnRuneBoundary(summary, maxSummaryChars)
	}
	return summary, nil
}

// truncateOnRuneBoundary shortens s to at most max bytes without splitting a
// multi-byte UTF-8 sequence: the cut point is walked back past any UTF-8
// continuation bytes (0b10xxxxxx) so the result remains valid UTF-8.
func truncateOnRuneBoundary(s string, max int) string {
	if max <= 0 || len(s) <= max {
		return s
	}
	cut := max
	for cut > 0 && s[cut]&0xC0 == 0x80 {
		cut--
	}
	return s[:cut]
}