feat: support the Responses API

This commit is contained in:
xkm
2026-03-09 19:10:06 +08:00
parent 4301c3bf75
commit 3618ebe65d

154
main.go
View File

@@ -30,6 +30,23 @@ type chatCompletionReq struct {
Stream bool `json:"stream"` Stream bool `json:"stream"`
} }
type ResponseReasoning struct {
Effort string `json:"effort"`
Summary string `json:"summary,omitempty"` // auto, concise, detailed
}
// chatResponseReq is the JSON request body for the streaming /responses
// endpoint (the Responses-API counterpart of chatCompletionReq).
type chatResponseReq struct {
	Model       string        `json:"model"`
	Input       []ChatMessage `json:"input"`
	// Temperature is a pointer so that nil omits the field and leaves the
	// server-side default in effect.
	Temperature *float64 `json:"temperature,omitempty"`
	// NOTE(review): omitempty has no effect on a non-pointer struct field —
	// "reasoning" is always emitted, even when zero. If omission is
	// intended, this would need *ResponseReasoning (or Go 1.24's omitzero);
	// confirm against the API before changing the wire format.
	Reasoning ResponseReasoning `json:"reasoning,omitempty"`
	Stream    bool              `json:"stream"`
}
// responseStreamEvent is the subset of a /responses SSE payload this client
// consumes: the event type, plus the text fragment carried by
// "response.output_text.delta" events (Delta is empty for other types).
type responseStreamEvent struct {
	Type  string `json:"type"`
	Delta string `json:"delta"`
}
type streamEvent struct { type streamEvent struct {
Choices []struct { Choices []struct {
Delta struct { Delta struct {
@@ -43,7 +60,7 @@ type streamEvent struct {
} `json:"error,omitempty"` } `json:"error,omitempty"`
} }
func streamChatText( func streamChatCompletions(
ctx context.Context, ctx context.Context,
client *http.Client, client *http.Client,
baseURL string, baseURL string,
@@ -176,11 +193,137 @@ func streamChatText(
return pr, nil return pr, nil
} }
// streamChatResponses POSTs a streaming request to the OpenAI-style
// /responses endpoint and returns a reader that yields only the generated
// text: a background goroutine parses the SSE stream and writes every
// "response.output_text.delta" payload to the returned pipe.
//
// Transport, HTTP-status, and parse errors surface as the read error on the
// returned ReadCloser. Cancel ctx to abort the request mid-stream.
func streamChatResponses(
	ctx context.Context,
	client *http.Client,
	baseURL string,
	apiKey string,
	model string,
	reasoningEffort string,
	temperature *float64,
	msgs []ChatMessage,
) (io.ReadCloser, error) {
	// len(nil slice) == 0, so a separate nil check is redundant.
	if len(msgs) == 0 {
		return nil, errors.New("missing messages")
	}
	endpoint := strings.TrimRight(baseURL, "/") + "/responses"
	body := chatResponseReq{
		Model:       model,
		Input:       msgs,
		Temperature: temperature,
		Reasoning:   ResponseReasoning{Effort: reasoningEffort},
		Stream:      true,
	}
	payload, err := json.Marshal(body)
	if err != nil {
		return nil, err
	}
	pr, pw := io.Pipe()
	go func() {
		// Safe on every exit path: Close after CloseWithError is a no-op,
		// and PipeWriter.Close itself never returns a non-nil error.
		defer func() { _ = pw.Close() }()
		req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(payload))
		if err != nil {
			_ = pw.CloseWithError(err)
			return
		}
		req.Header.Set("Content-Type", "application/json")
		req.Header.Set("Accept", "text/event-stream")
		req.Header.Set("Authorization", "Bearer "+apiKey)
		resp, err := client.Do(req)
		if err != nil {
			_ = pw.CloseWithError(err)
			return
		}
		defer func() {
			if cerr := resp.Body.Close(); cerr != nil {
				log.Println(cerr)
			}
		}()
		if resp.StatusCode < 200 || resp.StatusCode > 299 {
			b, _ := io.ReadAll(resp.Body)
			_ = pw.CloseWithError(fmt.Errorf("HTTP %d: %s", resp.StatusCode, strings.TrimSpace(string(b))))
			return
		}
		sc := bufio.NewScanner(resp.Body)
		sc.Buffer(make([]byte, 0, 64*1024), 2*1024*1024)
		var dataLines []string
		// flushEvent decodes the accumulated data lines of one SSE event and
		// forwards text deltas to the pipe. It reports false when the pipe
		// has been closed with an error and streaming must stop.
		flushEvent := func() bool {
			if len(dataLines) == 0 {
				// A blank line with no pending data (leading separator,
				// keep-alive, or event-only frame) is not end-of-stream:
				// do NOT close the pipe here, or later deltas are lost.
				return true
			}
			data := strings.Join(dataLines, "\n")
			dataLines = dataLines[:0]
			var evt responseStreamEvent
			if err := json.Unmarshal([]byte(data), &evt); err != nil {
				_ = pw.CloseWithError(fmt.Errorf("failed to unmarshal event: %w, data=%q", err, data))
				return false
			}
			// Only incremental text deltas carry output for the reader;
			// all other event types are ignored.
			if evt.Type == "response.output_text.delta" {
				if _, err := io.WriteString(pw, evt.Delta); err != nil {
					_ = pw.CloseWithError(err)
					return false
				}
			}
			return true
		}
		for sc.Scan() {
			line := sc.Text()
			// A blank line terminates the current SSE event.
			if line == "" {
				if !flushEvent() {
					return
				}
				continue
			}
			if strings.HasPrefix(line, "data:") {
				dataLines = append(dataLines, strings.TrimSpace(strings.TrimPrefix(line, "data:")))
			}
			// Terminal event: the response is complete, stop reading.
			if strings.HasPrefix(line, "event: response.completed") {
				break
			}
		}
		// Report scanner failures before flushing, so a truncated stream is
		// not silently treated as a complete final event.
		if err := sc.Err(); err != nil {
			_ = pw.CloseWithError(err)
			return
		}
		// Flush any event that was not followed by a blank line.
		_ = flushEvent()
	}()
	return pr, nil
}
// Translate src to lang // Translate src to lang
func Translate( func Translate(
ctx context.Context, ctx context.Context,
client *http.Client, client *http.Client,
baseURL string, baseURL string,
responseApi bool,
apiKey string, apiKey string,
model string, model string,
reasoningEffort string, reasoningEffort string,
@@ -201,12 +344,17 @@ func Translate(
Content: src, Content: src,
}, },
} }
return streamChatText(ctx, client, baseURL, apiKey, model, reasoningEffort, temperature, msgs) if responseApi {
return streamChatResponses(ctx, client, baseURL, apiKey, model, reasoningEffort, temperature, msgs)
}
return streamChatCompletions(ctx, client, baseURL, apiKey, model, reasoningEffort, temperature, msgs)
} }
var cli struct { var cli struct {
Config string `short:"c" default:"~/.config/translate.toml" help:"Path to config file"` Config string `short:"c" default:"~/.config/translate.toml" help:"Path to config file"`
BaseURL string `short:"b" help:"LLM API base URL" default:"https://api.openai.com/v1"` BaseURL string `short:"b" help:"LLM API base URL" default:"https://api.openai.com/v1"`
ResponseApi bool `default:"false" help:"Use /v1/responses or /v1/chat/completions"`
ApiKey string `short:"k" help:"LLM API Key"` ApiKey string `short:"k" help:"LLM API Key"`
Model string `short:"m" help:"LLM model" default:"gpt-5-nano"` Model string `short:"m" help:"LLM model" default:"gpt-5-nano"`
ReasoningEffort string `help:"LLM reasoning effort (note that some LLMs may not support certain settings)" default:"minimal"` ReasoningEffort string `help:"LLM reasoning effort (note that some LLMs may not support certain settings)" default:"minimal"`
@@ -239,7 +387,7 @@ func main() {
} }
} }
stream, err := Translate(context.Background(), http.DefaultClient, cli.BaseURL, cli.ApiKey, cli.Model, stream, err := Translate(context.Background(), http.DefaultClient, cli.BaseURL, cli.ResponseApi, cli.ApiKey, cli.Model,
cli.ReasoningEffort, cli.Temperature, src, lang) cli.ReasoningEffort, cli.Temperature, src, lang)
if err != nil { if err != nil {