// Patch metadata (reconstructed from the original mail header):
//   From: xkm — Sat, 21 Mar 2026 15:37:16 +0800
//   Subject: [PATCH] feat(openai): completion::stream, response::stream and basic model
//   src: gitea.starryskymeow.cn/xkm/translate
//
// Files introduced by this patch (Go files all declare package llm_api):
//   .gitignore — single entry "/.idea" (no trailing newline)
//   go.mod     — module gitea.starryskymeow.cn/xkm/llm-api, go 1.26.1
//   model.go   — the request/response models below
//   openai-completion.go / openai-response.go — SSE streaming clients

// OpenaiChatMessage is one chat turn sent to the API.
type OpenaiChatMessage struct {
	Role    string `json:"role"` // e.g. "system"/"user"/"assistant" — not validated here; TODO confirm accepted values against the target API
	Content string `json:"content"`
}

// OpenaiChatCompletionReq is the JSON request body for POST /chat/completions.
type OpenaiChatCompletionReq struct {
	Model    string              `json:"model"`
	Messages []OpenaiChatMessage `json:"messages"`
	// Temperature is a pointer so an explicit 0 is distinguishable from
	// "not set" (omitted from the JSON body).
	Temperature     *float64 `json:"temperature,omitempty"`
	ReasoningEffort string   `json:"reasoning_effort,omitempty"`
	Stream          bool     `json:"stream"`
}

// OpenaiResponseReasoning configures reasoning options for the /responses
// endpoint.
type OpenaiResponseReasoning struct {
	Effort  string `json:"effort"`
	Summary string `json:"summary,omitempty"` // "auto", "concise" or "detailed"
}

// OpenaiChatResponseReq is the JSON request body for POST /responses.
type OpenaiChatResponseReq struct {
	Model       string              `json:"model"`
	Input       []OpenaiChatMessage `json:"input"`
	Temperature *float64            `json:"temperature,omitempty"`
	// NOTE: encoding/json ignores ",omitempty" on non-pointer struct fields,
	// so the original tag had no effect; "reasoning" is always serialized and
	// the dead tag was dropped. Switch the field to *OpenaiResponseReasoning
	// if omission is ever required.
	Reasoning OpenaiResponseReasoning `json:"reasoning"`
	Stream    bool                    `json:"stream"`
}

// OpenaiResponseStreamEvent is one decoded SSE event from the /responses
// stream; only the fields this package reads are declared, all other
// payload fields are ignored by json.Unmarshal.
type OpenaiResponseStreamEvent struct {
	Type  string `json:"type"`
	Delta string `json:"delta"`
}
+type OpenaiCompletionStreamEvent struct { + Choices []struct { + Delta struct { + Content string `json:"content"` + } `json:"delta"` + } `json:"choices"` + + Error *struct { + Message string `json:"message"` + Type string `json:"type"` + } `json:"error,omitempty"` +} diff --git a/openai-completion.go b/openai-completion.go new file mode 100644 index 0000000..75b85fb --- /dev/null +++ b/openai-completion.go @@ -0,0 +1,147 @@ +package llm_api + +import ( + "bufio" + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "log" + "net/http" + "strings" +) + +func OpenaiStreamChatCompletions( + ctx context.Context, + client *http.Client, + baseURL string, + apiKey string, + model string, + reasoningEffort string, + temperature *float64, + msgs []OpenaiChatMessage, +) (io.ReadCloser, error) { + if msgs == nil || len(msgs) == 0 { + return nil, errors.New("missing messages") + } + endpoint := strings.TrimRight(baseURL, "/") + "/chat/completions" + + body := OpenaiChatCompletionReq{ + Model: model, + Messages: msgs, + Temperature: temperature, + ReasoningEffort: reasoningEffort, + Stream: true, + } + + payload, err := json.Marshal(body) + if err != nil { + return nil, err + } + + pr, pw := io.Pipe() + + go func() { + defer func(pw *io.PipeWriter) { + err := pw.Close() + if err != nil { + log.Println(err) + } + }(pw) + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(payload)) + if err != nil { + _ = pw.CloseWithError(err) + return + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Accept", "text/event-stream") + req.Header.Set("Authorization", "Bearer "+apiKey) + + resp, err := client.Do(req) + if err != nil { + _ = pw.CloseWithError(err) + return + } + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + log.Println(err) + } + }(resp.Body) + + if resp.StatusCode < 200 || resp.StatusCode > 299 { + b, _ := io.ReadAll(resp.Body) + _ = pw.CloseWithError(fmt.Errorf("HTTP %d: %s", 
resp.StatusCode, strings.TrimSpace(string(b)))) + return + } + + sc := bufio.NewScanner(resp.Body) + sc.Buffer(make([]byte, 0, 64*1024), 2*1024*1024) + + var dataLines []string + flushEvent := func() bool { + if len(dataLines) == 0 { + _ = pw.Close() + return true + } + data := strings.Join(dataLines, "\n") + dataLines = dataLines[:0] + if data == "[DONE]" { + _ = pw.Close() + return false + } + + var evt OpenaiCompletionStreamEvent + if err := json.Unmarshal([]byte(data), &evt); err != nil { + _ = pw.CloseWithError(fmt.Errorf("failed to unmarshal event: %w, data=%q", err, data)) + return false + } + if evt.Error != nil { + _ = pw.CloseWithError(fmt.Errorf("api error: %+v", evt.Error)) + return false + } + if len(evt.Choices) > 0 { + chunk := evt.Choices[0].Delta.Content + if chunk != "" { + if _, err := io.WriteString(pw, chunk); err != nil { + _ = pw.CloseWithError(err) + return false + } + } + } + return true + } + + for sc.Scan() { + line := sc.Text() + + if line == "" { + if ok := flushEvent(); !ok { + return + } + continue + } + + if strings.HasPrefix(line, "data:") { + v := strings.TrimSpace(strings.TrimPrefix(line, "data:")) + dataLines = append(dataLines, v) + } + } + + if len(dataLines) > 0 { + _ = flushEvent() + return + } + + if err := sc.Err(); err != nil { + _ = pw.CloseWithError(err) + return + } + }() + + return pr, nil +} diff --git a/openai-response.go b/openai-response.go new file mode 100644 index 0000000..4372229 --- /dev/null +++ b/openai-response.go @@ -0,0 +1,139 @@ +package llm_api + +import ( + "bufio" + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "log" + "net/http" + "strings" +) + +func OpenaiStreamChatResponses( + ctx context.Context, + client *http.Client, + baseURL string, + apiKey string, + model string, + reasoningEffort string, + temperature *float64, + msgs []OpenaiChatMessage, +) (io.ReadCloser, error) { + if msgs == nil || len(msgs) == 0 { + return nil, errors.New("missing messages") + } + endpoint := 
strings.TrimRight(baseURL, "/") + "/responses" + + body := OpenaiChatResponseReq{ + Model: model, + Input: msgs, + Temperature: temperature, + Reasoning: OpenaiResponseReasoning{Effort: reasoningEffort}, + Stream: true, + } + + payload, err := json.Marshal(body) + if err != nil { + return nil, err + } + + pr, pw := io.Pipe() + + go func() { + defer func(pw *io.PipeWriter) { + err := pw.Close() + if err != nil { + log.Println(err) + } + }(pw) + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(payload)) + if err != nil { + _ = pw.CloseWithError(err) + return + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Accept", "text/event-stream") + req.Header.Set("Authorization", "Bearer "+apiKey) + + resp, err := client.Do(req) + if err != nil { + _ = pw.CloseWithError(err) + return + } + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + log.Println(err) + } + }(resp.Body) + + if resp.StatusCode < 200 || resp.StatusCode > 299 { + b, _ := io.ReadAll(resp.Body) + _ = pw.CloseWithError(fmt.Errorf("HTTP %d: %s", resp.StatusCode, strings.TrimSpace(string(b)))) + return + } + + sc := bufio.NewScanner(resp.Body) + sc.Buffer(make([]byte, 0, 64*1024), 2*1024*1024) + + var dataLines []string + flushEvent := func() bool { + if len(dataLines) == 0 { + _ = pw.Close() + return true + } + data := strings.Join(dataLines, "\n") + dataLines = dataLines[:0] + + var evt OpenaiResponseStreamEvent + if err := json.Unmarshal([]byte(data), &evt); err != nil { + _ = pw.CloseWithError(fmt.Errorf("failed to unmarshal event: %w, data=%q", err, data)) + return false + } + if evt.Type == "response.output_text.delta" { + if _, err := io.WriteString(pw, evt.Delta); err != nil { + _ = pw.CloseWithError(err) + return false + } + } + return true + } + + for sc.Scan() { + line := sc.Text() + + if line == "" { + if ok := flushEvent(); !ok { + return + } + continue + } + + if strings.HasPrefix(line, "data:") { + v := 
strings.TrimSpace(strings.TrimPrefix(line, "data:")) + dataLines = append(dataLines, v) + } + if strings.HasPrefix(line, "event: response.completed") { + break + } + } + + if len(dataLines) > 0 { + _ = flushEvent() + return + } + + if err := sc.Err(); err != nil { + _ = pw.CloseWithError(err) + return + } + }() + + return pr, nil +}