From aa06c480e81c3e5b4799250c2f828960891255f1 Mon Sep 17 00:00:00 2001
From: xkm
Date: Sat, 21 Mar 2026 16:15:37 +0800
Subject: [PATCH] test(openai): add TestOpenaiStreamChatResponsesNoEffort

---
 openai_test.go | 55 +++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 55 insertions(+)
 create mode 100644 openai_test.go

diff --git a/openai_test.go b/openai_test.go
new file mode 100644
index 0000000..b2295a3
--- /dev/null
+++ b/openai_test.go
@@ -0,0 +1,55 @@
+package llm_api
+
+import (
+	"io"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+)
+
+// getFakeServer starts an httptest server that logs every incoming request
+// (method, URL, User-Agent, body) and replies 200 with a static JSON body.
+func getFakeServer(t *testing.T) *httptest.Server {
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		t.Logf("Method: %s\n", r.Method)
+		t.Logf("URL: %s\n", r.URL)
+		t.Logf("User-Agent: %s\n", r.Header.Get("User-Agent"))
+
+		body, _ := io.ReadAll(r.Body)
+		t.Logf("Body: %s\n", string(body))
+
+		w.WriteHeader(http.StatusOK)
+		w.Write([]byte(`{"status":"ok"}`))
+	}))
+	return server
+}
+
+func TestOpenaiStreamChatResponsesNoEffort(t *testing.T) {
+	server := getFakeServer(t)
+	defer server.Close()
+
+	pr, err := OpenaiStreamChatResponses(
+		t.Context(),
+		server.Client(),
+		// Point at the fake server so the test makes no real network calls.
+		server.URL+"/v1",
+		"apikey",
+		"test-model",
+		"",
+		nil,
+		[]OpenaiChatMessage{
+			{
+				Role:    "user",
+				Content: "test",
+			},
+		},
+	)
+	if err != nil {
+		t.Fatal(err)
+	}
+	defer pr.Close()
+	// Drain the stream; this test only checks the call succeeds end to end.
+	if _, err := io.Copy(io.Discard, pr); err != nil {
+		t.Fatal(err)
+	}
+}