package llm_api

import (
	"io"
	"net/http"
	"net/http/httptest"
	"testing"
)

// getFakeServer returns an httptest.Server that logs every incoming
// request (method, URL, User-Agent header, and body) through t and
// always responds 200 OK with a minimal JSON body.
// The caller is responsible for calling Close on the returned server.
func getFakeServer(t *testing.T) *httptest.Server {
	return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Logf appends a newline itself, so the messages carry no trailing \n.
		t.Logf("Method: %s", r.Method)
		t.Logf("URL: %s", r.URL)
		t.Logf("User-Agent: %s", r.Header.Get("User-Agent"))
		body, err := io.ReadAll(r.Body)
		if err != nil {
			t.Errorf("reading request body: %v", err)
		}
		t.Logf("Body: %s", string(body))
		w.WriteHeader(http.StatusOK)
		if _, err := w.Write([]byte(`{"status":"ok"}`)); err != nil {
			t.Errorf("writing fake response: %v", err)
		}
	}))
}

// TestOpenaiStreamChatResponsesNoEffort exercises OpenaiStreamChatResponses
// with an empty reasoning-effort value against a local fake server and
// drains the resulting stream.
func TestOpenaiStreamChatResponsesNoEffort(t *testing.T) {
	server := getFakeServer(t)
	defer server.Close()

	pr, err := OpenaiStreamChatResponses(
		t.Context(),
		server.Client(),
		// Fix: target the fake server instead of "http://example.com/v1" —
		// the original URL bypassed the fake and hit the real network.
		server.URL+"/v1",
		"apikey",
		"test-model",
		"", // empty effort: the case under test
		nil,
		[]OpenaiChatMessage{
			{
				Role:    "user",
				Content: "test",
			},
		},
	)
	if err != nil {
		t.Fatal(err)
	}
	defer pr.Close()
	// Drain the stream; the fake body is not real SSE, so any parse-side
	// error from the reader is irrelevant here and deliberately ignored.
	_, _ = io.Copy(io.Discard, pr)
}