Files
llm-api/model.go
2026-03-21 15:37:16 +08:00

46 lines
1.3 KiB
Go

package llm_api
// OpenaiChatMessage is a single conversation message sent to the
// OpenAI-compatible chat APIs. It is used both as a Chat Completions
// "messages" element and as a Responses API "input" element.
type OpenaiChatMessage struct {
// Role identifies the speaker (e.g. "system", "user", "assistant" —
// presumably; exact accepted values depend on the upstream API).
Role string `json:"role"`
// Content is the plain-text body of the message.
Content string `json:"content"`
}
// OpenaiChatCompletionReq is the request body for the OpenAI
// Chat Completions endpoint (POST /v1/chat/completions).
type OpenaiChatCompletionReq struct {
Model string `json:"model"`
Messages []OpenaiChatMessage `json:"messages"`
// Temperature is a pointer so that an unset value is omitted from the
// JSON rather than serialized as 0, which the API would treat as
// "fully deterministic" instead of "use the default".
Temperature *float64 `json:"temperature,omitempty"`
// ReasoningEffort is omitted when empty; only relevant for
// reasoning-capable models.
ReasoningEffort string `json:"reasoning_effort,omitempty"`
// Stream requests server-sent-event streaming when true. Deliberately
// not omitempty: "stream":false is always serialized explicitly.
Stream bool `json:"stream"`
}
// OpenaiResponseReasoning configures reasoning behavior for the
// OpenAI Responses API request.
type OpenaiResponseReasoning struct {
// Effort controls how much reasoning the model performs
// (e.g. "low", "medium", "high" — verify against the upstream API).
Effort string `json:"effort"`
// Summary selects the reasoning-summary style; the value list below
// documents Summary, not Effort.
Summary string `json:"summary,omitempty"` // auto, concise, detailed
}
// OpenaiChatResponseReq is the request body for the OpenAI Responses
// API (POST /v1/responses). It mirrors OpenaiChatCompletionReq but uses
// the Responses-style "input" field and structured "reasoning" object.
type OpenaiChatResponseReq struct {
Model string `json:"model"`
Input []OpenaiChatMessage `json:"input"`
// Pointer so an unset temperature is omitted rather than sent as 0.
Temperature *float64 `json:"temperature,omitempty"`
// NOTE(review): `omitempty` has no effect on a non-pointer struct
// field — encoding/json never considers a struct value "empty", so
// `"reasoning":{"effort":""}` is always serialized even when unset.
// Changing the type to *OpenaiResponseReasoning would make omitempty
// work, but that breaks existing callers that assign a struct literal;
// confirm call sites before changing.
Reasoning OpenaiResponseReasoning `json:"reasoning,omitempty"`
// Deliberately not omitempty: "stream":false is sent explicitly.
Stream bool `json:"stream"`
}
// OpenaiResponseStreamEvent is one streaming event from the OpenAI
// Responses API. Type discriminates the event kind; Delta carries the
// incremental text payload for delta-type events.
type OpenaiResponseStreamEvent struct {
Type string `json:"type"`
Delta string `json:"delta"`
}
// OpenaiCompletionStreamEvent is one streaming chunk from the Chat
// Completions endpoint. Incremental text arrives in
// Choices[i].Delta.Content; a non-nil Error reports an in-stream API
// failure instead.
type OpenaiCompletionStreamEvent struct {
Choices []struct {
Delta struct {
Content string `json:"content"`
} `json:"delta"`
} `json:"choices"`
// Error is a pointer so that its absence in the JSON is distinguishable
// (nil) from an error with empty fields.
Error *struct {
Message string `json:"message"`
Type string `json:"type"`
} `json:"error,omitempty"`
}