first demo
This commit is contained in:
10
go.mod
Normal file
10
go.mod
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
module gitea.starryskymeow.cn/xkm/ask

go 1.26.1

require (
	gitea.starryskymeow.cn/xkm/llm-api v0.0.0-20260321081605-eb8f4a273c21 // indirect
	github.com/alecthomas/kong v1.14.0 // indirect
	github.com/alecthomas/kong-toml v0.4.0 // indirect
	github.com/pelletier/go-toml v1.9.5 // indirect
)
|
||||||
10
go.sum
Normal file
10
go.sum
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
gitea.starryskymeow.cn/xkm/llm-api v0.0.0-20260321073716-c7e6863c6f72 h1:wvpDlMPNVifnOfP729SBZuC5fY/rTffhkMeep0vrDB0=
gitea.starryskymeow.cn/xkm/llm-api v0.0.0-20260321073716-c7e6863c6f72/go.mod h1:uvdSSW01wuwvyiaIOsYYrffQ0SnRIHgOlJDVZmYjejQ=
gitea.starryskymeow.cn/xkm/llm-api v0.0.0-20260321081605-eb8f4a273c21 h1:EiDI/mUpObzQNn9Edb8UuIuClVUctxTEQHhaB9KgNug=
gitea.starryskymeow.cn/xkm/llm-api v0.0.0-20260321081605-eb8f4a273c21/go.mod h1:uvdSSW01wuwvyiaIOsYYrffQ0SnRIHgOlJDVZmYjejQ=
github.com/alecthomas/kong v1.14.0 h1:gFgEUZWu2ZmZ+UhyZ1bDhuutbKN1nTtJTwh19Wsn21s=
github.com/alecthomas/kong v1.14.0/go.mod h1:wrlbXem1CWqUV5Vbmss5ISYhsVPkBb1Yo7YKJghju2I=
github.com/alecthomas/kong-toml v0.4.0 h1:sSK/HHi2M5jqSXYTxmuxkdZcJ+ip9jhYvwcjDGcaJBQ=
github.com/alecthomas/kong-toml v0.4.0/go.mod h1:hRVV9iGmqYsFqs17jFQgqhkjYIxiklbfy95xJ3nlpKI=
github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
|
||||||
90
main.go
Normal file
90
main.go
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"io"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
llm_api "gitea.starryskymeow.cn/xkm/llm-api"
|
||||||
|
"github.com/alecthomas/kong"
|
||||||
|
kongtoml "github.com/alecthomas/kong-toml"
|
||||||
|
)
|
||||||
|
|
||||||
|
// cli holds the command-line options parsed by kong. Struct tags drive
// the flag names, short aliases, defaults, and help text; the same
// struct is also populated from the TOML config file via kong-toml.
var cli struct {
	// Config is consumed by kong.Configuration as the config-file path.
	Config string `short:"c" default:"~/.config/ask.toml" help:"Path to config file"`
	BaseURL string `short:"b" help:"LLM API base URL" default:"https://api.openai.com/v1"`
	// ResponseApi selects the /v1/responses endpoint instead of
	// /v1/chat/completions (see the branch in main).
	ResponseApi bool `default:"false" help:"Use /v1/responses or /v1/chat/completions"`
	ApiKey string `short:"k" help:"LLM API Key"`
	Model string `short:"m" help:"LLM model" default:"gpt-5-nano"`
	ReasoningEffort string `help:"LLM reasoning effort (note that some LLMs may not support certain settings)" default:"minimal"`
	// Pointer so an unset temperature is distinguishable from 0.
	// NOTE(review): this field is not visibly passed to the API calls
	// in main — confirm whether it should be.
	Temperature *float64 `help:"LLM Temperature"`
	// Src is the positional prompt text; empty means read from stdin.
	Src []string `arg:"" optional:"" name:"src" help:"Text to ask, or leave empty to use stdin"`
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
_ = kong.Parse(&cli)
|
||||||
|
_ = kong.Parse(
|
||||||
|
&cli,
|
||||||
|
kong.Configuration(kongtoml.Loader, cli.Config),
|
||||||
|
)
|
||||||
|
|
||||||
|
src := strings.Join(cli.Src, " ")
|
||||||
|
|
||||||
|
if src == "" {
|
||||||
|
stdin, err := io.ReadAll(os.Stdin)
|
||||||
|
src = string(stdin)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var pr io.ReadCloser
|
||||||
|
var err error
|
||||||
|
if cli.ResponseApi {
|
||||||
|
pr, err = llm_api.OpenaiStreamChatResponses(
|
||||||
|
context.Background(),
|
||||||
|
http.DefaultClient,
|
||||||
|
cli.BaseURL,
|
||||||
|
cli.ApiKey,
|
||||||
|
cli.Model,
|
||||||
|
cli.ReasoningEffort,
|
||||||
|
nil,
|
||||||
|
[]llm_api.OpenaiChatMessage{
|
||||||
|
{
|
||||||
|
Role: "user",
|
||||||
|
Content: src,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
pr, err = llm_api.OpenaiStreamChatCompletions(
|
||||||
|
context.Background(),
|
||||||
|
http.DefaultClient,
|
||||||
|
cli.BaseURL,
|
||||||
|
cli.ApiKey,
|
||||||
|
cli.Model,
|
||||||
|
cli.ReasoningEffort,
|
||||||
|
nil,
|
||||||
|
[]llm_api.OpenaiChatMessage{
|
||||||
|
{
|
||||||
|
Role: "user",
|
||||||
|
Content: src,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
defer pr.Close()
|
||||||
|
_, err = io.Copy(os.Stdout, pr)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user