package openrouter

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"os"
	"time"
)

type Message struct {
	Role    string `json:"role"` // "system" | "user" | "assistant"
	Content string `json:"content"`
}

type ChatCompletionRequest struct {
	Model       string          `json:"model"`
	Messages    []Message       `json:"messages"`
	Temperature *float64        `json:"temperature,omitempty"`
	MaxTokens   *int            `json:"max_tokens,omitempty"`
	TopP        *float64        `json:"top_p,omitempty"`
	Stop        json.RawMessage `json:"stop,omitempty"` // string or []string; keep flexible (see helpers below)
}
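
// The two helpers below are illustrative sketches, not part of the original
// file. ptr is a small convenience for filling the optional pointer fields
// (Temperature, MaxTokens, TopP); stopValue shows one way to populate the
// flexible Stop field with either a single string or a []string, e.g.
// req.Stop, _ = stopValue([]string{"\n\n"}).
func ptr[T any](v T) *T { return &v }

func stopValue(v any) (json.RawMessage, error) {
	// A bare string marshals to a JSON string, a []string to a JSON array,
	// matching the two shapes the "stop" field is meant to keep flexible.
	return json.Marshal(v)
}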

type ChatCompletionResponse struct {
	ID      string `json:"id"`
	Choices []struct {
		Index   int `json:"index"`
		Message struct {
			Role    string `json:"role"`
			Content string `json:"content"`
		} `json:"message"`
		FinishReason string `json:"finish_reason"`
	} `json:"choices"`
}
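
// FirstContent is an illustrative convenience accessor, not part of the
// original file: it returns the content of the first choice, or "" when the
// response has no choices.
func (r ChatCompletionResponse) FirstContent() string {
	if len(r.Choices) == 0 {
		return ""
	}
	return r.Choices[0].Message.Content
}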

// ChatCompletion sends one chat completion request to OpenRouter and decodes
// the response.
func ChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error) {
	httpClient := http.Client{Timeout: 10 * time.Second}

	body, err := json.Marshal(req)
	if err != nil {
		return ChatCompletionResponse{}, err
	}

	httpReq, err := http.NewRequestWithContext(ctx, "POST", "https://openrouter.ai/api/v1/chat/completions", bytes.NewReader(body))
	if err != nil {
		return ChatCompletionResponse{}, err
	}

	// Read the API key from the environment rather than hardcoding a secret in source.
	httpReq.Header.Set("Authorization", "Bearer "+os.Getenv("OPENROUTER_API_KEY"))
	httpReq.Header.Set("Content-Type", "application/json")

	resp, err := httpClient.Do(httpReq)
	if err != nil {
		return ChatCompletionResponse{}, err
	}
	defer resp.Body.Close()

	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		// You may want to decode OpenRouter's error JSON here for better
		// messages (see readAPIError below for one sketch).
		return ChatCompletionResponse{}, fmt.Errorf("openrouter status %d", resp.StatusCode)
	}

	var out ChatCompletionResponse
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		return ChatCompletionResponse{}, err
	}
	return out, nil
}
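
// The error-decoding sketch below is only an illustration of the comment in
// ChatCompletion, not part of the original file. It assumes OpenRouter returns
// an OpenAI-style error envelope, {"error": {"message": "..."}}; adjust the
// struct if the actual payload differs.
type apiError struct {
	Error struct {
		Message string `json:"message"`
	} `json:"error"`
}

// readAPIError turns a non-2xx response into a more descriptive error,
// falling back to the bare status code when the body cannot be decoded.
func readAPIError(resp *http.Response) error {
	var e apiError
	if err := json.NewDecoder(resp.Body).Decode(&e); err == nil && e.Error.Message != "" {
		return fmt.Errorf("openrouter status %d: %s", resp.StatusCode, e.Error.Message)
	}
	return fmt.Errorf("openrouter status %d", resp.StatusCode)
}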

// doLLM demonstrates a minimal request/response round trip.
func doLLM() {
	req := ChatCompletionRequest{
		Model: "openai/gpt-oss-20b:free",
		Messages: []Message{
			{
				Role:    "user",
				Content: "Write a short poem about software development.",
			},
		},
	}

	ctx := context.Background()
	resp, err := ChatCompletion(ctx, req)
	if err != nil {
		fmt.Println("Error:", err)
		return
	}

	for _, choice := range resp.Choices {
		fmt.Printf("Response: %s\n", choice.Message.Content)
	}
}