Initial working copy

Chandler Swift 2025-12-19 23:14:28 -06:00
parent 2a335176e6
commit 2c876cef42
19 changed files with 783 additions and 126 deletions

@@ -5,10 +5,17 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
+	"log"
 	"net/http"
 	"time"
 )
 
+type OpenRouterProvider struct {
+	// Endpoint string
+	Token string
+	Model string // "openai/gpt-oss-20b:free"
+}
+
 type Message struct {
 	Role    string `json:"role"` // "system" | "user" | "assistant"
 	Content string `json:"content"`
@@ -21,6 +28,10 @@ type ChatCompletionRequest struct {
 	MaxTokens *int            `json:"max_tokens,omitempty"`
 	TopP      *float64        `json:"top_p,omitempty"`
 	Stop      json.RawMessage `json:"stop,omitempty"` // string or []string; keep flexible
+	Provider  struct {
+		Sort string `json:"sort,omitempty"`
+	} `json:"provider,omitempty"`
 }
 
 type ChatCompletionResponse struct {
@@ -35,60 +46,52 @@ type ChatCompletionResponse struct {
 	} `json:"choices"`
 }
 
-func ChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error) {
-	httpClient := http.Client{Timeout: 10 * time.Second}
+func (p OpenRouterProvider) Complete(ctx context.Context, prompt string) (string, error) {
+	req := ChatCompletionRequest{
+		Model: p.Model,
+		Messages: []Message{
+			{
+				Role:    "user",
+				Content: prompt,
+			},
+		},
+		Provider: struct {
+			Sort string `json:"sort,omitempty"`
+		}{
+			Sort: "throughput",
+		},
+	}
+	httpClient := http.Client{Timeout: 10 * time.Second}
 	body, err := json.Marshal(req)
 	if err != nil {
-		return ChatCompletionResponse{}, err
+		return "", err
 	}
 	httpReq, err := http.NewRequestWithContext(ctx, "POST", "https://openrouter.ai/api/v1/chat/completions", bytes.NewReader(body))
 	if err != nil {
-		return ChatCompletionResponse{}, err
+		return "", err
 	}
-	httpReq.Header.Set("Authorization", "Bearer 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")
+	httpReq.Header.Set("Authorization", fmt.Sprintf("Bearer %v", p.Token))
 	httpReq.Header.Set("Content-Type", "application/json")
 	resp, err := httpClient.Do(httpReq)
 	if err != nil {
-		return ChatCompletionResponse{}, err
+		return "", err
 	}
 	defer resp.Body.Close()
 	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
 		// You may want to decode OpenRouter's error JSON here for better messages
-		return ChatCompletionResponse{}, fmt.Errorf("openrouter status %d", resp.StatusCode)
+		return "", fmt.Errorf("openrouter status %d", resp.StatusCode)
 	}
 	var out ChatCompletionResponse
 	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
-		return ChatCompletionResponse{}, err
-	}
-	return out, nil
-}
-
-func doLLM() {
-	req := ChatCompletionRequest{
-		Model: "openai/gpt-oss-20b:free",
-		Messages: []Message{
-			{
-				Role:    "user",
-				Content: "Write a short poem about software development.",
-			},
-		},
-	}
-	ctx := context.Background()
-	resp, err := client.ChatCompletion(ctx, req)
-	if err != nil {
-		fmt.Println("Error:", err)
-		return
-	}
-	for _, choice := range resp.Choices {
-		fmt.Printf("Response: %s\n", choice.Message.Content)
+		log.Println(err)
+		log.Println(out)
+		return "", err
 	}
+	return out.Choices[0].Message.Content, nil
 }
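
Since this commit removes the old doLLM example, the refactored provider could be driven roughly as in the sketch below. Only OpenRouterProvider, its Token and Model fields, and Complete come from the diff above; the main wrapper, the OPENROUTER_TOKEN environment variable, and the assumption that everything lives in package main are illustrative, not part of this change.

package main

import (
	"context"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical wiring: the token now comes from configuration (here an
	// environment variable) instead of the hard-coded key this commit removes.
	p := OpenRouterProvider{
		Token: os.Getenv("OPENROUTER_TOKEN"), // assumed variable name, not from the diff
		Model: "openai/gpt-oss-20b:free",
	}

	// Complete already applies a 10-second HTTP client timeout internally;
	// the context here just bounds the overall call.
	ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)
	defer cancel()

	reply, err := p.Complete(ctx, "Write a short poem about software development.")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(reply)
}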