Scaffold project

This commit is contained in:
2026-02-28 20:03:51 +00:00
commit f818b82a43
13 changed files with 1530 additions and 0 deletions

View File

@@ -0,0 +1,241 @@
package anthropic
import (
"context"
"fmt"
"time"
"github.com/anthropics/anthropic-sdk-go"
"github.com/anthropics/anthropic-sdk-go/option"
"github.com/yourusername/go-llm-gateway/internal/api"
"github.com/yourusername/go-llm-gateway/internal/config"
)
// Name is the registry key under which this provider is registered.
const Name = "anthropic"

// Provider implements the Anthropic SDK integration.
// Construct with New; the zero value has no client and every call fails.
type Provider struct {
	cfg    config.ProviderConfig // provider settings (API key, default model)
	client *anthropic.Client     // nil when no API key was configured (see New)
}
// New constructs a Provider from configuration. When no API key is
// present the SDK client is left nil, and Generate/GenerateStream
// report the missing key at call time instead.
func New(cfg config.ProviderConfig) *Provider {
	p := &Provider{cfg: cfg}
	if cfg.APIKey != "" {
		sdkClient := anthropic.NewClient(option.WithAPIKey(cfg.APIKey))
		p.client = &sdkClient
	}
	return p
}
func (p *Provider) Name() string { return Name }
// Generate routes the Open Responses request to Anthropic's Messages
// API and translates the result back into the Open Responses shape.
// It returns an error when no API key or client is configured.
func (p *Provider) Generate(ctx context.Context, req *api.ResponseRequest) (*api.Response, error) {
	if p.cfg.APIKey == "" {
		return nil, fmt.Errorf("anthropic api key missing")
	}
	if p.client == nil {
		return nil, fmt.Errorf("anthropic client not initialized")
	}
	model := chooseModel(req.Model, p.cfg.Model)

	// Flatten each input message's input_text blocks into one string
	// and split the system prompt out, since Anthropic carries it
	// outside the message list.
	converted := make([]anthropic.MessageParam, 0, len(req.Input))
	var systemPrompt string
	for _, in := range req.Input {
		text := ""
		for _, b := range in.Content {
			if b.Type == "input_text" {
				text += b.Text
			}
		}
		switch in.Role {
		case "system":
			systemPrompt = text
		case "user":
			converted = append(converted, anthropic.NewUserMessage(anthropic.NewTextBlock(text)))
		case "assistant":
			converted = append(converted, anthropic.NewAssistantMessage(anthropic.NewTextBlock(text)))
		}
	}

	params := anthropic.MessageNewParams{
		Model:     anthropic.Model(model),
		Messages:  converted,
		MaxTokens: int64(4096),
	}
	if systemPrompt != "" {
		params.System = []anthropic.TextBlockParam{{Text: systemPrompt, Type: "text"}}
	}

	resp, err := p.client.Messages.New(ctx, params)
	if err != nil {
		return nil, fmt.Errorf("anthropic api error: %w", err)
	}

	// Concatenate all returned text blocks into a single assistant message.
	var assistantText string
	for _, b := range resp.Content {
		if b.Type == "text" {
			assistantText += b.Text
		}
	}

	return &api.Response{
		ID:       resp.ID,
		Object:   "response",
		Created:  time.Now().Unix(),
		Model:    string(resp.Model),
		Provider: Name,
		Output: []api.Message{{
			Role:    "assistant",
			Content: []api.ContentBlock{{Type: "output_text", Text: assistantText}},
		}},
		Usage: api.Usage{
			InputTokens:  int(resp.Usage.InputTokens),
			OutputTokens: int(resp.Usage.OutputTokens),
			TotalTokens:  int(resp.Usage.InputTokens + resp.Usage.OutputTokens),
		},
	}, nil
}
// GenerateStream handles streaming requests to Anthropic.
//
// It returns an unbuffered chunk channel and a buffered error channel;
// both are closed when the internal goroutine exits (on completion,
// stream error, or context cancellation). After a clean end of stream a
// final chunk with Done set to true is sent.
func (p *Provider) GenerateStream(ctx context.Context, req *api.ResponseRequest) (<-chan *api.StreamChunk, <-chan error) {
	chunkChan := make(chan *api.StreamChunk)
	// Capacity 1 lets the goroutine deposit an error and return without
	// blocking, even if the caller never reads errChan.
	errChan := make(chan error, 1)
	go func() {
		defer close(chunkChan)
		defer close(errChan)
		if p.cfg.APIKey == "" {
			errChan <- fmt.Errorf("anthropic api key missing")
			return
		}
		if p.client == nil {
			errChan <- fmt.Errorf("anthropic client not initialized")
			return
		}
		model := chooseModel(req.Model, p.cfg.Model)
		// Convert messages: flatten input_text blocks per message and
		// pull the system prompt out of the message list (Anthropic
		// passes it as a separate request field).
		messages := make([]anthropic.MessageParam, 0, len(req.Input))
		var system string
		for _, msg := range req.Input {
			var content string
			for _, block := range msg.Content {
				if block.Type == "input_text" {
					content += block.Text
				}
			}
			switch msg.Role {
			case "user":
				messages = append(messages, anthropic.NewUserMessage(anthropic.NewTextBlock(content)))
			case "assistant":
				messages = append(messages, anthropic.NewAssistantMessage(anthropic.NewTextBlock(content)))
			case "system":
				system = content
			}
		}
		// Build request params (MaxTokens is a fixed cap here).
		params := anthropic.MessageNewParams{
			Model:     anthropic.Model(model),
			Messages:  messages,
			MaxTokens: int64(4096),
		}
		if system != "" {
			systemBlocks := []anthropic.TextBlockParam{
				{Text: system, Type: "text"},
			}
			params.System = systemBlocks
		}
		// Create stream
		stream := p.client.Messages.NewStreaming(ctx, params)
		// Process stream: every SDK event yields one chunk; only
		// content_block_delta/text_delta events carry text, and
		// message_start marks the assistant role.
		for stream.Next() {
			event := stream.Current()
			delta := &api.StreamDelta{}
			var text string
			// Handle different event types
			if event.Type == "content_block_delta" && event.Delta.Type == "text_delta" {
				text = event.Delta.Text
				delta.Content = []api.ContentBlock{
					{Type: "output_text", Text: text},
				}
			}
			if event.Type == "message_start" {
				delta.Role = "assistant"
			}
			streamChunk := &api.StreamChunk{
				Object:   "response.chunk",
				Created:  time.Now().Unix(),
				Model:    string(model),
				Provider: Name,
				Delta:    delta,
			}
			// Abandon the send if the context is cancelled while the
			// consumer is slow.
			select {
			case chunkChan <- streamChunk:
			case <-ctx.Done():
				errChan <- ctx.Err()
				return
			}
		}
		if err := stream.Err(); err != nil {
			errChan <- fmt.Errorf("anthropic stream error: %w", err)
			return
		}
		// Send final chunk marking the end of the stream.
		select {
		case chunkChan <- &api.StreamChunk{Object: "response.chunk", Done: true}:
		case <-ctx.Done():
			errChan <- ctx.Err()
		}
	}()
	return chunkChan, errChan
}
// chooseModel picks the model to use: the request's model wins, then
// the configured default, then a hard-coded Claude fallback.
func chooseModel(requested, defaultModel string) string {
	switch {
	case requested != "":
		return requested
	case defaultModel != "":
		return defaultModel
	default:
		return "claude-3-5-sonnet"
	}
}

View File

@@ -0,0 +1,227 @@
package google
import (
"context"
"fmt"
"time"
"github.com/google/uuid"
"google.golang.org/genai"
"github.com/yourusername/go-llm-gateway/internal/api"
"github.com/yourusername/go-llm-gateway/internal/config"
)
// Name is the registry key under which this provider is registered.
const Name = "google"

// Provider implements the Google Generative AI integration.
// Construct with New; the zero value has no client and every call fails.
type Provider struct {
	cfg    config.ProviderConfig // provider settings (API key, default model)
	client *genai.Client         // nil when no API key was set or client creation failed (see New)
}
// New constructs a Provider using the provided configuration. The
// genai client is built eagerly; on failure a warning is printed and
// construction still succeeds, so the error surfaces later as a
// "client not initialized" failure from Generate/GenerateStream.
func New(cfg config.ProviderConfig) *Provider {
	var client *genai.Client
	if cfg.APIKey != "" {
		c, err := genai.NewClient(context.Background(), &genai.ClientConfig{
			APIKey: cfg.APIKey,
		})
		if err != nil {
			// Log error but don't fail construction - will fail on Generate
			fmt.Printf("warning: failed to create google client: %v\n", err)
		}
		client = c
	}
	return &Provider{cfg: cfg, client: client}
}
func (p *Provider) Name() string { return Name }
// Generate routes the Open Responses request to Gemini and converts
// the first candidate's text parts into a single assistant message.
// It returns an error when no API key or client is configured.
func (p *Provider) Generate(ctx context.Context, req *api.ResponseRequest) (*api.Response, error) {
	if p.cfg.APIKey == "" {
		return nil, fmt.Errorf("google api key missing")
	}
	if p.client == nil {
		return nil, fmt.Errorf("google client not initialized")
	}
	model := chooseModel(req.Model, p.cfg.Model)

	// Gemini distinguishes only "user" and "model" roles: assistant and
	// model map to "model", everything else (including system) to "user".
	var history []*genai.Content
	for _, in := range req.Input {
		var textParts []*genai.Part
		for _, b := range in.Content {
			if b.Type == "input_text" {
				textParts = append(textParts, genai.NewPartFromText(b.Text))
			}
		}
		geminiRole := "user"
		if in.Role == "assistant" || in.Role == "model" {
			geminiRole = "model"
		}
		history = append(history, &genai.Content{
			Role:  geminiRole,
			Parts: textParts,
		})
	}

	resp, err := p.client.Models.GenerateContent(ctx, model, history, nil)
	if err != nil {
		return nil, fmt.Errorf("google api error: %w", err)
	}

	// Collect the text of the first candidate, if any.
	var answer string
	if len(resp.Candidates) > 0 && resp.Candidates[0].Content != nil {
		for _, part := range resp.Candidates[0].Content.Parts {
			if part != nil {
				answer += part.Text
			}
		}
	}

	// Token accounting is optional in the Gemini response.
	var promptTokens, completionTokens int
	if resp.UsageMetadata != nil {
		promptTokens = int(resp.UsageMetadata.PromptTokenCount)
		completionTokens = int(resp.UsageMetadata.CandidatesTokenCount)
	}

	return &api.Response{
		ID:       uuid.NewString(),
		Object:   "response",
		Created:  time.Now().Unix(),
		Model:    model,
		Provider: Name,
		Output: []api.Message{{
			Role:    "assistant",
			Content: []api.ContentBlock{{Type: "output_text", Text: answer}},
		}},
		Usage: api.Usage{
			InputTokens:  promptTokens,
			OutputTokens: completionTokens,
			TotalTokens:  promptTokens + completionTokens,
		},
	}, nil
}
// GenerateStream handles streaming requests to Google.
//
// It returns an unbuffered chunk channel and a buffered error channel;
// both are closed when the worker goroutine exits. A final chunk with
// Done set to true follows a clean end of stream.
func (p *Provider) GenerateStream(ctx context.Context, req *api.ResponseRequest) (<-chan *api.StreamChunk, <-chan error) {
	chunkChan := make(chan *api.StreamChunk)
	// Capacity 1 lets the goroutine record an error and return without
	// blocking on a caller that is not reading errChan.
	errChan := make(chan error, 1)
	go func() {
		defer close(chunkChan)
		defer close(errChan)
		if p.cfg.APIKey == "" {
			errChan <- fmt.Errorf("google api key missing")
			return
		}
		if p.client == nil {
			errChan <- fmt.Errorf("google client not initialized")
			return
		}
		model := chooseModel(req.Model, p.cfg.Model)
		// Convert messages: gather input_text parts and map roles onto
		// Gemini's two-role scheme — assistant/model become "model",
		// everything else becomes "user".
		var contents []*genai.Content
		for _, msg := range req.Input {
			var parts []*genai.Part
			for _, block := range msg.Content {
				if block.Type == "input_text" {
					parts = append(parts, genai.NewPartFromText(block.Text))
				}
			}
			role := "user"
			if msg.Role == "assistant" || msg.Role == "model" {
				role = "model"
			}
			contents = append(contents, &genai.Content{
				Role:  role,
				Parts: parts,
			})
		}
		// Create stream (consumed below with range-over-func syntax).
		stream := p.client.Models.GenerateContentStream(ctx, model, contents, nil)
		// Process stream: each iteration yields either a response or an
		// error; the first error aborts the stream.
		for resp, err := range stream {
			if err != nil {
				errChan <- fmt.Errorf("google stream error: %w", err)
				return
			}
			// Concatenate the first candidate's text parts for this chunk.
			var text string
			if len(resp.Candidates) > 0 && resp.Candidates[0].Content != nil {
				for _, part := range resp.Candidates[0].Content.Parts {
					if part != nil {
						text += part.Text
					}
				}
			}
			delta := &api.StreamDelta{}
			if text != "" {
				delta.Content = []api.ContentBlock{
					{Type: "output_text", Text: text},
				}
			}
			streamChunk := &api.StreamChunk{
				Object:   "response.chunk",
				Created:  time.Now().Unix(),
				Model:    model,
				Provider: Name,
				Delta:    delta,
			}
			// Abandon the send if the context is cancelled while the
			// consumer is slow.
			select {
			case chunkChan <- streamChunk:
			case <-ctx.Done():
				errChan <- ctx.Err()
				return
			}
		}
		// Send final chunk marking the end of the stream.
		select {
		case chunkChan <- &api.StreamChunk{Object: "response.chunk", Done: true}:
		case <-ctx.Done():
			errChan <- ctx.Err()
		}
	}()
	return chunkChan, errChan
}
// chooseModel picks the model to use: the request's model wins, then
// the configured default, then a hard-coded Gemini fallback.
func chooseModel(requested, defaultModel string) string {
	switch {
	case requested != "":
		return requested
	case defaultModel != "":
		return defaultModel
	default:
		return "gemini-2.0-flash-exp"
	}
}

View File

@@ -0,0 +1,210 @@
package openai
import (
"context"
"fmt"
"time"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
"github.com/yourusername/go-llm-gateway/internal/api"
"github.com/yourusername/go-llm-gateway/internal/config"
)
// Name is the registry key under which this provider is registered.
const Name = "openai"

// Provider implements the OpenAI SDK integration.
// Construct with New; the zero value has no client and every call fails.
type Provider struct {
	cfg    config.ProviderConfig // provider settings (API key, default model)
	client *openai.Client        // nil when no API key was configured (see New)
}
// New constructs a Provider from configuration. Without an API key the
// SDK client stays nil, and Generate/GenerateStream fail at call time
// with a descriptive error.
func New(cfg config.ProviderConfig) *Provider {
	provider := &Provider{cfg: cfg}
	if cfg.APIKey != "" {
		sdkClient := openai.NewClient(option.WithAPIKey(cfg.APIKey))
		provider.client = &sdkClient
	}
	return provider
}
// Name returns the provider identifier used for registry lookup.
func (p *Provider) Name() string { return Name }
// Generate routes the Open Responses request to OpenAI's chat
// completions endpoint and maps each returned choice to an assistant
// message. It returns an error when no API key or client is configured.
func (p *Provider) Generate(ctx context.Context, req *api.ResponseRequest) (*api.Response, error) {
	if p.cfg.APIKey == "" {
		return nil, fmt.Errorf("openai api key missing")
	}
	if p.client == nil {
		return nil, fmt.Errorf("openai client not initialized")
	}
	model := chooseModel(req.Model, p.cfg.Model)

	// Flatten each message's input_text blocks into one string and map
	// the role onto the matching SDK message constructor.
	chatMessages := make([]openai.ChatCompletionMessageParamUnion, 0, len(req.Input))
	for _, in := range req.Input {
		joined := ""
		for _, b := range in.Content {
			if b.Type == "input_text" {
				joined += b.Text
			}
		}
		switch in.Role {
		case "system":
			chatMessages = append(chatMessages, openai.SystemMessage(joined))
		case "user":
			chatMessages = append(chatMessages, openai.UserMessage(joined))
		case "assistant":
			chatMessages = append(chatMessages, openai.AssistantMessage(joined))
		}
	}

	resp, err := p.client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
		Model:    openai.ChatModel(model),
		Messages: chatMessages,
	})
	if err != nil {
		return nil, fmt.Errorf("openai api error: %w", err)
	}

	// One output message per returned choice.
	output := make([]api.Message, 0, len(resp.Choices))
	for _, choice := range resp.Choices {
		output = append(output, api.Message{
			Role: "assistant",
			Content: []api.ContentBlock{
				{Type: "output_text", Text: choice.Message.Content},
			},
		})
	}

	return &api.Response{
		ID:       resp.ID,
		Object:   "response",
		Created:  time.Now().Unix(),
		Model:    resp.Model,
		Provider: Name,
		Output:   output,
		Usage: api.Usage{
			InputTokens:  int(resp.Usage.PromptTokens),
			OutputTokens: int(resp.Usage.CompletionTokens),
			TotalTokens:  int(resp.Usage.TotalTokens),
		},
	}, nil
}
// GenerateStream handles streaming requests to OpenAI.
//
// It returns an unbuffered chunk channel and a buffered error channel;
// both are closed when the internal goroutine exits. One chunk is sent
// per choice per SDK event, and a final chunk with Done set to true
// follows a clean end of stream.
func (p *Provider) GenerateStream(ctx context.Context, req *api.ResponseRequest) (<-chan *api.StreamChunk, <-chan error) {
	chunkChan := make(chan *api.StreamChunk)
	// Capacity 1 lets the goroutine deposit an error and return without
	// blocking, even if the caller never reads errChan.
	errChan := make(chan error, 1)
	go func() {
		defer close(chunkChan)
		defer close(errChan)
		if p.cfg.APIKey == "" {
			errChan <- fmt.Errorf("openai api key missing")
			return
		}
		if p.client == nil {
			errChan <- fmt.Errorf("openai client not initialized")
			return
		}
		model := chooseModel(req.Model, p.cfg.Model)
		// Convert messages: flatten input_text blocks per message and
		// dispatch on role to the matching SDK constructor.
		messages := make([]openai.ChatCompletionMessageParamUnion, 0, len(req.Input))
		for _, msg := range req.Input {
			var content string
			for _, block := range msg.Content {
				if block.Type == "input_text" {
					content += block.Text
				}
			}
			switch msg.Role {
			case "user":
				messages = append(messages, openai.UserMessage(content))
			case "assistant":
				messages = append(messages, openai.AssistantMessage(content))
			case "system":
				messages = append(messages, openai.SystemMessage(content))
			}
		}
		// Create streaming request
		stream := p.client.Chat.Completions.NewStreaming(ctx, openai.ChatCompletionNewParams{
			Model:    openai.ChatModel(model),
			Messages: messages,
		})
		// Process stream: copy each choice delta's role/content into an
		// Open Responses stream chunk.
		for stream.Next() {
			chunk := stream.Current()
			for _, choice := range chunk.Choices {
				delta := &api.StreamDelta{}
				if choice.Delta.Role != "" {
					delta.Role = string(choice.Delta.Role)
				}
				if choice.Delta.Content != "" {
					delta.Content = []api.ContentBlock{
						{Type: "output_text", Text: choice.Delta.Content},
					}
				}
				streamChunk := &api.StreamChunk{
					ID:       chunk.ID,
					Object:   "response.chunk",
					Created:  time.Now().Unix(),
					Model:    chunk.Model,
					Provider: Name,
					Delta:    delta,
				}
				// Abandon the send if the context is cancelled while
				// the consumer is slow.
				select {
				case chunkChan <- streamChunk:
				case <-ctx.Done():
					errChan <- ctx.Err()
					return
				}
			}
		}
		if err := stream.Err(); err != nil {
			errChan <- fmt.Errorf("openai stream error: %w", err)
			return
		}
		// Send final chunk marking the end of the stream.
		select {
		case chunkChan <- &api.StreamChunk{Object: "response.chunk", Done: true}:
		case <-ctx.Done():
			errChan <- ctx.Err()
		}
	}()
	return chunkChan, errChan
}
// chooseModel picks the model to use: the request's model wins, then
// the configured default, then a hard-coded GPT fallback.
func chooseModel(requested, defaultModel string) string {
	switch {
	case requested != "":
		return requested
	case defaultModel != "":
		return defaultModel
	default:
		return "gpt-4o-mini"
	}
}

View File

@@ -0,0 +1,78 @@
package providers
import (
	"context"
	"fmt"
	"sort"
	"strings"

	"github.com/yourusername/go-llm-gateway/internal/api"
	"github.com/yourusername/go-llm-gateway/internal/config"
	anthropicprovider "github.com/yourusername/go-llm-gateway/internal/providers/anthropic"
	googleprovider "github.com/yourusername/go-llm-gateway/internal/providers/google"
	openaiprovider "github.com/yourusername/go-llm-gateway/internal/providers/openai"
)
// Provider represents a unified interface that each LLM provider must
// implement: a stable name, a blocking Generate call, and a streaming
// variant that delivers chunks and errors over channels.
type Provider interface {
	Name() string
	Generate(ctx context.Context, req *api.ResponseRequest) (*api.Response, error)
	GenerateStream(ctx context.Context, req *api.ResponseRequest) (<-chan *api.StreamChunk, <-chan error)
}

// Registry keeps track of registered providers by key (e.g. "openai").
// Construct with NewRegistry; the map is populated once and read after.
type Registry struct {
	providers map[string]Provider // keyed by each provider's Name constant
}
// NewRegistry constructs provider implementations from configuration.
// A provider is registered only when its API key is set; if none are
// configured an error is returned.
func NewRegistry(cfg config.ProvidersConfig) (*Registry, error) {
	registry := &Registry{providers: map[string]Provider{}}
	if cfg.Google.APIKey != "" {
		registry.providers[googleprovider.Name] = googleprovider.New(cfg.Google)
	}
	if cfg.Anthropic.APIKey != "" {
		registry.providers[anthropicprovider.Name] = anthropicprovider.New(cfg.Anthropic)
	}
	if cfg.OpenAI.APIKey != "" {
		registry.providers[openaiprovider.Name] = openaiprovider.New(cfg.OpenAI)
	}
	if len(registry.providers) == 0 {
		return nil, fmt.Errorf("no providers configured")
	}
	return registry, nil
}
// Get returns the provider registered under name and whether it exists.
func (r *Registry) Get(name string) (Provider, bool) {
	provider, found := r.providers[name]
	return provider, found
}
// Default returns the provider inferred from the model name prefix
// (gpt/o1 → OpenAI, claude → Anthropic, gemini → Google). When the
// model is empty, unrecognized, or its provider is not configured, it
// falls back to the configured provider whose name sorts first, so the
// fallback is deterministic across calls.
func (r *Registry) Default(model string) (Provider, error) {
	if model != "" {
		switch {
		case strings.HasPrefix(model, "gpt") || strings.HasPrefix(model, "o1"):
			if p, ok := r.providers[openaiprovider.Name]; ok {
				return p, nil
			}
		case strings.HasPrefix(model, "claude"):
			if p, ok := r.providers[anthropicprovider.Name]; ok {
				return p, nil
			}
		case strings.HasPrefix(model, "gemini"):
			if p, ok := r.providers[googleprovider.Name]; ok {
				return p, nil
			}
		}
	}
	// Go randomizes map iteration order, so taking the first provider
	// from a bare range loop would pick a different provider on
	// different calls. Sort the keys for a stable fallback choice.
	names := make([]string, 0, len(r.providers))
	for name := range r.providers {
		names = append(names, name)
	}
	if len(names) == 0 {
		return nil, fmt.Errorf("no providers available")
	}
	sort.Strings(names)
	return r.providers[names[0]], nil
}