feat: add AnthropicSummarizer implementation using anthropic-sdk-go package

Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat>
Olivier Tremblay 2025-11-16 20:28:52 -05:00
parent 7f2629d09c
commit 513af56fff
2 changed files with 72 additions and 6 deletions

@@ -67,26 +67,39 @@ func main() {
     // vikunjaTasks = DoVikunja(*start, *end)
     // }
 
-    // Get OpenAI environment variables
+    // Get environment variables
     openaiEndpoint := os.Getenv("OPENAI_ENDPOINT")
     openaiToken := os.Getenv("OPENAI_TOKEN")
     openaiModel := os.Getenv("OPENAI_MODEL")
+    anthropicModel := os.Getenv("ANTHROPIC_MODEL")
 
-    // Create Ollama summarizer
-    ollamaSummarizer := NewOllamaSummarizer("", "", openaiModel)
+    // Create appropriate summarizer based on available environment variables
+    var summarizer Summarizer
+    if openaiEndpoint != "" && openaiToken != "" {
+        // Use OpenAI summarizer
+        summarizer = NewOpenAISummarizer(openaiEndpoint, openaiToken, openaiModel)
+    } else if anthropicModel != "" {
+        // Use Anthropic summarizer
+        summarizer = NewAnthropicSummarizer(anthropicModel)
+    } else {
+        // Use Ollama summarizer as fallback
+        summarizer = NewOllamaSummarizer("", "", openaiModel)
+    }
 
     // Always call SummarizeData to ensure prompt file is created for debugging
-    summ, err := SummarizeData(*employeename, prs, issues, vikunjaTasks, finalPrompt, ollamaSummarizer)
+    summ, err := SummarizeData(*employeename, prs, issues, vikunjaTasks, finalPrompt, summarizer)
     if err != nil {
         fmt.Println(fmt.Errorf("error getting PRs: %w", err))
         os.Exit(1)
     }
 
-    // Only call summarization endpoint if OpenAI env vars are set
+    // Only call summarization endpoint if we have appropriate credentials
     if openaiEndpoint != "" && openaiToken != "" {
         fmt.Println(summ)
+    } else if os.Getenv("ANTHROPIC_API_KEY") != "" && anthropicModel != "" {
+        fmt.Println(summ)
     } else {
-        fmt.Println("OpenAI endpoint and token not set, but prompt file was created for debugging")
+        fmt.Println("No summarization endpoint configured, but prompt file was created for debugging")
     }
 }
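
The branch above gives OpenAI precedence over Anthropic, with Ollama as the fallback when neither is configured. A minimal sketch of how that precedence could be exercised in isolation (a hypothetical helper, not part of this commit; it assumes the three constructors and the Summarizer interface used elsewhere in this repository):

// selectSummarizer mirrors the new selection logic so the precedence
// (OpenAI, then Anthropic, then Ollama) can be tested without running main().
func selectSummarizer(openaiEndpoint, openaiToken, openaiModel, anthropicModel string) Summarizer {
    if openaiEndpoint != "" && openaiToken != "" {
        return NewOpenAISummarizer(openaiEndpoint, openaiToken, openaiModel)
    }
    if anthropicModel != "" {
        return NewAnthropicSummarizer(anthropicModel)
    }
    return NewOllamaSummarizer("", "", openaiModel)
}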

@@ -10,6 +10,8 @@ import (
     "o5r.ca/autocrossbow/issues"
     "o5r.ca/autocrossbow/issues/vikunja"
 
+    "github.com/anthropics/anthropic-sdk-go"
+    "github.com/anthropics/anthropic-sdk-go/option"
     "github.com/ollama/ollama/api"
 )
@@ -113,6 +115,57 @@ func (o *OllamaSummarizer) Summarize(fullPrompt string) (string, error) {
     return result, nil
 }
 
+// AnthropicSummarizer implements the Summarizer interface for the Anthropic API
+type AnthropicSummarizer struct {
+    client anthropic.Client
+    model  string
+}
+
+// NewAnthropicSummarizer creates a new AnthropicSummarizer for the given model
+func NewAnthropicSummarizer(model string) *AnthropicSummarizer {
+    // Create the Anthropic client with the API key from the environment
+    client := anthropic.NewClient(
+        option.WithAPIKey(os.Getenv("ANTHROPIC_API_KEY")),
+    )
+    return &AnthropicSummarizer{
+        client: client,
+        model:  model,
+    }
+}
+
+// Summarize sends the prompt to the Anthropic API for summarization
+func (a *AnthropicSummarizer) Summarize(fullPrompt string) (string, error) {
+    // Check if required parameters are set
+    if a.model == "" {
+        return "", fmt.Errorf("Anthropic model not set")
+    }
+
+    // Create the request
+    ctx := context.Background()
+    message, err := a.client.Messages.New(ctx, anthropic.MessageNewParams{
+        Model:     anthropic.Model(a.model),
+        MaxTokens: 1024,
+        Messages: []anthropic.MessageParam{
+            anthropic.NewUserMessage(anthropic.NewTextBlock(fullPrompt)),
+        },
+    })
+    if err != nil {
+        return "", err
+    }
+
+    // Extract the response text from the returned content blocks
+    var result string
+    for _, content := range message.Content {
+        if textBlock, ok := content.AsAny().(anthropic.TextBlock); ok {
+            result += textBlock.Text
+        }
+    }
+    return result, nil
+}
+
 // buildPrompt constructs the prompt string from PRs, issues, and tasks
 func buildPrompt(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string) string {
     // Build a prompt string
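
For completeness, a small sketch of how the new summarizer is driven through the shared Summarizer interface (illustrative only; the function name, prompt text, and error handling here are assumptions, not taken from the commit):

// Sketch: requires ANTHROPIC_API_KEY and ANTHROPIC_MODEL in the environment.
func exampleAnthropicSummarize() {
    var s Summarizer = NewAnthropicSummarizer(os.Getenv("ANTHROPIC_MODEL"))
    summary, err := s.Summarize("Summarize this week's activity: reviewed 2 PRs, closed 3 issues.")
    if err != nil {
        fmt.Println("anthropic summarization failed:", err)
        return
    }
    fmt.Println(summary)
}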