From 513af56ffff15c662d43511ee900b7a6940b87db Mon Sep 17 00:00:00 2001
From: Olivier Tremblay
Date: Sun, 16 Nov 2025 20:28:52 -0500
Subject: [PATCH] feat: add AnthropicSummarizer implementation using
 anthropic-sdk-go package

Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M)
---
 cmd/acb/main.go      | 25 ++++++++++++++++-----
 cmd/acb/summarize.go | 53 ++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 72 insertions(+), 6 deletions(-)

diff --git a/cmd/acb/main.go b/cmd/acb/main.go
index 7d551e2..0f3df32 100644
--- a/cmd/acb/main.go
+++ b/cmd/acb/main.go
@@ -67,26 +67,39 @@ func main() {
 	// vikunjaTasks = DoVikunja(*start, *end)
 	// }
 
-	// Get OpenAI environment variables
+	// Get environment variables
 	openaiEndpoint := os.Getenv("OPENAI_ENDPOINT")
 	openaiToken := os.Getenv("OPENAI_TOKEN")
 	openaiModel := os.Getenv("OPENAI_MODEL")
+	anthropicModel := os.Getenv("ANTHROPIC_MODEL")
 
-	// Create Ollama summarizer
-	ollamaSummarizer := NewOllamaSummarizer("", "", openaiModel)
+	// Create appropriate summarizer based on available environment variables
+	var summarizer Summarizer
+	if openaiEndpoint != "" && openaiToken != "" {
+		// Use OpenAI summarizer
+		summarizer = NewOpenAISummarizer(openaiEndpoint, openaiToken, openaiModel)
+	} else if anthropicModel != "" {
+		// Use Anthropic summarizer
+		summarizer = NewAnthropicSummarizer(anthropicModel)
+	} else {
+		// Use Ollama summarizer as fallback
+		summarizer = NewOllamaSummarizer("", "", openaiModel)
+	}
 
 	// Always call SummarizeData to ensure prompt file is created for debugging
-	summ, err := SummarizeData(*employeename, prs, issues, vikunjaTasks, finalPrompt, ollamaSummarizer)
+	summ, err := SummarizeData(*employeename, prs, issues, vikunjaTasks, finalPrompt, summarizer)
 	if err != nil {
 		fmt.Println(fmt.Errorf("error getting PRs: %w", err))
 		os.Exit(1)
 	}
 
-	// Only call summarization endpoint if OpenAI env vars are set
+	// Only call summarization endpoint if we have appropriate credentials
 	if openaiEndpoint != "" && openaiToken != "" {
 		fmt.Println(summ)
+	} else if os.Getenv("ANTHROPIC_API_KEY") != "" && anthropicModel != "" {
+		fmt.Println(summ)
 	} else {
-		fmt.Println("OpenAI endpoint and token not set, but prompt file was created for debugging")
+		fmt.Println("No summarization endpoint configured, but prompt file was created for debugging")
 	}
 }
 
diff --git a/cmd/acb/summarize.go b/cmd/acb/summarize.go
index 24dcdb6..eeab02a 100644
--- a/cmd/acb/summarize.go
+++ b/cmd/acb/summarize.go
@@ -10,6 +10,8 @@ import (
 	"o5r.ca/autocrossbow/issues"
 	"o5r.ca/autocrossbow/issues/vikunja"
 
+	"github.com/anthropics/anthropic-sdk-go"
+	"github.com/anthropics/anthropic-sdk-go/option"
 	"github.com/ollama/ollama/api"
 )
 
@@ -113,6 +115,57 @@ func (o *OllamaSummarizer) Summarize(fullPrompt string) (string, error) {
 	return result, nil
 }
 
+// AnthropicSummarizer implements the Summarizer interface for Anthropic API
+type AnthropicSummarizer struct {
+	client *anthropic.Client
+	model  string
+}
+
+// NewAnthropicSummarizer creates a new AnthropicSummarizer with the given parameters
+func NewAnthropicSummarizer(model string) *AnthropicSummarizer {
+	// Create the Anthropic client with the API key from environment
+	client := anthropic.NewClient(
+		option.WithAPIKey(os.Getenv("ANTHROPIC_API_KEY")),
+	)
+
+	return &AnthropicSummarizer{
+		client: client,
+		model:  model,
+	}
+}
+
+// Summarize sends the prompt to the Anthropic API for summarization
+func (a *AnthropicSummarizer) Summarize(fullPrompt string) (string, error) {
+	// Check if required parameters are set
+	if a.model == "" {
+		return "", fmt.Errorf("Anthropic model not set")
+	}
+
+	// Create the request
+	ctx := context.Background()
+
+	message, err := a.client.Messages.New(ctx, anthropic.MessageNewParams{
+		Model:     a.model,
+		MaxTokens: 1024,
+		Messages: []anthropic.MessageParam{
+			anthropic.NewUserMessage(anthropic.NewTextBlock(fullPrompt)),
+		},
+	})
+	if err != nil {
+		return "", err
+	}
+
+	// Extract the response text
+	var result string
+	for _, content := range message.Content {
+		if textBlock, ok := content.AsAny().(*anthropic.TextBlock); ok {
+			result += textBlock.Text
+		}
+	}
+
+	return result, nil
+}
+
 // buildPrompt constructs the prompt string from PRs, issues, and tasks
 func buildPrompt(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string) string {
 	// Build a prompt string