diff --git a/cmd/acb/main.go b/cmd/acb/main.go
index db5a0d7..2ae98b0 100644
--- a/cmd/acb/main.go
+++ b/cmd/acb/main.go
@@ -67,23 +67,38 @@ func main() {
 	// vikunjaTasks = DoVikunja(*start, *end)
 	// }
 
-	// Get OpenAI environment variables
+	// Get environment variables
 	openaiEndpoint := os.Getenv("OPENAI_ENDPOINT")
 	openaiToken := os.Getenv("OPENAI_TOKEN")
 	openaiModel := os.Getenv("OPENAI_MODEL")
+	anthropicModel := os.Getenv("ANTHROPIC_MODEL")
 
-	// Check if OpenAI environment variables are set before calling Summarize
-	if openaiEndpoint == "" || openaiToken == "" {
-		fmt.Println("Error: OPENAI_ENDPOINT and OPENAI_TOKEN must be set in environment variables to summarize")
-		os.Exit(1)
+	// Create appropriate summarizer based on available environment variables
+	var summarizer Summarizer
+	if openaiEndpoint != "" && openaiToken != "" {
+		// Use OpenAI summarizer
+		summarizer = NewOpenAISummarizer(openaiEndpoint, openaiToken, openaiModel)
+	} else if anthropicModel != "" {
+		// Use Anthropic summarizer
+		summarizer = NewAnthropicSummarizer(anthropicModel)
+	} else {
+		// Use Ollama summarizer as fallback
+		summarizer = NewOllamaSummarizer("", "", openaiModel)
 	}
 
-	summ, err := SummarizeData(*employeename, prs, issues, vikunjaTasks, finalPrompt, openaiEndpoint, openaiToken, openaiModel)
+	// Always call SummarizeData to ensure prompt file is created for debugging
+	summ, err := SummarizeData(*employeename, prs, issues, vikunjaTasks, finalPrompt, summarizer)
 	if err != nil {
 		fmt.Println(fmt.Errorf("error getting PRs: %w", err))
 		os.Exit(1)
 	}
-	fmt.Println(summ)
+
+	// Only call summarization endpoint if we have a valid summarizer
+	if summarizer != nil {
+		fmt.Println(summ)
+	} else {
+		fmt.Println("No summarization endpoint configured, but prompt file was created for debugging")
+	}
 }
 
 func DoPrs(proj, ghusername, start, end string) map[string][]contributions.PullRequest {
diff --git a/cmd/acb/summarize.go b/cmd/acb/summarize.go
index 7177df3..eeab02a 100644
--- a/cmd/acb/summarize.go
+++ b/cmd/acb/summarize.go
@@ -1,17 +1,18 @@
 package main
 
 import (
-	"bytes"
-	"encoding/json"
+	"context"
 	"fmt"
-	"io"
-	"net/http"
 	"os"
 	"time"
 
 	"o5r.ca/autocrossbow/contributions"
 	"o5r.ca/autocrossbow/issues"
 	"o5r.ca/autocrossbow/issues/vikunja"
+
+	"github.com/anthropics/anthropic-sdk-go"
+	"github.com/anthropics/anthropic-sdk-go/option"
+	"github.com/ollama/ollama/api"
 )
 
 const defaultPrompt = `I will provide you, for a given period, with an employee name and a list of Pull Request titles and summaries split by repository, and a list of Jira Issues an employee has worked on. I may also provide, optionally, the employee's self-assessment. If I do, integrate that.
@@ -19,6 +20,152 @@
 I'd like you to summarize the employee's accomplishments for the quarter
 I'd like the summary for the accomplishments to be in prose form, in a few paragraphs separated based on areas of work.
 Keep answers to 500 words for the summary.`
+
+// Summarizer interface defines the contract for summarization implementations
+type Summarizer interface {
+	Summarize(prompt string) (string, error)
+}
+
+// OpenAISummarizer implements the Summarizer interface for OpenAI-compatible endpoints
+type OpenAISummarizer struct {
+	endpoint string
+	token    string
+	model    string
+}
+
+// NewOpenAISummarizer creates a new OpenAISummarizer with the given parameters
+func NewOpenAISummarizer(endpoint, token, model string) *OpenAISummarizer {
+	return &OpenAISummarizer{
+		endpoint: endpoint,
+		token:    token,
+		model:    model,
+	}
+}
+
+// Summarize sends the prompt to an OpenAI-compatible endpoint for summarization
+func (o *OpenAISummarizer) Summarize(fullPrompt string) (string, error) {
+	// Check if required environment variables are set
+	if o.endpoint == "" || o.token == "" {
+		return "", fmt.Errorf("OpenAI endpoint or token not set")
+	}
+
+	// Create the request
+	ctx := context.Background()
+	client, _ := api.ClientFromEnvironment()
+
+	req := api.GenerateRequest{
+		Model:  o.model,
+		Prompt: fullPrompt,
+		Stream: nil,
+	}
+
+	var result string
+	err := client.Generate(ctx, &req, func(resp api.GenerateResponse) error {
+		result += resp.Response
+		return nil
+	})
+	if err != nil {
+		return "", err
+	}
+
+	return result, nil
+}
+
+// OllamaSummarizer implements the Summarizer interface for Ollama endpoints
+type OllamaSummarizer struct {
+	endpoint string
+	token    string
+	model    string
+}
+
+// NewOllamaSummarizer creates a new OllamaSummarizer with the given parameters
+func NewOllamaSummarizer(endpoint, token, model string) *OllamaSummarizer {
+	return &OllamaSummarizer{
+		endpoint: endpoint,
+		token:    token,
+		model:    model,
+	}
+}
+
+// Summarize sends the prompt to an Ollama endpoint for summarization
+func (o *OllamaSummarizer) Summarize(fullPrompt string) (string, error) {
+	// Check if required parameters are set
+	if o.model == "" {
+		return "", fmt.Errorf("Ollama model not set")
+	}
+
+	// Create the request
+	ctx := context.Background()
+	client, _ := api.ClientFromEnvironment()
+
+	req := &api.GenerateRequest{
+		Model:  o.model,
+		Prompt: fullPrompt,
+		Stream: nil,
+	}
+
+	var result string
+	err := client.Generate(ctx, req, func(resp api.GenerateResponse) error {
+		result += resp.Response
+		return nil
+	})
+	if err != nil {
+		return "", err
+	}
+
+	return result, nil
+}
+
+// AnthropicSummarizer implements the Summarizer interface for Anthropic API
+type AnthropicSummarizer struct {
+	client *anthropic.Client
+	model  string
+}
+
+// NewAnthropicSummarizer creates a new AnthropicSummarizer with the given parameters
+func NewAnthropicSummarizer(model string) *AnthropicSummarizer {
+	// Create the Anthropic client with the API key from environment
+	client := anthropic.NewClient(
+		option.WithAPIKey(os.Getenv("ANTHROPIC_API_KEY")),
+	)
+
+	return &AnthropicSummarizer{
+		client: client,
+		model:  model,
+	}
+}
+
+// Summarize sends the prompt to the Anthropic API for summarization
+func (a *AnthropicSummarizer) Summarize(fullPrompt string) (string, error) {
+	// Check if required parameters are set
+	if a.model == "" {
+		return "", fmt.Errorf("Anthropic model not set")
+	}
+
+	// Create the request
+	ctx := context.Background()
+
+	message, err := a.client.Messages.New(ctx, anthropic.MessageNewParams{
+		Model:     a.model,
+		MaxTokens: 1024,
+		Messages: []anthropic.MessageParam{
+			anthropic.NewUserMessage(anthropic.NewTextBlock(fullPrompt)),
+		},
+	})
+	if err != nil {
+		return "", err
+	}
+
+	// Extract the response text
+	var result string
+	for _, content := range message.Content {
+		if textBlock, ok := content.AsAny().(*anthropic.TextBlock); ok {
+			result += textBlock.Text
+		}
+	}
+
+	return result, nil
+}
+
 // buildPrompt constructs the prompt string from PRs, issues, and tasks
 func buildPrompt(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string) string {
 	// Build a prompt string
@@ -36,7 +183,7 @@ func buildPrompt(employeename string, prs map[string][]contributions.PullRequest
 		fullPrompt += fmt.Sprintf("Description: %s\n", issue.Description)
 		fullPrompt += "--------\n"
 	}
-	
+
 	// Save prompt to file for debugging
 	promptf, err := os.Create(fmt.Sprintf("prompt-%s-%d.json", employeename, time.Now().Unix()))
 	if err != nil {
@@ -45,66 +192,20 @@
 	}
 	promptf.WriteString(fullPrompt)
 	defer promptf.Close()
-	
+
 	return fullPrompt
 }
 
-// callSummarizationEndpoint sends the prompt to an OpenAI-compatible endpoint for summarization
-func callSummarizationEndpoint(fullPrompt string, openaiEndpoint string, openaiToken string, openaiModel string) (string, error) {
-	// Create a JSON payload for the OpenAI API
-	payload := struct {
-		Model    string `json:"model"`
-		Messages []struct {
-			Role    string `json:"role"`
-			Content string `json:"content"`
-		} `json:"messages"`
-	}{
-		Model: openaiModel,
-		Messages: []struct {
-			Role    string `json:"role"`
-			Content string `json:"content"`
-		}{{Role: "system", Content: fullPrompt}},
-	}
-
-	jsonPayload, err := json.Marshal(payload)
-	fmt.Println(string(jsonPayload))
-	if err != nil {
-		return "", err
-	}
-
-	// Create a POST request to the OpenAI endpoint with JSON body
-	req, err := http.NewRequest("POST", openaiEndpoint, bytes.NewBuffer(jsonPayload))
-	if err != nil {
-		return "", err
-	}
-	req.Header.Set("Content-Type", "application/json")
-	req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", openaiToken))
-
-	client := &http.Client{}
-	resp, err := client.Do(req)
-	if err != nil {
-		return "", err
-	}
-	defer resp.Body.Close()
-
-	body, err := io.ReadAll(resp.Body)
-	if err != nil {
-		return "", err
-	}
-
-	return string(body), nil
-}
-
 // SummarizeData builds the prompt and calls the summarization endpoint
-func SummarizeData(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string, openaiEndpoint string, openaiToken string, openaiModel string) (string, error) {
+func SummarizeData(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string, summarizer Summarizer) (string, error) {
 	// Build the prompt
 	fullPrompt := buildPrompt(employeename, prs, issues, tasks, prompt)
-	
-	// Call the summarization endpoint
-	result, err := callSummarizationEndpoint(fullPrompt, openaiEndpoint, openaiToken, openaiModel)
+
+	// Always call the summarizer's Summarize method
+	result, err := summarizer.Summarize(fullPrompt)
 	if err != nil {
 		return "", err
 	}
-	
+
 	return result, nil
 }