package main

import (
	"context"
	"errors"
	"fmt"
	"os"
	"strings"
	"time"

	"github.com/anthropics/anthropic-sdk-go"
	"github.com/anthropics/anthropic-sdk-go/option"
	"github.com/ollama/ollama/api"

	"o5r.ca/autocrossbow/contributions"
	"o5r.ca/autocrossbow/issues"
	"o5r.ca/autocrossbow/issues/vikunja"
)

// defaultPrompt is the base instruction prepended to every summarization
// request before the per-employee PR/issue data is appended.
const defaultPrompt = `I will provide you, for a given period, with an employee name and a list of Pull Request titles and summaries split by repository, and a list of Jira Issues an employee has worked on. I may also provide, optionally, the employee's self-assessment. If I do, integrate that. I'd like you to summarize the employee's accomplishments for the quarter I'd like the summary for the accomplishments to be in prose form, in a few paragraphs separated based on areas of work. Keep answers to 500 words for the summary.`

// Summarizer defines the contract for summarization implementations.
type Summarizer interface {
	// Summarize sends the given prompt to a model backend and returns the
	// generated summary text.
	Summarize(prompt string) (string, error)
}

// OpenAISummarizer implements the Summarizer interface for OpenAI-compatible
// endpoints.
type OpenAISummarizer struct {
	endpoint string
	token    string
	model    string
}

// NewOpenAISummarizer creates a new OpenAISummarizer with the given endpoint,
// bearer token, and model name.
func NewOpenAISummarizer(endpoint, token, model string) *OpenAISummarizer {
	return &OpenAISummarizer{
		endpoint: endpoint,
		token:    token,
		model:    model,
	}
}

// Summarize sends the prompt to the configured endpoint for summarization.
//
// NOTE(review): despite the type's name, this currently creates an Ollama
// client from the environment (api.ClientFromEnvironment) — the endpoint and
// token fields are validated but never used, so requests do not actually go
// through an OpenAI-compatible API. Confirm whether a real OpenAI client was
// intended here.
func (o *OpenAISummarizer) Summarize(fullPrompt string) (string, error) {
	// Check if required configuration is set.
	if o.endpoint == "" || o.token == "" {
		return "", errors.New("OpenAI endpoint or token not set")
	}

	// Fixed: the error from ClientFromEnvironment was previously discarded,
	// which could leave client nil and panic inside Generate.
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return "", fmt.Errorf("creating client: %w", err)
	}

	req := &api.GenerateRequest{
		Model:  o.model,
		Prompt: fullPrompt,
		Stream: nil, // nil keeps the client's default streaming behavior
	}

	// Responses arrive as streamed chunks; accumulate them with a Builder
	// instead of repeated string concatenation.
	var result strings.Builder
	err = client.Generate(context.Background(), req, func(resp api.GenerateResponse) error {
		result.WriteString(resp.Response)
		return nil
	})
	if err != nil {
		return "", err
	}
	return result.String(), nil
}

// OllamaSummarizer implements the Summarizer interface for Ollama endpoints.
type OllamaSummarizer struct {
	endpoint string
	token    string
	model    string
}

// NewOllamaSummarizer creates a new OllamaSummarizer with the given endpoint,
// token, and model name. The client itself is configured from the environment
// at Summarize time; endpoint and token are currently unused.
func NewOllamaSummarizer(endpoint, token, model string) *OllamaSummarizer {
	return &OllamaSummarizer{
		endpoint: endpoint,
		token:    token,
		model:    model,
	}
}

// Summarize sends the prompt to an Ollama endpoint (discovered via the
// environment, e.g. OLLAMA_HOST) and returns the concatenated streamed
// response.
func (o *OllamaSummarizer) Summarize(fullPrompt string) (string, error) {
	// Check if required parameters are set.
	if o.model == "" {
		return "", errors.New("Ollama model not set")
	}

	// Fixed: the error from ClientFromEnvironment was previously discarded,
	// which could leave client nil and panic inside Generate.
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return "", fmt.Errorf("creating client: %w", err)
	}

	req := &api.GenerateRequest{
		Model:  o.model,
		Prompt: fullPrompt,
		Stream: nil, // nil keeps the client's default streaming behavior
	}

	var result strings.Builder
	err = client.Generate(context.Background(), req, func(resp api.GenerateResponse) error {
		result.WriteString(resp.Response)
		return nil
	})
	if err != nil {
		return "", err
	}
	return result.String(), nil
}

// AnthropicSummarizer implements the Summarizer interface for the Anthropic
// API.
type AnthropicSummarizer struct {
	client *anthropic.Client
	model  string
}

// NewAnthropicSummarizer creates a new AnthropicSummarizer for the given
// model. The API key is read from the ANTHROPIC_API_KEY environment variable.
func NewAnthropicSummarizer(model string) *AnthropicSummarizer {
	client := anthropic.NewClient(
		option.WithAPIKey(os.Getenv("ANTHROPIC_API_KEY")),
	)
	return &AnthropicSummarizer{
		client: &client,
		model:  model,
	}
}

// Summarize sends the prompt to the Anthropic Messages API and returns a
// formatted dump of the response content.
func (a *AnthropicSummarizer) Summarize(fullPrompt string) (string, error) {
	// Check if required parameters are set.
	if a.model == "" {
		return "", errors.New("Anthropic model not set")
	}

	message, err := a.client.Messages.New(context.Background(), anthropic.MessageNewParams{
		Model:     anthropic.Model(a.model),
		MaxTokens: 10000,
		Messages: []anthropic.MessageParam{
			anthropic.NewUserMessage(anthropic.NewTextBlock(fullPrompt)),
		},
	})
	if err != nil {
		// Fixed: previously returned the debug placeholder "Blew up here"
		// alongside the error; on failure the string result must be empty.
		return "", err
	}
	// NOTE(review): %+v dumps the content block structs rather than
	// extracting their text; consider concatenating the text blocks instead.
	return fmt.Sprintf("%+v\n", message.Content), nil
}

// buildPrompt constructs the full prompt from the base prompt, the employee's
// pull requests (grouped by repository), and their issues. The tasks
// parameter is accepted for interface stability but is currently unused. As a
// debugging aid, the finished prompt is also written to
// prompt-<employee>-<unix-timestamp>.json in the working directory.
//
// NOTE(review): Go map iteration order is random, so repository sections
// appear in a different order on each run; sort the keys if deterministic
// output matters.
func buildPrompt(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string) string {
	// Build the prompt with a Builder instead of repeated += (quadratic).
	var b strings.Builder
	b.WriteString(prompt)
	fmt.Fprintf(&b, "\n\nHere's the PRs and Tickets for the employee %s:\n\n", employeename)
	for repo, prList := range prs {
		fmt.Fprintf(&b, "Repository: %s\n", repo)
		for _, pr := range prList {
			fmt.Fprintf(&b, "- Title: %s\n", pr.Title)
			fmt.Fprintf(&b, "  Body: %s\n", pr.Body)
		}
	}
	b.WriteString("Issues:\n")
	for _, issue := range issues {
		fmt.Fprintf(&b, "Summary: %s\n", issue.Summary)
		fmt.Fprintf(&b, "Description: %s\n", issue.Description)
		b.WriteString("--------\n")
	}
	fullPrompt := b.String()

	// Save the prompt to a file for debugging.
	promptf, err := os.Create(fmt.Sprintf("prompt-%s-%d.json", employeename, time.Now().Unix()))
	if err != nil {
		fmt.Println(fmt.Errorf("error creating PR file: %w", err))
		os.Exit(1)
	}
	defer promptf.Close()
	// Fixed: the write error was previously ignored; the debug file is
	// best-effort, so report the failure without aborting.
	if _, err := promptf.WriteString(fullPrompt); err != nil {
		fmt.Println(fmt.Errorf("error writing prompt file: %w", err))
	}

	return fullPrompt
}

// SummarizeData builds the prompt for the given employee's contributions and
// delegates to the provided Summarizer implementation. It returns the
// generated summary or the summarizer's error.
func SummarizeData(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string, summarizer Summarizer) (string, error) {
	// Build the prompt from all contribution sources.
	fullPrompt := buildPrompt(employeename, prs, issues, tasks, prompt)

	// Always call the summarizer's Summarize method.
	result, err := summarizer.Summarize(fullPrompt)
	if err != nil {
		return "", err
	}
	return result, nil
}