initial commit
yessss feat: add Vikunja issues provider and integrate tasks retrieval Co-authored-by: aider (openai/qwen2.5-coder:32b-instruct-q4_0) <aider@aider.chat> feat: add Vikunja handler to main file Co-authored-by: aider (openai/qwen2.5-coder:32b-instruct-q4_0) <aider@aider.chat> refactor: Move Jira user from positional arg to flag parameter Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> fix: replace manual argument parsing with Go flags package Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> feat: replace positional arguments with flags in main function Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> fix: enforce mandatory start/end dates and at least one identifier flag Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> fix: only call relevant tools based on flag presence Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> feat: add vikunja task support to summarize function feat: add flag to pass custom prompt with default prompt as fallback Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> Minor fix fix: comment out vikunja tasks integration for now fix: check for OpenAI environment variables before calling Summarize Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> feat: add timestamped prompt file generation for summarization refactor: remove duplicate os.Getenv calls for OpenAI env vars in SummarizeData Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> refactor: pass OpenAI env vars from main to SummarizeData instead of calling os.Getenv inside the function Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> feat: add file prompt support with fallback to literal prompt string Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> fix: remove single quotes around JIRA user assignee and add debug printing 
feat: implement accountID lookup for Jira user search in GetIssues function Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> fix: remove debug print statement in Jira user search function fix: Replace user parameter with JIRA_USER environment variable in SetBasicAuth calls Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> fix: add missing os import in summarize.go refactor: split SummarizeData into buildPrompt and callSummarizationEndpoint functions Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> fix: always call SummarizeData to ensure prompt file is created for debugging, but only call OpenAI endpoint if env vars are set Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> refactor: extract callSummarizationEndpoint into Summarizer interface for multiple implementations Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> refactor: move OpenAI variable checks into OpenAISummarizer's Summarize method and always call the summarizer's method Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> feat: implement Ollama Summarizer using official SDK as per article example Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> refactor: simplify OpenAI and Ollama summarizer implementations refactor: simplify Summarizer interface by moving endpoint, token, and model to struct properties Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> feat: add Ollama summarizer support to main command fix: remove extraneous parameters from SummarizeData call and use properly initialized OllamaSummarizer Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> feat: add AnthropicSummarizer implementation using anthropic-sdk-go package Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> fix: change summarizer call condition to check if 
summarizer is nil instead of environment variables Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> fix: correct client reference and update Anthropic summarizer implementation feat: extract PR rendering to String() method using text/template Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat> Some cleanup
This commit is contained in:
commit
a2c8207998
9 changed files with 726 additions and 0 deletions
172
cmd/acb/main.go
Normal file
172
cmd/acb/main.go
Normal file
|
|
@ -0,0 +1,172 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"o5r.ca/autocrossbow/contributions"
|
||||
"o5r.ca/autocrossbow/issues"
|
||||
"o5r.ca/autocrossbow/issues/vikunja"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Define flags
|
||||
var jiraUser = flag.String("jira-user", "", "Jira user to query")
|
||||
var proj = flag.String("proj", "", "Project name")
|
||||
var ghusername = flag.String("ghusername", "", "GitHub username")
|
||||
var start = flag.String("start", "", "Start date")
|
||||
var end = flag.String("end", "", "End date")
|
||||
var employeename = flag.String("employeename", "", "Employee name")
|
||||
var prompt = flag.String("prompt", defaultPrompt, "Custom prompt to use for summarization")
|
||||
|
||||
flag.Parse()
|
||||
|
||||
// Check required flags
|
||||
if *start == "" || *end == "" {
|
||||
fmt.Println("Error: start and end dates are required")
|
||||
flag.Usage()
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Check that at least one of employee name, jira user, project, or ghusername is provided
|
||||
if *employeename == "" && *jiraUser == "" && *proj == "" && *ghusername == "" {
|
||||
fmt.Println("Error: at least one of employee name, jira user, project, or ghusername must be provided")
|
||||
flag.Usage()
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Check if the prompt is a file path and read it if it exists
|
||||
finalPrompt := *prompt
|
||||
if _, err := os.Stat(*prompt); err == nil {
|
||||
// File exists, read it
|
||||
content, err := os.ReadFile(*prompt)
|
||||
if err != nil {
|
||||
fmt.Printf("Error reading prompt file: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
finalPrompt = string(content)
|
||||
}
|
||||
|
||||
var prs map[string][]contributions.PullRequest
|
||||
var issues []issues.Issue
|
||||
var vikunjaTasks []vikunja.Task
|
||||
|
||||
// Only call tools if relevant flags are set
|
||||
if *proj != "" || *ghusername != "" {
|
||||
prs = DoPrs(*proj, *ghusername, *start, *end)
|
||||
}
|
||||
|
||||
if *jiraUser != "" {
|
||||
issues = DoJira(*start, *end, *jiraUser)
|
||||
}
|
||||
|
||||
// if *employeename != "" {
|
||||
// vikunjaTasks = DoVikunja(*start, *end)
|
||||
// }
|
||||
|
||||
// Get environment variables
|
||||
openaiEndpoint := os.Getenv("OPENAI_ENDPOINT")
|
||||
openaiToken := os.Getenv("OPENAI_TOKEN")
|
||||
openaiModel := os.Getenv("OPENAI_MODEL")
|
||||
anthropicModel := os.Getenv("ANTHROPIC_MODEL")
|
||||
|
||||
// Create appropriate summarizer based on available environment variables
|
||||
var summarizer Summarizer
|
||||
if openaiEndpoint != "" && openaiToken != "" {
|
||||
// Use OpenAI summarizer
|
||||
summarizer = NewOpenAISummarizer(openaiEndpoint, openaiToken, openaiModel)
|
||||
} else if anthropicModel != "" {
|
||||
// Use Anthropic summarizer
|
||||
summarizer = NewAnthropicSummarizer(anthropicModel)
|
||||
} else {
|
||||
// Use Ollama summarizer as fallback
|
||||
summarizer = NewOllamaSummarizer("", "", openaiModel)
|
||||
}
|
||||
|
||||
// Always call SummarizeData to ensure prompt file is created for debugging
|
||||
summ, err := SummarizeData(*employeename, prs, issues, vikunjaTasks, finalPrompt, summarizer)
|
||||
if err != nil {
|
||||
fmt.Println(fmt.Errorf("error getting PRs: %w", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Only call summarization endpoint if we have a valid summarizer
|
||||
if summarizer != nil {
|
||||
fmt.Println(summ)
|
||||
} else {
|
||||
fmt.Println("No summarization endpoint configured, but prompt file was created for debugging")
|
||||
}
|
||||
}
|
||||
|
||||
func DoPrs(proj, ghusername, start, end string) map[string][]contributions.PullRequest {
|
||||
prs, err := contributions.GetPRs(proj, ghusername, start, end)
|
||||
if err != nil {
|
||||
fmt.Println(fmt.Errorf("error getting PRs: %w", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
ghf, err := os.Create(fmt.Sprintf("gh-%s-%s-%s-%s-%d.json", proj, ghusername, start, end, time.Now().Unix()))
|
||||
if err != nil {
|
||||
fmt.Println(fmt.Errorf("error creating PR file: %w", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
enc := json.NewEncoder(ghf)
|
||||
err = enc.Encode(prs)
|
||||
if err != nil {
|
||||
fmt.Println(fmt.Errorf("error writing out PRs: %w", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
return prs
|
||||
}
|
||||
|
||||
func DoJira(start, end string, user string) []issues.Issue {
|
||||
host := os.Getenv("JIRA_HOST")
|
||||
tasks, err := issues.GetIssues(host, user, start, end)
|
||||
if err != nil {
|
||||
fmt.Println(fmt.Errorf("error getting Vikunja tasks: %w", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
vf, err := os.Create(fmt.Sprintf("jira-%s-%s-%s-%s-%d.json", host, user, start, end, time.Now().Unix()))
|
||||
if err != nil {
|
||||
fmt.Println(fmt.Errorf("error creating Jira file: %w", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
defer vf.Close()
|
||||
|
||||
enc := json.NewEncoder(vf)
|
||||
err = enc.Encode(tasks)
|
||||
if err != nil {
|
||||
fmt.Println(fmt.Errorf("error writing out Jira tasks: %w", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
return tasks
|
||||
}
|
||||
|
||||
func DoVikunja(start, end string) []vikunja.Task {
|
||||
host := os.Getenv("VIKUNJA_HOST")
|
||||
user := os.Getenv("VIKUNJA_USER")
|
||||
tasks, err := vikunja.GetTasks(host, user, start, end)
|
||||
if err != nil {
|
||||
fmt.Println(fmt.Errorf("error getting Vikunja tasks: %w", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
vf, err := os.Create(fmt.Sprintf("vikunja-%s-%s-%s-%s-%d.json", host, user, start, end, time.Now().Unix()))
|
||||
if err != nil {
|
||||
fmt.Println(fmt.Errorf("error creating Vikunja file: %w", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
defer vf.Close()
|
||||
|
||||
enc := json.NewEncoder(vf)
|
||||
err = enc.Encode(tasks)
|
||||
if err != nil {
|
||||
fmt.Println(fmt.Errorf("error writing out Vikunja tasks: %w", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
return tasks
|
||||
}
|
||||
197
cmd/acb/summarize.go
Normal file
197
cmd/acb/summarize.go
Normal file
|
|
@ -0,0 +1,197 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"o5r.ca/autocrossbow/contributions"
|
||||
"o5r.ca/autocrossbow/issues"
|
||||
"o5r.ca/autocrossbow/issues/vikunja"
|
||||
|
||||
"github.com/anthropics/anthropic-sdk-go"
|
||||
"github.com/anthropics/anthropic-sdk-go/option"
|
||||
"github.com/ollama/ollama/api"
|
||||
)
|
||||
|
||||
// defaultPrompt is the summarization prompt used when the -prompt flag is not
// supplied (or does not name a readable file). It asks the model to summarize
// an employee's PRs and Jira issues for the period in prose, capped at 500 words.
const defaultPrompt = `I will provide you, for a given period, with an employee name and a list of Pull Request titles and summaries split by repository, and a list of Jira Issues an employee has worked on. I may also provide, optionally, the employee's self-assessment. If I do, integrate that.

I'd like you to summarize the employee's accomplishments for the quarter
I'd like the summary for the accomplishments to be in prose form, in a few paragraphs separated based on areas of work. Keep answers to 500 words for the summary.`
|
||||
|
||||
// Summarizer interface defines the contract for summarization implementations.
type Summarizer interface {
	// Summarize sends the fully-built prompt to a model backend and returns
	// the generated summary text, or an error if the backend call fails.
	Summarize(prompt string) (string, error)
}
|
||||
|
||||
// OpenAISummarizer implements the Summarizer interface for OpenAI-compatible endpoints
type OpenAISummarizer struct {
	endpoint string // OpenAI-compatible endpoint URL; NOTE(review): only validated in Summarize, never actually used for the request — confirm intent
	token    string // API token; NOTE(review): only validated in Summarize, never sent — confirm intent
	model    string // model name passed to the generate request
}
|
||||
|
||||
// NewOpenAISummarizer creates a new OpenAISummarizer with the given parameters
|
||||
func NewOpenAISummarizer(endpoint, token, model string) *OpenAISummarizer {
|
||||
return &OpenAISummarizer{
|
||||
endpoint: endpoint,
|
||||
token: token,
|
||||
model: model,
|
||||
}
|
||||
}
|
||||
|
||||
// Summarize sends the prompt to an OpenAI-compatible endpoint for summarization
|
||||
func (o *OpenAISummarizer) Summarize(fullPrompt string) (string, error) {
|
||||
// Check if required environment variables are set
|
||||
if o.endpoint == "" || o.token == "" {
|
||||
return "", fmt.Errorf("OpenAI endpoint or token not set")
|
||||
}
|
||||
|
||||
// Create the request
|
||||
ctx := context.Background()
|
||||
client, _ := api.ClientFromEnvironment()
|
||||
|
||||
req := api.GenerateRequest{
|
||||
Model: o.model,
|
||||
Prompt: fullPrompt,
|
||||
Stream: nil,
|
||||
}
|
||||
|
||||
var result string
|
||||
err := client.Generate(ctx, &req, func(resp api.GenerateResponse) error {
|
||||
result += resp.Response
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// OllamaSummarizer implements the Summarizer interface for Ollama endpoints
type OllamaSummarizer struct {
	endpoint string // NOTE(review): unused — the client comes from api.ClientFromEnvironment
	token    string // NOTE(review): unused — Ollama client takes no token here
	model    string // Ollama model name; required by Summarize
}
|
||||
|
||||
// NewOllamaSummarizer creates a new OllamaSummarizer with the given parameters
|
||||
func NewOllamaSummarizer(endpoint, token, model string) *OllamaSummarizer {
|
||||
return &OllamaSummarizer{
|
||||
endpoint: endpoint,
|
||||
token: token,
|
||||
model: model,
|
||||
}
|
||||
}
|
||||
|
||||
// Summarize sends the prompt to an Ollama endpoint for summarization
|
||||
func (o *OllamaSummarizer) Summarize(fullPrompt string) (string, error) {
|
||||
// Check if required parameters are set
|
||||
if o.model == "" {
|
||||
return "", fmt.Errorf("Ollama model not set")
|
||||
}
|
||||
|
||||
// Create the request
|
||||
ctx := context.Background()
|
||||
client, _ := api.ClientFromEnvironment()
|
||||
|
||||
req := &api.GenerateRequest{
|
||||
Model: o.model,
|
||||
Prompt: fullPrompt,
|
||||
Stream: nil,
|
||||
}
|
||||
|
||||
var result string
|
||||
err := client.Generate(ctx, req, func(resp api.GenerateResponse) error {
|
||||
result += resp.Response
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// AnthropicSummarizer implements the Summarizer interface for Anthropic API
type AnthropicSummarizer struct {
	client *anthropic.Client // SDK client, authenticated via ANTHROPIC_API_KEY in the constructor
	model  string            // Anthropic model identifier; required by Summarize
}
|
||||
|
||||
// NewAnthropicSummarizer creates a new AnthropicSummarizer with the given parameters
|
||||
func NewAnthropicSummarizer(model string) *AnthropicSummarizer {
|
||||
// Create the Anthropic client with the API key from environment
|
||||
client := anthropic.NewClient(
|
||||
option.WithAPIKey(os.Getenv("ANTHROPIC_API_KEY")),
|
||||
)
|
||||
|
||||
return &AnthropicSummarizer{
|
||||
client: &client,
|
||||
model: model,
|
||||
}
|
||||
}
|
||||
|
||||
// Summarize sends the prompt to the Anthropic API for summarization
|
||||
func (a *AnthropicSummarizer) Summarize(fullPrompt string) (string, error) {
|
||||
// Check if required parameters are set
|
||||
if a.model == "" {
|
||||
return "", fmt.Errorf("Anthropic model not set")
|
||||
}
|
||||
|
||||
// Create the request
|
||||
ctx := context.Background()
|
||||
|
||||
message, err := a.client.Messages.New(ctx, anthropic.MessageNewParams{
|
||||
Model: anthropic.Model(a.model),
|
||||
MaxTokens: 10000,
|
||||
Messages: []anthropic.MessageParam{
|
||||
anthropic.NewUserMessage(anthropic.NewTextBlock(fullPrompt)),
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return "Blew up here", err
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%+v\n", message.Content), nil
|
||||
}
|
||||
|
||||
// buildPrompt constructs the prompt string from PRs, issues, and tasks
|
||||
func buildPrompt(employeename string, prs contributions.PRMap, issues []issues.Issue, tasks []vikunja.Task, prompt string) string {
|
||||
// Build a prompt string
|
||||
fullPrompt := prompt + fmt.Sprintf("\n\nHere's the PRs and Tickets for the employee %s:\n\n", employeename)
|
||||
fullPrompt += prs.String()
|
||||
fullPrompt += "Issues:\n"
|
||||
for _, issue := range issues {
|
||||
fullPrompt += fmt.Sprintf("Summary: %s\n", issue.Summary)
|
||||
fullPrompt += fmt.Sprintf("Description: %s\n", issue.Description)
|
||||
fullPrompt += "--------\n"
|
||||
}
|
||||
|
||||
// Save prompt to file for debugging
|
||||
promptf, err := os.Create(fmt.Sprintf("prompt-%s-%d.json", employeename, time.Now().Unix()))
|
||||
if err != nil {
|
||||
fmt.Println(fmt.Errorf("error creating PR file: %w", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
promptf.WriteString(fullPrompt)
|
||||
defer promptf.Close()
|
||||
|
||||
return fullPrompt
|
||||
}
|
||||
|
||||
// SummarizeData builds the prompt and calls the summarization endpoint
|
||||
func SummarizeData(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string, summarizer Summarizer) (string, error) {
|
||||
// Build the prompt
|
||||
fullPrompt := buildPrompt(employeename, prs, issues, tasks, prompt)
|
||||
|
||||
// Always call the summarizer's Summarize method
|
||||
result, err := summarizer.Summarize(fullPrompt)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue