autocrossbow/cmd/acb/summarize.go

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"os"

	"o5r.ca/autocrossbow/contributions"
	"o5r.ca/autocrossbow/issues"
	"o5r.ca/autocrossbow/issues/vikunja"
)
const defaultPrompt = `I will provide you, for a given period, with an employee name, a list of pull request titles and summaries split by repository, and a list of Jira issues the employee has worked on. I may also optionally provide the employee's self-assessment; if I do, integrate it.
I'd like you to summarize the employee's accomplishments for the quarter.
Write the summary in prose form, in a few paragraphs separated by area of work. Keep the summary to 500 words.`
// SummarizeData takes GitHub PRs and Jira issue data for an employee and sends it to an
// OpenAI-compatible chat-completions endpoint for summarization. Vikunja tasks are
// accepted but not yet included in the prompt.
func SummarizeData(employeeName string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string) (string, error) {
	// Build the full prompt: instructions first, then PRs grouped by repository, then issues.
	fullPrompt := prompt + fmt.Sprintf("\n\nHere are the PRs and tickets for employee %s:\n\n", employeeName)
	for repo, prList := range prs {
		fullPrompt += fmt.Sprintf("Repository: %s\n", repo)
		for _, pr := range prList {
			fullPrompt += fmt.Sprintf("- Title: %s\n", pr.Title)
			fullPrompt += fmt.Sprintf("  Body: %s\n", pr.Body)
		}
	}
	fullPrompt += "\nIssues:\n"
	for _, issue := range issues {
		fullPrompt += fmt.Sprintf("Summary: %s\n", issue.Summary)
		fullPrompt += fmt.Sprintf("Description: %s\n", issue.Description)
		fullPrompt += "--------\n"
	}
	// Get the OpenAI endpoint, token, and model from environment variables
	openaiEndpoint := os.Getenv("OPENAI_ENDPOINT")
	openaiToken := os.Getenv("OPENAI_TOKEN")
	openaiModel := os.Getenv("OPENAI_MODEL")
	if openaiEndpoint == "" || openaiToken == "" {
		return "", fmt.Errorf("OPENAI_ENDPOINT and OPENAI_TOKEN must be set in environment variables")
	}
	// Create the JSON payload for the OpenAI chat-completions API
	payload := struct {
		Model    string `json:"model"`
		Messages []struct {
			Role    string `json:"role"`
			Content string `json:"content"`
		} `json:"messages"`
	}{
		Model: openaiModel,
		Messages: []struct {
			Role    string `json:"role"`
			Content string `json:"content"`
		}{{Role: "system", Content: fullPrompt}},
	}
	jsonPayload, err := json.Marshal(payload)
	if err != nil {
		return "", err
	}
	// Create a POST request to the OpenAI endpoint with the JSON body
	req, err := http.NewRequest("POST", openaiEndpoint, bytes.NewBuffer(jsonPayload))
	if err != nil {
		return "", err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", openaiToken))
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("unexpected status %d from OpenAI endpoint: %s", resp.StatusCode, string(body))
	}
	// Return the raw response body; callers can extract the assistant message from the JSON.
	return string(body), nil
}
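
// SummarizeData returns the raw JSON body from the endpoint. parseChatContent is a
// hypothetical helper sketching how a caller might extract the assistant's text from
// that body, assuming the endpoint returns the standard OpenAI chat-completions shape
// ({"choices":[{"message":{"content":...}}]}). Callers using reasoning models may also
// want to strip <think> blocks from the content before presenting it.
func parseChatContent(raw string) (string, error) {
	var parsed struct {
		Choices []struct {
			Message struct {
				Content string `json:"content"`
			} `json:"message"`
		} `json:"choices"`
	}
	if err := json.Unmarshal([]byte(raw), &parsed); err != nil {
		return "", err
	}
	if len(parsed.Choices) == 0 {
		return "", fmt.Errorf("response contained no choices")
	}
	return parsed.Choices[0].Message.Content, nil
}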