package main

import (
	"context"
	"fmt"
	"os"
	"time"

	"o5r.ca/autocrossbow/contributions"
	"o5r.ca/autocrossbow/issues"
	"o5r.ca/autocrossbow/issues/vikunja"

	"github.com/ollama/ollama/api"
)
const defaultPrompt = `I will provide you, for a given period, with an employee name and a list of Pull Request titles and summaries split by repository, and a list of Jira Issues the employee has worked on. I may also provide, optionally, the employee's self-assessment. If I do, integrate that.

I'd like you to summarize the employee's accomplishments for the quarter.
I'd like the summary of the accomplishments to be in prose form, in a few paragraphs separated based on areas of work. Keep answers to 500 words for the summary.`

// Summarizer defines the contract implemented by every summarization backend.
type Summarizer interface {
	Summarize(prompt string, endpoint string, token string, model string) (string, error)
}
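// stubSummarizer is a minimal illustrative sketch of an alternative backend:
// it satisfies Summarizer without contacting any model endpoint, which makes
// it handy for tests or dry runs. It is hypothetical and not referenced
// anywhere else in this file.
type stubSummarizer struct{}

// Summarize returns a canned string instead of querying a model.
func (s *stubSummarizer) Summarize(prompt string, endpoint string, token string, model string) (string, error) {
	return fmt.Sprintf("stub summary: %d prompt bytes for model %q", len(prompt), model), nil
}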
// OpenAISummarizer implements the Summarizer interface for OpenAI-compatible endpoints.
type OpenAISummarizer struct{}

// Summarize sends the prompt to an OpenAI-compatible endpoint for summarization.
// Note that the endpoint and token arguments are only validated here: the
// request itself goes through the Ollama client, which is configured from the
// environment (OLLAMA_HOST) rather than from these parameters.
func (o *OpenAISummarizer) Summarize(fullPrompt string, openaiEndpoint string, openaiToken string, openaiModel string) (string, error) {
	// Check that the required configuration is present
	if openaiEndpoint == "" || openaiToken == "" {
		return "", fmt.Errorf("OpenAI endpoint or token not set")
	}

	// Build the generation request
	req := api.GenerateRequest{
		Model:  openaiModel,
		Prompt: fullPrompt,
		Stream: nil,
	}

	// Use the Ollama client to generate the response
	ctx := context.Background()
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return "", fmt.Errorf("error creating Ollama client: %w", err)
	}

	// The response arrives in chunks; accumulate them into a single string
	var result string
	err = client.Generate(ctx, &req, func(resp api.GenerateResponse) error {
		result += resp.Response
		return nil
	})
	if err != nil {
		return "", err
	}

	return result, nil
}
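// A note on endpoints: api.ClientFromEnvironment configures the client from
// the OLLAMA_HOST environment variable (falling back to the local default),
// so the endpoint argument above is checked but never used for the actual
// request. A hypothetical way to honour it, sketched only in outline here,
// would be to build the client explicitly with api.NewClient from the same
// Ollama package:
//
//	base, err := url.Parse(endpoint)                  // requires "net/url"
//	if err != nil {
//		return "", err
//	}
//	client := api.NewClient(base, http.DefaultClient) // requires "net/http"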
// OllamaSummarizer implements the Summarizer interface for Ollama endpoints.
type OllamaSummarizer struct{}

// Summarize sends the prompt to an Ollama endpoint for summarization. As with
// OpenAISummarizer, the client is configured from the environment
// (OLLAMA_HOST), so the endpoint and token arguments are currently unused.
func (o *OllamaSummarizer) Summarize(fullPrompt string, ollamaEndpoint string, ollamaToken string, ollamaModel string) (string, error) {
	// Check that the required parameters are set
	if ollamaModel == "" {
		return "", fmt.Errorf("Ollama model not set")
	}

	// Create the client and the generation request
	ctx := context.Background()
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return "", fmt.Errorf("error creating Ollama client: %w", err)
	}

	req := &api.GenerateRequest{
		Model:  ollamaModel,
		Prompt: fullPrompt,
		Stream: nil,
	}

	// The response arrives in chunks; accumulate them into a single string
	var result string
	err = client.Generate(ctx, req, func(resp api.GenerateResponse) error {
		result += resp.Response
		return nil
	})
	if err != nil {
		return "", err
	}

	return result, nil
}
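// A hedged aside on streaming: with Stream left nil, the Ollama client streams
// the generation and the callback passed to Generate is invoked once per
// chunk, which is why both implementations accumulate resp.Response into
// result. A caller that wanted a single, complete response could instead set
// the field to a pointer, for example (illustrative only):
//
//	stream := false
//	req.Stream = &stream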
// buildPrompt constructs the prompt string from the employee's pull requests
// and issues, and writes a copy to disk for debugging. The Vikunja tasks are
// accepted for signature symmetry with SummarizeData but are not yet folded
// into the prompt.
func buildPrompt(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string) string {
	// Build the prompt string, grouping pull requests by repository
	fullPrompt := prompt + fmt.Sprintf("\n\nHere are the PRs and Tickets for the employee %s:\n\n", employeename)
	for repo, prList := range prs {
		fullPrompt += fmt.Sprintf("Repository: %s\n", repo)
		for _, pr := range prList {
			fullPrompt += fmt.Sprintf("- Title: %s\n", pr.Title)
			fullPrompt += fmt.Sprintf(" Body: %s\n", pr.Body)
		}
	}
	fullPrompt += "Issues:\n"
	for _, issue := range issues {
		fullPrompt += fmt.Sprintf("Summary: %s\n", issue.Summary)
		fullPrompt += fmt.Sprintf("Description: %s\n", issue.Description)
		fullPrompt += "--------\n"
	}

	// Save the prompt to a plain-text file for debugging
	promptf, err := os.Create(fmt.Sprintf("prompt-%s-%d.txt", employeename, time.Now().Unix()))
	if err != nil {
		fmt.Println(fmt.Errorf("error creating prompt file: %w", err))
		os.Exit(1)
	}
	defer promptf.Close()
	if _, err := promptf.WriteString(fullPrompt); err != nil {
		fmt.Println(fmt.Errorf("error writing prompt file: %w", err))
		os.Exit(1)
	}

	return fullPrompt
}
// SummarizeData builds the prompt and sends it to the configured summarization backend.
func SummarizeData(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string, openaiEndpoint string, openaiToken string, openaiModel string, summarizer Summarizer) (string, error) {
	// Build the prompt from the collected contributions
	fullPrompt := buildPrompt(employeename, prs, issues, tasks, prompt)

	// Delegate to the provided Summarizer implementation
	result, err := summarizer.Summarize(fullPrompt, openaiEndpoint, openaiToken, openaiModel)
	if err != nil {
		return "", err
	}

	return result, nil
}
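// summarizeExample is a hypothetical, self-contained sketch of wiring
// SummarizeData together with hand-built data and the Ollama-backed
// summarizer. The employee name, repository data, endpoint, token, and model
// below are placeholders, and nothing else in this file calls this function.
func summarizeExample() {
	prs := map[string][]contributions.PullRequest{
		"autocrossbow": {
			{Title: "Add Vikunja task source", Body: "Wires Vikunja tasks into the reporting pipeline."},
		},
	}
	jiraIssues := []issues.Issue{
		{Summary: "Quarterly report tooling", Description: "Automate the accomplishment summaries."},
	}
	var tasks []vikunja.Task // accepted but not yet used by buildPrompt

	summary, err := SummarizeData("Jane Doe", prs, jiraIssues, tasks, defaultPrompt,
		"http://localhost:11434", "placeholder-token", "llama3", &OllamaSummarizer{})
	if err != nil {
		fmt.Println(fmt.Errorf("error summarizing data: %w", err))
		return
	}
	fmt.Println(summary)
}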