refactor: extract callSummarizationEndpoint into Summarizer interface for multiple implementations
Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat>
parent 979aa66b17
commit feb06e51ff
1 changed file with 40 additions and 32 deletions
@@ -19,38 +19,16 @@ const defaultPrompt = `I will provide you, for a given period, with an employee
 I'd like you to summarize the employee's accomplishments for the quarter
 I'd like the summary for the accomplishments to be in prose form, in a few paragraphs separated based on areas of work. Keep answers to 500 words for the summary.`
 
-// buildPrompt constructs the prompt string from PRs, issues, and tasks
-func buildPrompt(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string) string {
-    // Build a prompt string
-    fullPrompt := prompt + fmt.Sprintf("\n\nHere's the PRs and Tickets for the employee %s:\n\n", employeename)
-    for repo, prList := range prs {
-        fullPrompt += fmt.Sprintf("Repository: %s\n", repo)
-        for _, pr := range prList {
-            fullPrompt += fmt.Sprintf("- Title: %s\n", pr.Title)
-            fullPrompt += fmt.Sprintf("  Body: %s\n", pr.Body)
-        }
-    }
-    fullPrompt += "Issues:\n"
-    for _, issue := range issues {
-        fullPrompt += fmt.Sprintf("Summary: %s\n", issue.Summary)
-        fullPrompt += fmt.Sprintf("Description: %s\n", issue.Description)
-        fullPrompt += "--------\n"
-    }
+// Summarizer interface defines the contract for summarization implementations
+type Summarizer interface {
+    Summarize(prompt string, endpoint string, token string, model string) (string, error)
+}
 
-    // Save prompt to file for debugging
-    promptf, err := os.Create(fmt.Sprintf("prompt-%s-%d.json", employeename, time.Now().Unix()))
-    if err != nil {
-        fmt.Println(fmt.Errorf("error creating PR file: %w", err))
-        os.Exit(1)
-    }
-    promptf.WriteString(fullPrompt)
-    defer promptf.Close()
+// OpenAISummarizer implements the Summarizer interface for OpenAI-compatible endpoints
+type OpenAISummarizer struct{}
 
-    return fullPrompt
-}
-
-// callSummarizationEndpoint sends the prompt to an OpenAI-compatible endpoint for summarization
-func callSummarizationEndpoint(fullPrompt string, openaiEndpoint string, openaiToken string, openaiModel string) (string, error) {
+// Summarize sends the prompt to an OpenAI-compatible endpoint for summarization
+func (o *OpenAISummarizer) Summarize(fullPrompt string, openaiEndpoint string, openaiToken string, openaiModel string) (string, error) {
     // Create a JSON payload for the OpenAI API
     payload := struct {
         Model string `json:"model"`
@@ -95,14 +73,44 @@ func callSummarizationEndpoint(fullPrompt string, openaiEndpoint string, openaiT
     return string(body), nil
 }
 
+// buildPrompt constructs the prompt string from PRs, issues, and tasks
+func buildPrompt(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string) string {
+    // Build a prompt string
+    fullPrompt := prompt + fmt.Sprintf("\n\nHere's the PRs and Tickets for the employee %s:\n\n", employeename)
+    for repo, prList := range prs {
+        fullPrompt += fmt.Sprintf("Repository: %s\n", repo)
+        for _, pr := range prList {
+            fullPrompt += fmt.Sprintf("- Title: %s\n", pr.Title)
+            fullPrompt += fmt.Sprintf("  Body: %s\n", pr.Body)
+        }
+    }
+    fullPrompt += "Issues:\n"
+    for _, issue := range issues {
+        fullPrompt += fmt.Sprintf("Summary: %s\n", issue.Summary)
+        fullPrompt += fmt.Sprintf("Description: %s\n", issue.Description)
+        fullPrompt += "--------\n"
+    }
+
+    // Save prompt to file for debugging
+    promptf, err := os.Create(fmt.Sprintf("prompt-%s-%d.json", employeename, time.Now().Unix()))
+    if err != nil {
+        fmt.Println(fmt.Errorf("error creating PR file: %w", err))
+        os.Exit(1)
+    }
+    promptf.WriteString(fullPrompt)
+    defer promptf.Close()
+
+    return fullPrompt
+}
+
 // SummarizeData builds the prompt and calls the summarization endpoint
-func SummarizeData(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string, openaiEndpoint string, openaiToken string, openaiModel string) (string, error) {
+func SummarizeData(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string, openaiEndpoint string, openaiToken string, openaiModel string, summarizer Summarizer) (string, error) {
     // Build the prompt
     fullPrompt := buildPrompt(employeename, prs, issues, tasks, prompt)
 
     // Call the summarization endpoint only if OpenAI env vars are set
     if openaiEndpoint != "" && openaiToken != "" {
-        result, err := callSummarizationEndpoint(fullPrompt, openaiEndpoint, openaiToken, openaiModel)
+        result, err := summarizer.Summarize(fullPrompt, openaiEndpoint, openaiToken, openaiModel)
        if err != nil {
            return "", err
        }
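A minimal sketch, not part of the commit, of the kind of swap the extracted Summarizer interface enables: production code would pass &OpenAISummarizer{} into SummarizeData, while a test or offline run can supply its own implementation. The fakeSummarizer type and the standalone main are hypothetical, and the interface is redeclared here only so the snippet compiles on its own.

package main

import (
    "fmt"
    "strings"
)

// Summarizer mirrors the interface introduced in this commit (same method
// signature); redeclared here only to keep the sketch self-contained.
type Summarizer interface {
    Summarize(prompt string, endpoint string, token string, model string) (string, error)
}

// fakeSummarizer is a hypothetical second implementation, e.g. for tests or
// offline runs, standing in for OpenAISummarizer from the commit.
type fakeSummarizer struct{}

// Summarize returns a canned summary instead of calling an OpenAI-compatible endpoint.
func (f *fakeSummarizer) Summarize(prompt, endpoint, token, model string) (string, error) {
    return fmt.Sprintf("[fake summary: %d prompt chars, model %q]", len(prompt), model), nil
}

func main() {
    // In production code this variable would hold &OpenAISummarizer{}; the
    // fake stands in so the example runs without network access or tokens.
    var s Summarizer = &fakeSummarizer{}

    out, err := s.Summarize(strings.Repeat("PR and ticket data ", 5), "", "", "test-model")
    if err != nil {
        fmt.Println("summarize failed:", err)
        return
    }
    fmt.Println(out)
}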