refactor: simplify OpenAI and Ollama summarizer implementations

Authored by Olivier Tremblay on 2025-11-16 16:40:17 -05:00; committed by Olivier Tremblay (aider)
parent 214cdcd2b2
commit 9e82b77276


@@ -33,37 +33,19 @@ func (o *OpenAISummarizer) Summarize(fullPrompt string, openaiEndpoint string, o
 		return "", fmt.Errorf("OpenAI endpoint or token not set")
 	}
-	// Create a JSON payload for the OpenAI API
-	payload := struct {
-		Model    string `json:"model"`
-		Messages []struct {
-			Role    string `json:"role"`
-			Content string `json:"content"`
-		} `json:"messages"`
-	}{
-		Model: openaiModel,
-		Messages: []struct {
-			Role    string `json:"role"`
-			Content string `json:"content"`
-		}{{Role: "system", Content: fullPrompt}},
-	}
-	// Create a POST request to the OpenAI endpoint with JSON body
-	req, err := api.GenerateRequest{
+	req := api.GenerateRequest{
 		Model:  openaiModel,
 		Prompt: fullPrompt,
 		Stream: nil,
 	}
-	if err != nil {
-		return "", err
-	}
 	// Use the Ollama client to generate the response
 	ctx := context.Background()
-	client := api.ClientFromEnvironment()
+	client, _ := api.ClientFromEnvironment()
 	var result string
-	err = client.Generate(ctx, &req, func(resp api.GenerateResponse) error {
+	err := client.Generate(ctx, &req, func(resp api.GenerateResponse) error {
 		result += resp.Response
 		return nil
 	})
@@ -86,7 +68,7 @@ func (o *OllamaSummarizer) Summarize(fullPrompt string, ollamaEndpoint string, o
 	// Create the request
 	ctx := context.Background()
-	client := api.ClientFromEnvironment()
+	client, _ := api.ClientFromEnvironment()
 	req := &api.GenerateRequest{
 		Model: ollamaModel,
@@ -123,7 +105,7 @@ func buildPrompt(employeename string, prs map[string][]contributions.PullRequest
 		fullPrompt += fmt.Sprintf("Description: %s\n", issue.Description)
 		fullPrompt += "--------\n"
 	}
 	// Save prompt to file for debugging
 	promptf, err := os.Create(fmt.Sprintf("prompt-%s-%d.json", employeename, time.Now().Unix()))
 	if err != nil {
@@ -132,7 +114,7 @@ func buildPrompt(employeename string, prs map[string][]contributions.PullRequest
 	}
 	promptf.WriteString(fullPrompt)
 	defer promptf.Close()
 	return fullPrompt
 }
@@ -140,12 +122,12 @@ func buildPrompt(employeename string, prs map[string][]contributions.PullRequest
 func SummarizeData(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string, openaiEndpoint string, openaiToken string, openaiModel string, summarizer Summarizer) (string, error) {
 	// Build the prompt
 	fullPrompt := buildPrompt(employeename, prs, issues, tasks, prompt)
 	// Always call the summarizer's Summarize method
 	result, err := summarizer.Summarize(fullPrompt, openaiEndpoint, openaiToken, openaiModel)
 	if err != nil {
 		return "", err
 	}
 	return result, nil
 }
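
After this change, both summarizers drive the Ollama Go client instead of hand-building an OpenAI chat payload. The following is a minimal, self-contained sketch of that generate path, not the file as committed: it assumes the api import is the Ollama client library (github.com/ollama/ollama/api), that the endpoint is taken from OLLAMA_HOST, and a placeholder model name ("llama3"); unlike the committed code it also checks the error from ClientFromEnvironment so the sketch stands on its own.

package main

import (
	"context"
	"fmt"

	"github.com/ollama/ollama/api"
)

// summarize sends the prompt to the Ollama server named by OLLAMA_HOST and
// concatenates the streamed response chunks into a single string.
func summarize(fullPrompt, model string) (string, error) {
	// The committed code discards this error; the sketch handles it instead.
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return "", err
	}

	req := api.GenerateRequest{
		Model:  model,
		Prompt: fullPrompt,
		Stream: nil, // nil keeps the client's default; chunks still arrive via the callback
	}

	var result string
	// Generate calls the callback once per response chunk; appending each
	// Response field yields the full completion.
	err = client.Generate(context.Background(), &req, func(resp api.GenerateResponse) error {
		result += resp.Response
		return nil
	})
	if err != nil {
		return "", err
	}
	return result, nil
}

func main() {
	// "llama3" is a placeholder model name for illustration only.
	summary, err := summarize("Summarize this week's contributions: ...", "llama3")
	if err != nil {
		fmt.Println("summarize failed:", err)
		return
	}
	fmt.Println(summary)
}

The sketch's one deliberate deviation from the diff is that error check: the commit discards it with client, _ := api.ClientFromEnvironment(), which is harmless only as long as that call never fails for the OLLAMA_HOST value in use.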