refactor: simplify OpenAI and Ollama summarizer implementations

Olivier Tremblay 2025-11-16 16:40:17 -05:00 committed by Olivier Tremblay (aider)
parent 214cdcd2b2
commit 9e82b77276


@@ -33,37 +33,19 @@ func (o *OpenAISummarizer) Summarize(fullPrompt string, openaiEndpoint string, o
         return "", fmt.Errorf("OpenAI endpoint or token not set")
     }
-    // Create a JSON payload for the OpenAI API
-    payload := struct {
-        Model    string `json:"model"`
-        Messages []struct {
-            Role    string `json:"role"`
-            Content string `json:"content"`
-        } `json:"messages"`
-    }{
-        Model: openaiModel,
-        Messages: []struct {
-            Role    string `json:"role"`
-            Content string `json:"content"`
-        }{{Role: "system", Content: fullPrompt}},
-    }
     // Create a POST request to the OpenAI endpoint with JSON body
-    req, err := api.GenerateRequest{
+    req := api.GenerateRequest{
         Model:  openaiModel,
         Prompt: fullPrompt,
         Stream: nil,
     }
-    if err != nil {
-        return "", err
-    }
     // Use the Ollama client to generate the response
     ctx := context.Background()
-    client := api.ClientFromEnvironment()
+    client, _ := api.ClientFromEnvironment()
     var result string
-    err = client.Generate(ctx, &req, func(resp api.GenerateResponse) error {
+    err := client.Generate(ctx, &req, func(resp api.GenerateResponse) error {
         result += resp.Response
         return nil
     })
@@ -86,7 +68,7 @@ func (o *OllamaSummarizer) Summarize(fullPrompt string, ollamaEndpoint string, o
     // Create the request
     ctx := context.Background()
-    client := api.ClientFromEnvironment()
+    client, _ := api.ClientFromEnvironment()
     req := &api.GenerateRequest{
         Model: ollamaModel,
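
For reference, below is a minimal standalone sketch of the client pattern both hunks converge on, assuming the github.com/ollama/ollama/api package imported as api in the diff. The summarize function is illustrative, not code from this repository, and unlike the committed change it checks the error that ClientFromEnvironment returns rather than discarding it with _.

package summarizer

import (
    "context"
    "fmt"

    "github.com/ollama/ollama/api"
)

// summarize mirrors the refactored flow: build an api.GenerateRequest,
// resolve a client from the environment (OLLAMA_HOST), and join the
// streamed chunks into one string through the Generate callback.
func summarize(model, fullPrompt string) (string, error) {
    client, err := api.ClientFromEnvironment() // the commit discards this error with _
    if err != nil {
        return "", err
    }

    req := &api.GenerateRequest{
        Model:  model,
        Prompt: fullPrompt,
        Stream: nil, // leave streaming at the server default; chunks arrive via the callback
    }

    var result string
    err = client.Generate(context.Background(), req, func(resp api.GenerateResponse) error {
        result += resp.Response // accumulate each streamed chunk
        return nil
    })
    if err != nil {
        return "", fmt.Errorf("generate: %w", err)
    }
    return result, nil
}

Both Summarize implementations in the diff now follow this shape; routing the OpenAI variant through the same Ollama client is what allows the hand-rolled JSON payload struct to be deleted.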