refactor: simplify Summarizer interface by moving endpoint, token, and model to struct properties

Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat>
Author: Olivier Tremblay 2025-11-16 16:41:11 -05:00
parent 9e82b77276
commit bffdff73a4


@@ -20,30 +20,42 @@ I'd like the summary for the accomplishments to be in prose form, in a few parag
 // Summarizer interface defines the contract for summarization implementations
 type Summarizer interface {
-    Summarize(prompt string, endpoint string, token string, model string) (string, error)
+    Summarize(prompt string) (string, error)
 }
 // OpenAISummarizer implements the Summarizer interface for OpenAI-compatible endpoints
-type OpenAISummarizer struct{}
+type OpenAISummarizer struct {
+    endpoint string
+    token string
+    model string
+}
+// NewOpenAISummarizer creates a new OpenAISummarizer with the given parameters
+func NewOpenAISummarizer(endpoint, token, model string) *OpenAISummarizer {
+    return &OpenAISummarizer{
+        endpoint: endpoint,
+        token: token,
+        model: model,
+    }
+}
 // Summarize sends the prompt to an OpenAI-compatible endpoint for summarization
-func (o *OpenAISummarizer) Summarize(fullPrompt string, openaiEndpoint string, openaiToken string, openaiModel string) (string, error) {
+func (o *OpenAISummarizer) Summarize(fullPrompt string) (string, error) {
     // Check if required environment variables are set
-    if openaiEndpoint == "" || openaiToken == "" {
+    if o.endpoint == "" || o.token == "" {
         return "", fmt.Errorf("OpenAI endpoint or token not set")
     }
-    // Create a POST request to the OpenAI endpoint with JSON body
+    // Create the request
+    ctx := context.Background()
+    client, _ := api.ClientFromEnvironment()
     req := api.GenerateRequest{
-        Model: openaiModel,
+        Model: o.model,
         Prompt: fullPrompt,
         Stream: nil,
     }
     // Use the Ollama client to generate the response
-    ctx := context.Background()
-    client, _ := api.ClientFromEnvironment()
     var result string
     err := client.Generate(ctx, &req, func(resp api.GenerateResponse) error {
         result += resp.Response
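The practical effect of this hunk on call sites: configuration that used to ride along with every Summarize call is now bound once through the constructor. A minimal sketch of the resulting pattern, assuming code in the same package; the endpoint, token, and model literals below are placeholders, not values from this repository:

// Sketch only: illustrates the new construct-once, call-many pattern.
// The endpoint, token, and model literals are placeholder values.
func summarizeWithOpenAI(fullPrompt string) (string, error) {
    s := NewOpenAISummarizer("https://api.openai.com/v1", "sk-placeholder", "gpt-4o-mini")
    // Callers now pass only the prompt; the rest lives on the struct.
    return s.Summarize(fullPrompt)
}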
@@ -57,12 +69,25 @@ func (o *OpenAISummarizer) Summarize(fullPrompt string, openaiEndpoint string, o
 }
 // OllamaSummarizer implements the Summarizer interface for Ollama endpoints
-type OllamaSummarizer struct{}
+type OllamaSummarizer struct {
+    endpoint string
+    token string
+    model string
+}
+// NewOllamaSummarizer creates a new OllamaSummarizer with the given parameters
+func NewOllamaSummarizer(endpoint, token, model string) *OllamaSummarizer {
+    return &OllamaSummarizer{
+        endpoint: endpoint,
+        token: token,
+        model: model,
+    }
+}
 // Summarize sends the prompt to an Ollama endpoint for summarization
-func (o *OllamaSummarizer) Summarize(fullPrompt string, ollamaEndpoint string, ollamaToken string, ollamaModel string) (string, error) {
+func (o *OllamaSummarizer) Summarize(fullPrompt string) (string, error) {
     // Check if required parameters are set
-    if ollamaModel == "" {
+    if o.model == "" {
         return "", fmt.Errorf("Ollama model not set")
     }
@@ -71,7 +96,7 @@ func (o *OllamaSummarizer) Summarize(fullPrompt string, ollamaEndpoint string, o
     client, _ := api.ClientFromEnvironment()
     req := &api.GenerateRequest{
-        Model: ollamaModel,
+        Model: o.model,
         Prompt: fullPrompt,
         Stream: nil,
     }
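Because both OllamaSummarizer and OpenAISummarizer now satisfy the same single-method interface, an implementation can be chosen once and passed around as a plain Summarizer. A hedged sketch of such a selection helper; the helper and its provider strings are illustrative and not part of this commit:

// Sketch only: picks an implementation behind the shared Summarizer
// interface. The provider strings are illustrative, not from the repo.
func newSummarizer(provider, endpoint, token, model string) Summarizer {
    if provider == "ollama" {
        return NewOllamaSummarizer(endpoint, token, model)
    }
    return NewOpenAISummarizer(endpoint, token, model)
}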
@@ -119,12 +144,12 @@ func buildPrompt(employeename string, prs map[string][]contributions.PullRequest
 }
 // SummarizeData builds the prompt and calls the summarization endpoint
-func SummarizeData(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string, openaiEndpoint string, openaiToken string, openaiModel string, summarizer Summarizer) (string, error) {
+func SummarizeData(employeename string, prs map[string][]contributions.PullRequest, issues []issues.Issue, tasks []vikunja.Task, prompt string, summarizer Summarizer) (string, error) {
     // Build the prompt
     fullPrompt := buildPrompt(employeename, prs, issues, tasks, prompt)
     // Always call the summarizer's Summarize method
-    result, err := summarizer.Summarize(fullPrompt, openaiEndpoint, openaiToken, openaiModel)
+    result, err := summarizer.Summarize(fullPrompt)
     if err != nil {
         return "", err
     }
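With the trimmed SummarizeData signature, the OpenAI-specific parameters no longer thread through the prompt-building path; the summarizer arrives pre-configured. A minimal sketch of a caller, assuming the same package and imports; the Ollama endpoint and model name are placeholders, and the data parameters stand in for values gathered elsewhere in the program:

// Sketch only: calls the refactored SummarizeData with an injected,
// pre-configured summarizer. Endpoint and model are placeholder values.
func buildReport(name string, prs map[string][]contributions.PullRequest,
    iss []issues.Issue, tasks []vikunja.Task, promptText string) (string, error) {
    s := NewOllamaSummarizer("http://localhost:11434", "", "llama3")
    return SummarizeData(name, prs, iss, tasks, promptText, s)
}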