From 214cdcd2b2fb0140295d12af65c5cb077faad11f Mon Sep 17 00:00:00 2001 From: Olivier Tremblay Date: Sun, 16 Nov 2025 16:21:09 -0500 Subject: [PATCH] feat: implement Ollama Summarizer using official SDK as per article example Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) --- cmd/acb/summarize.go | 78 ++++++++++++++++++++++++++++++-------------- 1 file changed, 53 insertions(+), 25 deletions(-) diff --git a/cmd/acb/summarize.go b/cmd/acb/summarize.go index 915e87d..ad41c9f 100644 --- a/cmd/acb/summarize.go +++ b/cmd/acb/summarize.go @@ -1,17 +1,16 @@ package main import ( - "bytes" - "encoding/json" + "context" "fmt" - "io" - "net/http" "os" "time" "o5r.ca/autocrossbow/contributions" "o5r.ca/autocrossbow/issues" "o5r.ca/autocrossbow/issues/vikunja" + + "github.com/ollama/ollama/api" ) const defaultPrompt = `I will provide you, for a given period, with an employee name and a list of Pull Request titles and summaries split by repository, and a list of Jira Issues an employee has worked on. I may also provide, optionally, the employee's self-assessment. If I do, integrate that. 
@@ -49,32 +48,65 @@ func (o *OpenAISummarizer) Summarize(fullPrompt string, openaiEndpoint string, o
 		}{{Role: "system", Content: fullPrompt}},
 	}
 
+	_ = payload // NOTE(review): payload is no longer marshalled/sent; silences "declared and not used" until the literal above is removed
-	jsonPayload, err := json.Marshal(payload)
-	fmt.Println(string(jsonPayload))
-	if err != nil {
-		return "", err
-	}
-	// Create a POST request to the OpenAI endpoint with JSON body
-	req, err := http.NewRequest("POST", openaiEndpoint, bytes.NewBuffer(jsonPayload))
-	if err != nil {
-		return "", err
+	client, err := api.ClientFromEnvironment()
+	req := api.GenerateRequest{
+		Model:  openaiModel,
+		Prompt: fullPrompt,
+		Stream: nil,
 	}
-	req.Header.Set("Content-Type", "application/json")
-	req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", openaiToken))
-
-	client := &http.Client{}
-	resp, err := client.Do(req)
-	if err != nil {
-		return "", err
-	}
-	defer resp.Body.Close()
-
-	body, err := io.ReadAll(resp.Body)
 	if err != nil {
 		return "", err
 	}
 
-	return string(body), nil
+	// Use the Ollama client to generate the response
+	ctx := context.Background()
+
+	var result string
+	err = client.Generate(ctx, &req, func(resp api.GenerateResponse) error {
+		result += resp.Response
+		return nil
+	})
+	if err != nil {
+		return "", err
+	}
+
+	return result, nil
+}
+
+// OllamaSummarizer implements the Summarizer interface for Ollama endpoints
+type OllamaSummarizer struct{}
+
+// Summarize sends the prompt to an Ollama endpoint for summarization
+func (o *OllamaSummarizer) Summarize(fullPrompt string, ollamaEndpoint string, ollamaToken string, ollamaModel string) (string, error) {
+	// Check if required parameters are set
+	if ollamaModel == "" {
+		return "", fmt.Errorf("Ollama model not set")
+	}
+
+	// Create the request
+	ctx := context.Background()
+	client, err := api.ClientFromEnvironment()
+	if err != nil {
+		return "", err
+	}
+
+	req := &api.GenerateRequest{
+		Model:  ollamaModel,
+		Prompt: fullPrompt,
+		Stream: nil,
+	}
+
+	var result string
+	err = client.Generate(ctx, req, func(resp api.GenerateResponse) error {
+		result += resp.Response
+		return nil
+	})
+	if err != nil {
+		return "", err
+	}
+
+	return result, nil
 }
 
 // buildPrompt constructs the prompt string from PRs, issues, and tasks