feat: implement Ollama Summarizer using official SDK as per article example
Co-authored-by: aider (openai/qwen3-coder:30b-a3b-q4_K_M) <aider@aider.chat>
parent d239689ef4
commit 214cdcd2b2

1 changed file with 53 additions and 25 deletions
@@ -1,17 +1,16 @@
 package main
 
 import (
-	"bytes"
 	"encoding/json"
+	"context"
 	"fmt"
-	"io"
-	"net/http"
 	"os"
 	"time"
 
 	"o5r.ca/autocrossbow/contributions"
 	"o5r.ca/autocrossbow/issues"
 	"o5r.ca/autocrossbow/issues/vikunja"
 
+	"github.com/ollama/ollama/api"
 )
 
 const defaultPrompt = `I will provide you, for a given period, with an employee name and a list of Pull Request titles and summaries split by repository, and a list of Jira Issues an employee has worked on. I may also provide, optionally, the employee's self-assessment. If I do, integrate that.
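For reference, the github.com/ollama/ollama/api package added above is the official Ollama Go SDK. Below is a minimal, self-contained sketch of the call pattern this commit adopts; the model name and prompt are illustrative and not taken from the diff, and the client assumes OLLAMA_HOST points at a running Ollama server (defaulting to http://127.0.0.1:11434):

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	// Build a client from the OLLAMA_HOST environment variable.
	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}

	req := &api.GenerateRequest{
		Model:  "llama3.1", // illustrative; any locally pulled model works
		Prompt: "Summarize in three words: the quick brown fox jumps over the lazy dog.",
	}

	// Generate streams the response by default; the callback fires once per chunk.
	err = client.Generate(context.Background(), req, func(resp api.GenerateResponse) error {
		fmt.Print(resp.Response)
		return nil
	})
	if err != nil {
		log.Fatal(err)
	}
}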
@@ -49,33 +48,62 @@ func (o *OpenAISummarizer) Summarize(fullPrompt string, openaiEndpoint string, openaiToken string, openaiModel string) (string, error) {
 		}{{Role: "system", Content: fullPrompt}},
 	}
 
 	jsonPayload, err := json.Marshal(payload)
 	fmt.Println(string(jsonPayload))
 	if err != nil {
 		return "", err
 	}
 
-	// Create a POST request to the OpenAI endpoint with JSON body
-	req, err := http.NewRequest("POST", openaiEndpoint, bytes.NewBuffer(jsonPayload))
-	if err != nil {
-		return "", err
+	req := api.GenerateRequest{
+		Model:  openaiModel,
+		Prompt: fullPrompt,
+		Stream: nil,
 	}
-	req.Header.Set("Content-Type", "application/json")
-	req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", openaiToken))
-
-	client := &http.Client{}
-	resp, err := client.Do(req)
-	if err != nil {
-		return "", err
-	}
-	defer resp.Body.Close()
-
-	body, err := io.ReadAll(resp.Body)
-	if err != nil {
-		return "", err
-	}
-
-	return string(body), nil
+	// Use the Ollama client to generate the response
+	ctx := context.Background()
+	client, err := api.ClientFromEnvironment()
+	if err != nil {
+		return "", err
+	}
+
+	// Accumulate the streamed response chunks into a single string
+	var result string
+	err = client.Generate(ctx, &req, func(resp api.GenerateResponse) error {
+		result += resp.Response
+		return nil
+	})
+	if err != nil {
+		return "", err
+	}
+
+	return result, nil
 }
+
+// OllamaSummarizer implements the Summarizer interface for Ollama endpoints
+type OllamaSummarizer struct{}
+
+// Summarize sends the prompt to an Ollama endpoint for summarization
+func (o *OllamaSummarizer) Summarize(fullPrompt string, ollamaEndpoint string, ollamaToken string, ollamaModel string) (string, error) {
+	// Check if required parameters are set
+	if ollamaModel == "" {
+		return "", fmt.Errorf("Ollama model not set")
+	}
+
+	// Create the client and the request
+	ctx := context.Background()
+	client, err := api.ClientFromEnvironment()
+	if err != nil {
+		return "", err
+	}
+
+	req := &api.GenerateRequest{
+		Model:  ollamaModel,
+		Prompt: fullPrompt,
+		Stream: nil,
+	}
+
+	// Accumulate the streamed response chunks into a single string
+	var result string
+	err = client.Generate(ctx, req, func(resp api.GenerateResponse) error {
+		result += resp.Response
+		return nil
+	})
+	if err != nil {
+		return "", err
+	}
+
+	return result, nil
+}
 
 // buildPrompt constructs the prompt string from PRs, issues, and tasks
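For illustration, a caller in the same package might exercise the new OllamaSummarizer like this; it is a hypothetical sketch, and the prompt text and model name are placeholders. Note that the ollamaEndpoint and ollamaToken parameters are accepted for interface compatibility but never used, since api.ClientFromEnvironment derives the server address from OLLAMA_HOST:

func main() {
	summarizer := &OllamaSummarizer{}

	// The endpoint and token arguments are ignored by this implementation;
	// configure the server address via the OLLAMA_HOST environment variable.
	summary, err := summarizer.Summarize("Employee: Jane Doe\nPRs: ...", "", "", "llama3.1")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(summary)
}

Because Stream is left nil, the SDK streams the response and the callback concatenates the chunks; setting Stream to a pointer to false should instead deliver a single, complete response in one callback invocation.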