refactor: simplify OpenAI and Ollama summarizer implementations
This commit is contained in:
parent
214cdcd2b2
commit
9e82b77276
1 changed file with 9 additions and 27 deletions
|
|
@ -33,37 +33,19 @@ func (o *OpenAISummarizer) Summarize(fullPrompt string, openaiEndpoint string, o
|
|||
return "", fmt.Errorf("OpenAI endpoint or token not set")
|
||||
}
|
||||
|
||||
// Create a JSON payload for the OpenAI API
|
||||
payload := struct {
|
||||
Model string `json:"model"`
|
||||
Messages []struct {
|
||||
Role string `json:"role"`
|
||||
Content string `json:"content"`
|
||||
} `json:"messages"`
|
||||
}{
|
||||
Model: openaiModel,
|
||||
Messages: []struct {
|
||||
Role string `json:"role"`
|
||||
Content string `json:"content"`
|
||||
}{{Role: "system", Content: fullPrompt}},
|
||||
}
|
||||
|
||||
// Create a POST request to the OpenAI endpoint with JSON body
|
||||
req, err := api.GenerateRequest{
|
||||
req := api.GenerateRequest{
|
||||
Model: openaiModel,
|
||||
Prompt: fullPrompt,
|
||||
Stream: nil,
|
||||
}
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
// Use the Ollama client to generate the response
|
||||
ctx := context.Background()
|
||||
client := api.ClientFromEnvironment()
|
||||
client, _ := api.ClientFromEnvironment()
|
||||
|
||||
var result string
|
||||
err = client.Generate(ctx, &req, func(resp api.GenerateResponse) error {
|
||||
err := client.Generate(ctx, &req, func(resp api.GenerateResponse) error {
|
||||
result += resp.Response
|
||||
return nil
|
||||
})
|
||||
|
|
@ -86,7 +68,7 @@ func (o *OllamaSummarizer) Summarize(fullPrompt string, ollamaEndpoint string, o
|
|||
|
||||
// Create the request
|
||||
ctx := context.Background()
|
||||
client := api.ClientFromEnvironment()
|
||||
client, _ := api.ClientFromEnvironment()
|
||||
|
||||
req := &api.GenerateRequest{
|
||||
Model: ollamaModel,
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue