in gollm/openai.go [118:147]
func (c *OpenAIClient) GenerateCompletion(ctx context.Context, req *CompletionRequest) (CompletionResponse, error) {
	klog.Infof("OpenAI GenerateCompletion called with model: %s", req.Model)
	klog.V(1).Infof("Prompt:\n%s", req.Prompt)

	// Build a Chat Completions request using the model specified in the request.
	chatReq := openai.ChatCompletionNewParams{
		Model: openai.ChatModel(req.Model),
		Messages: []openai.ChatCompletionMessageParamUnion{
			// Send the prompt as a single user message for now.
			openai.UserMessage(req.Prompt),
		},
	}

	completion, err := c.client.Chat.Completions.New(ctx, chatReq)
	if err != nil {
		return nil, fmt.Errorf("failed to generate OpenAI completion: %w", err)
	}

	// Guard against an empty choice list or empty message content.
	if len(completion.Choices) == 0 || completion.Choices[0].Message.Content == "" {
		return nil, errors.New("received an empty response from OpenAI")
	}

	// Return the content of the first choice.
	resp := &simpleCompletionResponse{
		content: completion.Choices[0].Message.Content,
	}
	return resp, nil
}
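
A hedged caller-side sketch follows, showing how this method might be invoked. It assumes CompletionRequest exposes the Model and Prompt fields used above, that the *OpenAIClient has already been constructed, and that CompletionResponse surfaces the generated text through an accessor such as Response(); the accessor name, the runExample helper, and the model string are illustrative assumptions, not part of this excerpt.

// Sketch only: runExample and Response() are assumed names for illustration;
// the client is assumed to be an already-constructed *OpenAIClient.
func runExample(ctx context.Context, client *OpenAIClient) error {
	resp, err := client.GenerateCompletion(ctx, &CompletionRequest{
		Model:  "gpt-4o", // assumed model identifier
		Prompt: "Summarize the pods in the default namespace.",
	})
	if err != nil {
		return err
	}
	// Assumed accessor for the generated text; check the CompletionResponse
	// interface definition in gollm for the actual method name.
	fmt.Println(resp.Response())
	return nil
}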