in ChatBot/SemanticKernel/SemanticKernelService.cs [44:67]
public async Task<string> GetResponseAsync(string userInput)
{
    _chatHistory.AddUserMessage(userInput);

    // `AllowParallelCalls` lets the model request multiple function calls in a single
    // response when the underlying model supports parallel function calling.
    FunctionChoiceBehaviorOptions options = new() { AllowParallelCalls = true };

    var openAIPromptExecutionSettings = new OpenAIPromptExecutionSettings
    {
        // Auto: the model decides which registered functions to call, and the kernel invokes them automatically.
        FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: options)
    };

    // Send the full chat history so the model has the conversation context;
    // the kernel is passed so auto-invoked functions can resolve their plugins.
    var result = await _chatCompletionService.GetChatMessageContentAsync(
        _chatHistory,
        executionSettings: openAIPromptExecutionSettings,
        kernel: _kernel
    );

    // Append the assistant's reply to the history so later turns keep the context.
    var response = result.Content ?? "No response received from AI.";
    _chatHistory.AddMessage(result.Role, response);
    return response;
}
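
For context, a minimal sketch of how parallel function calling could come into play with this method. The `WeatherPlugin` class, its `GetTemperature` and `GetForecast` functions, and the registration call shown below are illustrative assumptions, not code from this repository; they only stand in for whatever plugins the service's `_kernel` actually has registered.

// Illustrative only: a hypothetical plugin with two independent functions.
// With AllowParallelCalls enabled, a model that supports parallel tool calls
// may request both functions in a single response (e.g. "Compare the weather
// in Paris and Tokyo"), and FunctionChoiceBehavior.Auto() invokes them automatically.
using System.ComponentModel;
using Microsoft.SemanticKernel;

public class WeatherPlugin // hypothetical plugin, not part of the ChatBot project
{
    [KernelFunction, Description("Gets the current temperature for a city.")]
    public string GetTemperature(string city) => $"{city}: 21°C";

    [KernelFunction, Description("Gets a one-line forecast for a city.")]
    public string GetForecast(string city) => $"{city}: clear skies";
}

// Assumed wiring: the service's _kernel would need the plugin registered, e.g.
//     _kernel.Plugins.AddFromType<WeatherPlugin>();
// before GetResponseAsync is called.

Note that `AllowParallelCalls` only asks the model to emit multiple function calls in one response; in recent Semantic Kernel versions, running those invocations concurrently on the client side is governed by a separate option (`AllowConcurrentInvocation`) on the same `FunctionChoiceBehaviorOptions` class.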