in conversational-assistant/app/api/turn_response/route.ts [14:88]
// Pumps an OpenAI chat-completion stream to the client as Server-Sent Events.
// Emits three SSE event types:
//   - assistant_delta:          raw text content deltas from the model
//   - function_arguments_delta: incremental tool-call argument chunks
//   - function_arguments_done:  the fully accumulated arguments once the
//                               model signals the tool call is complete
async start(controller) {
  try {
    console.log('Starting OpenAI stream', messages[messages.length - 1])
    const openaiStream = openai.beta.chat.completions.stream({
      model: MODEL,
      messages,
      temperature: 0,
      tools: tools as ChatCompletionTool[],
      parallel_tool_calls: false
    })

    // Accumulator state for the tool call currently being streamed.
    // With parallel_tool_calls: false there is at most one active call,
    // so single scalars are sufficient.
    let functionArguments = ''
    let callId = ''
    let functionName = ''
    let isCollectingFunctionArgs = false

    for await (const part of openaiStream) {
      const choice = part.choices[0]
      if (!choice) continue // defensive: skip a malformed chunk with no choices
      const delta = choice.delta
      const finishReason = choice.finish_reason

      // Plain assistant text — forward the delta as-is.
      if (delta.content) {
        const data = JSON.stringify({
          event: 'assistant_delta',
          data: delta
        })
        controller.enqueue(`data: ${data}\n\n`)
      }

      // Tool-call streaming: id and name arrive once (on early chunks),
      // arguments arrive as incremental string fragments.
      // Guarded local also avoids a crash if tool_calls is an empty array
      // (truthy, but [0] would be undefined — the original dereferenced it).
      const toolCall = delta.tool_calls?.[0]
      if (toolCall) {
        isCollectingFunctionArgs = true
        if (toolCall.id) {
          callId = toolCall.id
        }
        if (toolCall.function?.name) {
          functionName = toolCall.function.name
          console.log('Function execution:', functionName)
        }
        functionArguments += toolCall.function?.arguments || ''
        const data = JSON.stringify({
          event: 'function_arguments_delta',
          data: {
            callId: callId,
            name: functionName,
            arguments: toolCall.function?.arguments
          }
        })
        controller.enqueue(`data: ${data}\n\n`)
      }

      // The model declared the tool call finished — emit the full payload.
      if (finishReason === 'tool_calls' && isCollectingFunctionArgs) {
        console.log(`tool call ${functionName} is complete`)
        const data = JSON.stringify({
          event: 'function_arguments_done',
          data: {
            callId: callId,
            name: functionName,
            arguments: functionArguments
          }
        })
        controller.enqueue(`data: ${data}\n\n`)
        // Reset ALL accumulator state. The original omitted callId here,
        // so a stale id would label the delta events of the next tool call
        // until its own id chunk arrived.
        functionArguments = ''
        functionName = ''
        callId = ''
        isCollectingFunctionArgs = false
      }
    }
    controller.close()
  } catch (error) {
    console.error('Error in stream start:', error)
    controller.error(error)
  }
}