in src/lib/utils/business.svelte.ts [213:244]
export async function handleNonStreamingResponse(
  conversation: ConversationClass | Conversation
): Promise<{ message: ChatCompletionOutputMessage; completion_tokens: number }> {
  const metadata = await getCompletionMetadata(conversation);

  if (metadata.type === "openai") {
    // OpenAI-compatible client: request a single, non-streamed chat completion.
    const response = await metadata.client.chat.completions.create({
      ...metadata.args,
      stream: false,
    } as OpenAI.ChatCompletionCreateParamsNonStreaming);
    if (response.choices && response.choices.length > 0 && response.choices[0]?.message) {
      return {
        message: {
          role: "assistant",
          content: response.choices[0].message.content || "",
        },
        // Fall back to 0 when the provider omits usage information.
        completion_tokens: response.usage?.completion_tokens || 0,
      };
    }
    throw new Error("No response from the model");
  }

  // Hugging Face Inference client: non-streaming chat completion.
  const response = await metadata.client.chatCompletion(metadata.args);
  if (response.choices && response.choices.length > 0) {
    const { message } = response.choices[0]!;
    const { completion_tokens } = response.usage;
    return { message, completion_tokens };
  }
  throw new Error("No response from the model");
}
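
For reference, a caller might consume this helper roughly as sketched below. This is a minimal, hypothetical usage example rather than code from the repo: the appendAssistantReply wrapper and the mutable messages array on the conversation object are assumptions made for illustration; handleNonStreamingResponse and the Conversation type come from the module above.

// Hypothetical usage sketch (assumes handleNonStreamingResponse and Conversation are
// imported from src/lib/utils/business.svelte.ts; the `messages` array on the
// conversation object is an assumed shape, not verified against the repo).
async function appendAssistantReply(conversation: Conversation): Promise<void> {
  try {
    const { message, completion_tokens } = await handleNonStreamingResponse(conversation);
    conversation.messages.push(message); // assumption: messages holds assistant/user messages
    console.debug(`assistant reply received (${completion_tokens} completion tokens)`);
  } catch (err) {
    // Both provider branches throw "No response from the model" when no choices come back.
    console.error("Non-streaming completion failed:", err);
  }
}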