in packages/inference/src/providers/providerHelper.ts [307:324]
async getResponse(response: ChatCompletionOutput): Promise<ChatCompletionOutput> {
	// Runtime shape check: only return the payload if it looks like a ChatCompletionOutput.
	if (
		typeof response === "object" &&
		Array.isArray(response?.choices) &&
		typeof response?.created === "number" &&
		typeof response?.id === "string" &&
		typeof response?.model === "string" &&
		// Together.ai and Nebius do not output a system_fingerprint
		(response.system_fingerprint === undefined ||
			response.system_fingerprint === null ||
			typeof response.system_fingerprint === "string") &&
		typeof response?.usage === "object"
	) {
		return response;
	}
	throw new InferenceClientProviderOutputError("Expected ChatCompletionOutput");
}
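
For illustration, a minimal sketch of a payload that satisfies every condition in this guard. The field values are invented, and the usage sub-fields are assumptions for the example; the guard itself only requires that usage be an object.

// Illustrative only: invented values, not a real provider response.
const candidate = {
	id: "chatcmpl-123",
	created: 1700000000,
	model: "example-model",
	system_fingerprint: undefined, // allowed: Together.ai / Nebius may omit this field
	choices: [],
	usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 }, // sub-fields assumed; only `typeof usage === "object"` is checked
};
// A payload shaped like this resolves through getResponse unchanged; dropping
// `created` or passing a non-array `choices` makes the guard fall through and
// throw InferenceClientProviderOutputError instead.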