public static StreamingChatCompletionUpdate StreamingChatCompletionUpdate()

in src/Custom/Chat/OpenAIChatModelFactory.cs [230:316]


    // Forwards to the full overload below, supplying a default (absent) output audio update.
    public static StreamingChatCompletionUpdate StreamingChatCompletionUpdate(
        string completionId,
        ChatMessageContent contentUpdate,
        StreamingChatFunctionCallUpdate functionCallUpdate,
        IEnumerable<StreamingChatToolCallUpdate> toolCallUpdates,
        ChatMessageRole? role,
        string refusalUpdate,
        IEnumerable<ChatTokenLogProbabilityDetails> contentTokenLogProbabilities,
        IEnumerable<ChatTokenLogProbabilityDetails> refusalTokenLogProbabilities,
        ChatFinishReason? finishReason,
        DateTimeOffset createdAt,
        string model,
        string systemFingerprint,
        ChatTokenUsage usage) =>
        StreamingChatCompletionUpdate(
            completionId: completionId,
            contentUpdate: contentUpdate,
            functionCallUpdate: functionCallUpdate,
            toolCallUpdates: toolCallUpdates,
            role: role,
            refusalUpdate: refusalUpdate,
            contentTokenLogProbabilities: contentTokenLogProbabilities,
            refusalTokenLogProbabilities: refusalTokenLogProbabilities,
            finishReason: finishReason,
            createdAt: createdAt,
            model: model,
            systemFingerprint: systemFingerprint,
            usage: usage,
            outputAudioUpdate: default);

    /// <summary> Initializes a new instance of <see cref="OpenAI.Chat.StreamingChatCompletionUpdate"/>. </summary>
    /// <returns> A new <see cref="OpenAI.Chat.StreamingChatCompletionUpdate"/> instance for mocking. </returns>
    public static StreamingChatCompletionUpdate StreamingChatCompletionUpdate(
        string completionId = null,
        ChatMessageContent contentUpdate = null,
        StreamingChatFunctionCallUpdate functionCallUpdate = null,
        IEnumerable<StreamingChatToolCallUpdate> toolCallUpdates = null,
        ChatMessageRole? role = default,
        string refusalUpdate = null,
        IEnumerable<ChatTokenLogProbabilityDetails> contentTokenLogProbabilities = null,
        IEnumerable<ChatTokenLogProbabilityDetails> refusalTokenLogProbabilities = null,
        ChatFinishReason? finishReason = default,
        DateTimeOffset createdAt = default,
        string model = null,
        string systemFingerprint = null,
        ChatTokenUsage usage = default,
        StreamingChatOutputAudioUpdate outputAudioUpdate = default)
    {
        contentUpdate ??= new ChatMessageContent();
        toolCallUpdates ??= new List<StreamingChatToolCallUpdate>();
        contentTokenLogProbabilities ??= new List<ChatTokenLogProbabilityDetails>();
        refusalTokenLogProbabilities ??= new List<ChatTokenLogProbabilityDetails>();

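        // Project the public update pieces (audio, function call, tool calls, refusal, role, content) into the internal wire-format delta.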
        InternalChatCompletionStreamResponseDelta delta = new InternalChatCompletionStreamResponseDelta(
            outputAudioUpdate,
            functionCallUpdate,
            toolCallUpdates.ToList(),
            refusalUpdate,
            role,
            contentUpdate,
            additionalBinaryDataProperties: null);

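        // Group the content and refusal token log probabilities into the choice-level logprobs object.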
        InternalCreateChatCompletionStreamResponseChoiceLogprobs logprobs = new InternalCreateChatCompletionStreamResponseChoiceLogprobs(
            contentTokenLogProbabilities.ToList(),
            refusalTokenLogProbabilities.ToList(),
            additionalBinaryDataProperties: null);

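        // A mocked chunk carries a single choice, fixed at index 0.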
        IReadOnlyList<InternalCreateChatCompletionStreamResponseChoice> choices = [
            new InternalCreateChatCompletionStreamResponseChoice(
                delta,
                logprobs,
                index: 0,
                finishReason,
                additionalBinaryDataProperties: null)
        ];

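        // Wrap the choice in a chunk-typed response envelope together with the supplied metadata.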
        return new StreamingChatCompletionUpdate(
            model,
            systemFingerprint,
            InternalCreateChatCompletionStreamResponseObject.ChatCompletionChunk,
            completionId,
            serviceTier: null,
            choices,
            createdAt,
            usage,
            additionalBinaryDataProperties: null);
    }
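
A minimal usage sketch of the factory for the mocking scenario the XML doc describes. The class name OpenAIChatModelFactory is taken from the file path above; the argument values, the update variable, and the string-accepting ChatMessageContent constructor are illustrative assumptions rather than anything this listing defines.

    using System;
    using OpenAI.Chat;

    // Fabricate a single streaming chunk for a test double; no network call involved.
    StreamingChatCompletionUpdate update = OpenAIChatModelFactory.StreamingChatCompletionUpdate(
        completionId: "chatcmpl-123",
        contentUpdate: new ChatMessageContent("Hello"), // string constructor assumed; new ChatMessageContent() also works, as the defaulting above shows
        role: ChatMessageRole.Assistant,
        model: "gpt-4o-mini",
        createdAt: DateTimeOffset.UtcNow);

Parameters left unset fall back to the defaults shown above (empty content and collections, null strings and nullables), so a test only has to supply the fields it asserts on while the internal wire-format types are filled in for it.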