in openai/streaming.go [27:47]
// streamingChatCompletionsHandler relays a Gemini chat response to the client as
// OpenAI-style server-sent events ("chat.completion.chunk" objects).
func streamingChatCompletionsHandler(w http.ResponseWriter, r *http.Request, model string, chat *genai.ChatSession, lastPart genai.Part) {
	iter := chat.SendMessageStream(r.Context(), lastPart)
	for {
		gresp, err := iter.Next()
		if err == iterator.Done {
			break
		}
		if err != nil {
			internal.ErrorHandler(w, r, http.StatusInternalServerError, "failed to stream response: %v", err)
			return
		}
		// Convert the Gemini response into an OpenAI-compatible chunk before encoding it.
		chunk, err := json.Marshal(toOpenAIResponse(gresp, "chat.completion.chunk", model))
		if err != nil {
			internal.ErrorHandler(w, r, http.StatusInternalServerError, "failed to marshal chunk: %v", err)
			return
		}
fmt.Fprintf(w, "data: %s\n", chunk)
}
fmt.Fprint(w, "data: [DONE]\n")
}
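
The excerpt ends with the function, so it does not show where the Content-Type: text/event-stream header is set or how the response is flushed; presumably the surrounding server handles that. If it does not, each chunk may sit in net/http's output buffer instead of reaching the client immediately. A minimal, illustrative sketch of a wrapper that flushes after every write (the flushingResponseWriter name is hypothetical, not from the repository):

// flushingResponseWriter is an illustrative wrapper (not part of streaming.go)
// that flushes after every Write so each SSE event reaches the client as soon
// as it is written.
type flushingResponseWriter struct {
	http.ResponseWriter
}

func (fw flushingResponseWriter) Write(p []byte) (int, error) {
	n, err := fw.ResponseWriter.Write(p)
	if f, ok := fw.ResponseWriter.(http.Flusher); ok {
		f.Flush() // push buffered bytes to the client after each event
	}
	return n, err
}

Under this assumption, the caller would set w.Header().Set("Content-Type", "text/event-stream") and pass flushingResponseWriter{w} in place of w when invoking streamingChatCompletionsHandler.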