in server-python/app.py [0:0]
def stream():
    """Stream AI responses for real-time chat interactions.

    Handles POST requests to the '/stream' endpoint with the same JSON
    payload as '/chat' ({"chat": <message>, "history": [...]}). Starts a
    streaming chat session with the Gemini model and yields each generated
    chunk of text to the client as it arrives.

    Args:
        None (reads POST data from the Flask `request` object).

    Returns:
        A Flask `Response` that streams the AI-generated text with the
        'text/event-stream' mimetype.
    """
    # Parse the payload up front so a missing/malformed JSON body fails
    # here with a normal error response, rather than raising mid-stream
    # inside the generator after headers have already been sent.
    # `request.json` is None when the body is absent or not JSON.
    data = request.json or {}
    msg = data.get('chat', '')
    chat_history = data.get('history', [])

    def generate():
        # Lazily runs once the response body is consumed; the model call
        # streams partial results chunk by chunk.
        chat_session = model.start_chat(history=chat_history)
        response = chat_session.send_message(msg, stream=True)
        for chunk in response:
            # chunk.text is already a str — no formatting wrapper needed.
            yield chunk.text

    # NOTE(review): the mimetype claims Server-Sent Events, but chunks are
    # not SSE-framed ("data: ...\n\n"). Left as-is since existing clients
    # may rely on the raw framing — confirm before changing.
    # stream_with_context keeps the Flask request context alive while the
    # generator is being consumed.
    return Response(stream_with_context(generate()), mimetype="text/event-stream")