in packages/blueprints/gen-ai-chatbot/static-assets/chatbot-genai-components/backend/python/app/agents/handlers/final_std.py [0:0]
def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
    """Run on new LLM token. Only available when streaming is enabled."""
    print("on_llm_new_token")  # debug trace emitted for every streamed token
    # Remember the last n tokens, where n = len(answer_prefix_tokens)
    self.append_to_last_tokens(token)

    # Check if the last n tokens match the answer_prefix_tokens list ...
    if self.check_if_answer_reached():
        self.answer_reached = True
        if self.stream_prefix:
            for t in self.last_tokens:
                sys.stdout.write(t)
            sys.stdout.flush()
        return

    # ... if yes, then print tokens from now on
    if self.answer_reached:
        sys.stdout.write(token)
        sys.stdout.flush()
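

# The method above relies on helpers inherited from its base class (the upstream
# LangChain handler of this kind is FinalStreamingStdOutCallbackHandler). The
# sketch below is a hypothetical, self-contained stand-in showing how those
# helpers might work, assuming a simple sliding-window prefix match; the class
# name FinalAnswerStreamer and its constructor signature are illustrative only,
# not the blueprint's actual API.
from typing import List


class FinalAnswerStreamer:
    def __init__(self, answer_prefix_tokens: List[str], stream_prefix: bool = False) -> None:
        self.answer_prefix_tokens = answer_prefix_tokens
        self.last_tokens: List[str] = []    # sliding window of the most recent tokens
        self.answer_reached = False         # flips to True once the prefix has been seen
        self.stream_prefix = stream_prefix  # whether to echo the prefix tokens themselves

    def append_to_last_tokens(self, token: str) -> None:
        # Keep only the last n tokens, where n = len(answer_prefix_tokens).
        self.last_tokens.append(token)
        if len(self.last_tokens) > len(self.answer_prefix_tokens):
            self.last_tokens.pop(0)

    def check_if_answer_reached(self) -> bool:
        # The final answer starts once the window matches the prefix exactly.
        return self.last_tokens == self.answer_prefix_tokens


# Example: if the handler above used this base with
# answer_prefix_tokens=["Final", "Answer", ":"], feeding on_llm_new_token the
# tokens "Final", "Answer", ":", " 42" would print nothing until the prefix is
# complete, then stream " 42" to stdout.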