# components/frontend_streamlit/src/pages/3_Chat.py

import logging

import streamlit as st
def on_submit(user_input):
  """Run the dispatch agent when the user submits an input prompt."""
  st.session_state.error_msg = None
  st.session_state.messages.append({"HumanInput": user_input})
  with st.chat_message("user"):
    st.write(user_input)
  show_loading()

  # Send the request to the llm-service API.
  default_route = st.session_state.get("default_route", None)
  routing_agents = get_all_routing_agents()
  routing_agent_names = list(routing_agents.keys())
  chat_llm_type = st.session_state.get("chat_llm_type")
  logging.info("llm_type in session %s", chat_llm_type)

  if default_route is None:
    # Pick the first routing agent as the default.
    if routing_agent_names:
      routing_agent = routing_agent_names[0]
    else:
      routing_agent = "default"
    response = run_dispatch(user_input,
                            routing_agent,
                            chat_id=st.session_state.get("chat_id"),
                            llm_type=chat_llm_type,
                            run_as_batch_job=True)
    # st.session_state.default_route = response.get("route", None)
  elif default_route in routing_agent_names:
    response = run_dispatch(user_input,
                            default_route,
                            chat_id=st.session_state.get("chat_id"),
                            llm_type=chat_llm_type,
                            run_as_batch_job=True)
    # st.session_state.default_route = response.get("route", None)
  elif default_route == "Chat":
    response = run_chat(user_input,
                        chat_id=st.session_state.get("chat_id"),
                        llm_type=chat_llm_type)
  elif default_route == "Plan":
    response = run_agent_plan("Plan", user_input,
                              chat_id=st.session_state.get("chat_id"),
                              llm_type=chat_llm_type)
  else:
    st.error(f"Unsupported route {default_route}")
    response = None

  if response:
    st.session_state.chat_id = response["chat"]["id"]
    st.session_state.user_chats.insert(0, response["chat"])

    # TODO: The "AIOutput" and "content" keys are currently inconsistent
    # between the API response and the UserChat history.
    if "content" in response:
      response["AIOutput"] = response["content"]
    del response["chat"]

    # Append the new message from the API response and display it.
    append_and_display_message(response)

    # If the response includes an async batch job, keep polling for the
    # job result.
    if "batch_job" in response:
      if st.session_state.get("debug"):
        with st.expander("batch_job info:"):
          st.write(response)
      update_async_job(response["batch_job"]["id"])
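

# ---------------------------------------------------------------------------
# Hedged sketches of the helpers on_submit() relies on. These are NOT the
# actual implementations from this repo: the endpoint paths, payload keys,
# and LLM_SERVICE_URL below are assumptions made for illustration. Only the
# function names and the "chat"/"content" response keys come from the code
# above.
# ---------------------------------------------------------------------------
import time

import requests

LLM_SERVICE_URL = "http://llm-service"  # hypothetical base URL


def get_all_routing_agents() -> dict:
  """Minimal sketch: fetch the routing agents from an assumed agents route."""
  resp = requests.get(f"{LLM_SERVICE_URL}/agents/route", timeout=10)
  resp.raise_for_status()
  # Assumed to map agent name -> agent config, since on_submit() only uses
  # the keys of the returned dict.
  return resp.json().get("data", {})


def run_chat(user_input: str, chat_id: str = None, llm_type: str = None) -> dict:
  """Minimal sketch: POST the prompt to an assumed llm-service chat route."""
  url = (f"{LLM_SERVICE_URL}/chat/{chat_id}" if chat_id
         else f"{LLM_SERVICE_URL}/chat")
  resp = requests.post(url,
                       json={"prompt": user_input, "llm_type": llm_type},
                       timeout=60)
  resp.raise_for_status()
  # Assumed to return a dict containing "chat" and "content" keys, matching
  # how on_submit() reads the response.
  return resp.json()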
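

# Plausible shapes for the two UI helpers. The placeholder text and the
# rendering details are assumptions; only the "HumanInput"/"AIOutput"
# message-dict keys are taken from on_submit() above.
def show_loading():
  """Minimal sketch: render a placeholder row while the reply is pending."""
  with st.chat_message("ai"):
    st.write("Thinking...")


def append_and_display_message(message: dict):
  """Minimal sketch: store a message dict in session state and render it."""
  st.session_state.messages.append(message)
  if "HumanInput" in message:
    with st.chat_message("user"):
      st.write(message["HumanInput"])
  if "AIOutput" in message:
    with st.chat_message("ai"):
      st.write(message["AIOutput"])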
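

# A plausible update_async_job(): a poll-until-terminal loop against an
# assumed llm-service jobs endpoint. The /jobs path, the "status"/"result"
# fields, the status values, and the 2-second interval are all assumptions
# for this sketch.
def update_async_job(job_id: str, timeout_secs: int = 300):
  """Minimal sketch: poll a batch job until it succeeds, fails, or times out."""
  deadline = time.time() + timeout_secs
  while time.time() < deadline:
    resp = requests.get(f"{LLM_SERVICE_URL}/jobs/{job_id}", timeout=10)
    resp.raise_for_status()
    job = resp.json()
    if job.get("status") == "succeeded":
      # Surface the finished batch result as a normal AI message.
      append_and_display_message({"AIOutput": job.get("result", "")})
      return
    if job.get("status") == "failed":
      st.session_state.error_msg = f"Batch job {job_id} failed."
      st.error(st.session_state.error_msg)
      return
    time.sleep(2)  # assumed polling interval
  st.error(f"Timed out waiting for batch job {job_id}")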