components/frontend_streamlit/src/pages/3_Chat.py [117:162]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  with st.chat_message("user"):
    st.write(user_input)

  show_loading()

  # Send the request to the llm-service API
  default_route = st.session_state.get("default_route", None)
  routing_agents = get_all_routing_agents()
  routing_agent_names = list(routing_agents.keys())
  chat_llm_type = st.session_state.get("chat_llm_type")
  logging.info("llm_type in session %s", chat_llm_type)

  if default_route is None:
    # No route chosen yet: fall back to the first registered routing
    # agent, or the literal "default" agent if none are registered.
    routing_agent = (routing_agent_names[0] if routing_agent_names
                     else "default")
    response = run_dispatch(user_input,
                            routing_agent,
                            chat_id=st.session_state.get("chat_id"),
                            llm_type=chat_llm_type,
                            run_as_batch_job=True)
    # st.session_state.default_route = response.get("route", None)

  # Dispatch to the routing agent explicitly selected for this session
  elif default_route in routing_agent_names:
    response = run_dispatch(user_input,
                            default_route,
                            chat_id=st.session_state.get("chat_id"),
                            llm_type=chat_llm_type,
                            run_as_batch_job=True)
    # st.session_state.default_route = response.get("route", None)

  # Plain chat, with no agent routing
  elif default_route == "Chat":
    response = run_chat(user_input,
                        chat_id=st.session_state.get("chat_id"),
                        llm_type=chat_llm_type)
  # Run the planning agent on the user input
  elif default_route == "Plan":
    response = run_agent_plan("Plan", user_input,
                              chat_id=st.session_state.get("chat_id"),
                              llm_type=chat_llm_type)
  else:
    st.error(f"Unsupported route {default_route}")
    response = None

  if response:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
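
The branch selection in this block reduces to a small pure function, which makes the fallback rules easier to see and to test. A minimal sketch, assuming the route semantics shown above; the helper name resolve_route is hypothetical, not something in the codebase:

def resolve_route(default_route, routing_agent_names):
  """Map the session's default_route to a (kind, target) pair.

  kind is one of "dispatch", "chat", or "plan"; target is the routing
  agent name for "dispatch" and None otherwise. Mirrors the if/elif
  chain above, raising instead of calling st.error.
  """
  if default_route is None:
    # No route chosen yet: fall back to the first registered routing
    # agent, or the literal "default" agent if none are registered.
    target = routing_agent_names[0] if routing_agent_names else "default"
    return ("dispatch", target)
  if default_route in routing_agent_names:
    return ("dispatch", default_route)
  if default_route == "Chat":
    return ("chat", None)
  if default_route == "Plan":
    return ("plan", None)
  raise ValueError(f"Unsupported route {default_route}")

# Fallback behavior when no route is set and no agents are registered:
assert resolve_route(None, []) == ("dispatch", "default")
assert resolve_route("Chat", ["Router"]) == ("chat", None)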



components/frontend_streamlit/src/pages/6_Custom_Chat.py [152:195]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    with st.chat_message("user"):
      st.write(user_input)

    show_loading()

    # Send the request to the llm-service API
    default_route = st.session_state.get("default_route", None)
    routing_agents = get_all_routing_agents()
    routing_agent_names = list(routing_agents.keys())
    chat_llm_type = st.session_state.get("chat_llm_type")
    logging.info("llm_type in session %s", chat_llm_type)

    if default_route is None:
      # No route chosen yet: fall back to the first registered routing
      # agent, or the literal "default" agent if none are registered.
      routing_agent = (routing_agent_names[0] if routing_agent_names
                       else "default")
      response = run_dispatch(user_input,
                              routing_agent,
                              chat_id=st.session_state.get("chat_id"),
                              llm_type=chat_llm_type,
                              run_as_batch_job=True)

    # Dispatch to the routing agent explicitly selected for this session
    elif default_route in routing_agent_names:
      response = run_dispatch(user_input,
                              default_route,
                              chat_id=st.session_state.get("chat_id"),
                              llm_type=chat_llm_type,
                              run_as_batch_job=True)

    # Plain chat, with no agent routing
    elif default_route == "Chat":
      response = run_chat(user_input,
                          chat_id=st.session_state.get("chat_id"),
                          llm_type=chat_llm_type)
    # Run the planning agent on the user input
    elif default_route == "Plan":
      response = run_agent_plan("Plan", user_input,
                                chat_id=st.session_state.get("chat_id"),
                                llm_type=chat_llm_type)
    else:
      st.error(f"Unsupported route {default_route}")
      response = None

    if response:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
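
Both pages repeat this dispatch block verbatim, so a change to the routing rules currently has to land in two places. One way to collapse the duplication, sketched on top of the resolve_route helper above; send_chat_request and the module layout are hypothetical, and the import path for the API helpers is an assumption about the repo, not its actual structure:

# Hypothetical shared module, e.g. src/utils/chat_routing.py
import logging

import streamlit as st

from api import (get_all_routing_agents, run_agent_plan,  # assumed path
                 run_chat, run_dispatch)
from utils.chat_routing_rules import resolve_route  # sketched above


def send_chat_request(user_input):
  """Send user_input to the llm-service using the session's route.

  Returns the llm-service response, or None for an unsupported route.
  """
  routing_agent_names = list(get_all_routing_agents().keys())
  chat_llm_type = st.session_state.get("chat_llm_type")
  chat_id = st.session_state.get("chat_id")
  logging.info("llm_type in session %s", chat_llm_type)

  try:
    kind, target = resolve_route(st.session_state.get("default_route"),
                                 routing_agent_names)
  except ValueError as e:
    st.error(str(e))
    return None

  if kind == "dispatch":
    return run_dispatch(user_input, target, chat_id=chat_id,
                        llm_type=chat_llm_type, run_as_batch_job=True)
  if kind == "chat":
    return run_chat(user_input, chat_id=chat_id, llm_type=chat_llm_type)
  return run_agent_plan("Plan", user_input, chat_id=chat_id,
                        llm_type=chat_llm_type)

With this in place, each page's handler body shrinks to response = send_chat_request(user_input), and the two copies flagged in this report stay in sync by construction.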



