components/frontend_streamlit/src/pages/3_Chat.py [246:412]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      return

    time.sleep(loop_seconds)
    time_elapsed = time.time() - start_time

  # Timeout
  display_message({
    "AIOutput": f"Timed out after {timeout_seconds} seconds."
  }, len(st.session_state.messages))


def render_cloud_storage_url(url):
  """ Convert a GCS API-style path ("/b/<bucket>/o/<object>") into a public
  https://storage.googleapis.com URL. Any other url is returned unchanged.

  Fix: use count=1 so only the leading "/b/" and the first "/o/" separator
  are rewritten — an object path that itself contains "/o/" must not be
  mangled (str.replace with no count substitutes every occurrence).
  """
  if url.startswith("/b/"):
    url = url.replace("/b/", "https://storage.googleapis.com/", 1)
    url = url.replace("/o/", "/", 1)
  return url


def init_messages():
  """ Load the chat history for the current session and render every entry.

  If a chat is active, its stored history replaces the session messages;
  otherwise a greeting is shown for a brand-new (empty) session. After
  rendering, the most recent batch job seen in the history (if any) is
  polled for updates.
  """
  if st.session_state.chat_id:
    st.session_state.messages = get_chat(st.session_state.chat_id)["history"]
  elif not st.session_state.get("messages", None):
    display_message({
      "AIOutput": "You can ask me anything.",
    }, 0)
    st.session_state.messages = []

  pending_job = None
  # Render with 1-based positions; remember only the last batch job id.
  for position, message in enumerate(st.session_state.messages, start=1):
    display_message(message, position)
    if "batch_job" in message:
      pending_job = message["batch_job"]["job_id"]

  if pending_job:
    update_async_job(pending_job)


def append_new_messages():
  """ Fetch history entries added since the last sync, render and store
  each one, and return the list of newly appended entries. """
  server_history = get_chat(st.session_state.chat_id)["history"]
  # Anything beyond what we already hold locally is new.
  fresh = server_history[len(st.session_state.messages):]
  for message in fresh:
    append_and_display_message(message)
  return fresh


def append_and_display_message(item):
  """ Store one history entry in session state and render it at its
  (1-based) position. """
  messages = st.session_state.messages
  messages.append(item)
  display_message(item, len(messages))


def display_message(item, item_index):
  if "HumanInput" in item:
    with st.chat_message("user"):
      st.write(item["HumanInput"], is_user=True, key=f"human_{item_index}")

  if "route_name" in item and "AIOutput" not in item:
    route_name = item["route_name"]
    with st.chat_message("ai"):
      st.write(f"Using route **`{route_name}`** to respond.")

  route_logs = item.get("route_logs", None)
  if route_logs and route_logs.strip() != "":
    with st.expander("Expand to see Agent's thought process"):
      utils.print_ai_output(route_logs)

  if "AIOutput" in item:
    with st.chat_message("ai"):
      utils.print_ai_output(item["AIOutput"])

  # Append all query references.
  if item.get("db_result", None):
    with st.chat_message("ai"):
      st.write("Query result:")
      result_index = 1

      # Clean up empty rows.
      db_result = []
      for result in item["db_result"]:
        if len(result.keys()) > 0:
          db_result.append(result)

      if len(db_result) > 0:
        for result in db_result:
          values = [str(x) for x in list(result.values())]
          if len(values) > 0:
            markdown_content = f"{result_index}. **{values[0]}**"
            markdown_content += " - " + ", ".join(values[1:])
            with stylable_container(
              key=f"ref_{result_index}",
              css_styles=REFERENCE_CSS_STYLE
            ):
              st.markdown(markdown_content)
          result_index = result_index + 1

      else:
        with stylable_container(
          key=f"ref_{result_index}",
          css_styles=REFERENCE_CSS_STYLE
        ):
          st.markdown("No result found.")

  # Append all resources.
  if item.get("resources", None):
    with st.chat_message("ai"):
      for name, link in item["resources"].items():
        st.markdown(f"Resource: [{name}]({link})")

  # Append all query references.
  if item.get("query_references", None):
    with st.chat_message("ai"):
      st.write("References:")
      reference_index = 1
      for reference in dedup_list(item["query_references"], "chunk_id"):
        document_url = render_cloud_storage_url(reference["document_url"])
        document_text = reference["document_text"]
        st.markdown(
            f"**{reference_index}.** [{document_url}]({document_url})")
        markdown_content = re.sub(
            r"<b>(.*?)</b>", r"**\1**", document_text, flags=re.IGNORECASE)

        with stylable_container(
          key=f"ref_{reference_index}",
          css_styles=REFERENCE_CSS_STYLE
        ):
          st.markdown(markdown_content)

        reference_index = reference_index + 1
      st.divider()

  if "plan" in item:
    with st.chat_message("ai"):
      plan_index = 1
      plan = get_plan(item["plan"]["id"])
      logging.info(plan)

      for step in plan["plan_steps"]:
        with stylable_container(
          key=f"ref_{plan_index}",
          css_styles=STEP_CSS_STYLE
        ):
          st.markdown(step["description"])
        plan_index += 1

      plan_id = plan["id"]
      if st.button("Execute this plan", key=f"plan-{plan_id}"):
        with st.spinner("Executing the plan..."):
          output = run_agent_execute_plan(
            plan_id=plan_id,
            chat_id=st.session_state.chat_id,
            auth_token=st.session_state.auth_token)
        st.session_state.messages.append({
          "AIOutput": f"Plan executed successfully. (plan_id={plan_id})",
        })

        agent_process_output = output.get("agent_process_output", "")
        agent_process_output = ansi_escape.sub("", agent_process_output)
        st.session_state.messages.append({
          "AIOutput": agent_process_output,
        })

  agent_logs = item.get("agent_logs", None)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



components/frontend_streamlit/src/pages/6_Custom_Chat.py [266:433]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      return

    time.sleep(loop_seconds)
    time_elapsed = time.time() - start_time

  # Timeout
  display_message({
    "AIOutput": f"Timed out after {timeout_seconds} seconds."
  }, len(st.session_state.messages))


def render_cloud_storage_url(url):
  """ Convert a GCS API-style path ("/b/<bucket>/o/<object>") into a public
  https://storage.googleapis.com URL. Any other url is returned unchanged.

  Fix: use count=1 so only the leading "/b/" and the first "/o/" separator
  are rewritten — an object path that itself contains "/o/" must not be
  mangled (str.replace with no count substitutes every occurrence).
  """
  if url.startswith("/b/"):
    url = url.replace("/b/", "https://storage.googleapis.com/", 1)
    url = url.replace("/o/", "/", 1)
  return url


def init_messages():
  """ Load the chat history for the current session and render every entry.

  If a chat is active, its stored history replaces the session messages;
  otherwise a greeting is shown for a brand-new (empty) session. After
  rendering, the most recent batch job seen in the history (if any) is
  polled for updates.
  """
  if st.session_state.chat_id:
    st.session_state.messages = get_chat(st.session_state.chat_id)["history"]
  elif not st.session_state.get("messages", None):
    display_message({
      "AIOutput": "You can ask me anything.",
    }, 0)
    st.session_state.messages = []

  pending_job = None
  # Render with 1-based positions; remember only the last batch job id.
  for position, message in enumerate(st.session_state.messages, start=1):
    display_message(message, position)
    if "batch_job" in message:
      pending_job = message["batch_job"]["job_id"]

  if pending_job:
    update_async_job(pending_job)


def append_new_messages():
  """ Fetch history entries added since the last sync, render and store
  each one, and return the list of newly appended entries. """
  server_history = get_chat(st.session_state.chat_id)["history"]
  # Anything beyond what we already hold locally is new.
  fresh = server_history[len(st.session_state.messages):]
  for message in fresh:
    append_and_display_message(message)
  return fresh


def append_and_display_message(item):
  """ Store one history entry in session state and render it at its
  (1-based) position. """
  messages = st.session_state.messages
  messages.append(item)
  display_message(item, len(messages))


def display_message(item, item_index):
  if "HumanInput" in item:
    with st.chat_message("user"):
      st.write(item["HumanInput"], is_user=True, key=f"human_{item_index}")

  if "route_name" in item and "AIOutput" not in item:
    route_name = item["route_name"]
    with st.chat_message("ai"):
      st.write(f"Using route **`{route_name}`** to respond.")

  route_logs = item.get("route_logs", None)
  if route_logs and route_logs.strip() != "":
    with st.expander("Expand to see Agent's thought process"):
      utils.print_ai_output(route_logs)

  if "AIOutput" in item:
    with st.chat_message("ai"):
      utils.print_ai_output(item["AIOutput"])

  # Append all query references.
  if item.get("db_result", None):
    with st.chat_message("ai"):
      st.write("Query result:")
      result_index = 1

      # Clean up empty rows.
      db_result = []
      for result in item["db_result"]:
        if len(result.keys()) > 0:
          db_result.append(result)

      if len(db_result) > 0:
        for result in db_result:
          values = [str(x) for x in list(result.values())]
          if len(values) > 0:
            markdown_content = f"{result_index}. **{values[0]}**"
            markdown_content += " - " + ", ".join(values[1:])
            with stylable_container(
              key=f"ref_{result_index}",
              css_styles=REFERENCE_CSS_STYLE
            ):
              st.markdown(markdown_content)
          result_index = result_index + 1

      else:
        with stylable_container(
          key=f"ref_{result_index}",
          css_styles=REFERENCE_CSS_STYLE
        ):
          st.markdown("No result found.")

  # Append all resources.
  if item.get("resources", None):
    with st.chat_message("ai"):
      for name, link in item["resources"].items():
        st.markdown(f"Resource: [{name}]({link})")

  # Append all query references.
  if item.get("query_references", None):
    with st.chat_message("ai"):
      st.write("References:")
      reference_index = 1
      for reference in dedup_list(item["query_references"], "chunk_id"):
        document_url = render_cloud_storage_url(reference["document_url"])
        document_text = reference["document_text"]
        st.markdown(
            f"**{reference_index}.** [{document_url}]({document_url})")
        markdown_content = re.sub(
            r"<b>(.*?)</b>", r"**\1**", document_text, flags=re.IGNORECASE)

        with stylable_container(
          key=f"ref_{reference_index}",
          css_styles=REFERENCE_CSS_STYLE
        ):
          st.markdown(markdown_content)

        reference_index = reference_index + 1
      st.divider()

  if "plan" in item:
    with st.chat_message("ai"):
      plan_index = 1

      plan = get_plan(item["plan"]["id"])
      logging.info(plan)

      for step in plan["plan_steps"]:
        with stylable_container(
          key=f"ref_{plan_index}",
          css_styles=STEP_CSS_STYLE
        ):
          st.markdown(step["description"])
        plan_index += 1

      plan_id = plan["id"]
      if st.button("Execute this plan", key=f"plan-{plan_id}"):
        with st.spinner("Executing the plan..."):
          output = run_agent_execute_plan(
            plan_id=plan_id,
            chat_id=st.session_state.chat_id,
            auth_token=st.session_state.auth_token)
        st.session_state.messages.append({
          "AIOutput": f"Plan executed successfully. (plan_id={plan_id})",
        })

        agent_process_output = output.get("agent_process_output", "")
        agent_process_output = ansi_escape.sub("", agent_process_output)
        st.session_state.messages.append({
          "AIOutput": agent_process_output,
        })

  agent_logs = item.get("agent_logs", None)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



