backend-apis/app/routers/p5_contact_center_analyst.py [119:210]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@router.post(path="/generate-conversations-insights")
def generate_insights_conversations(
    data: GenerateConversationsInsightsRequest,
) -> GenerateConversationsInsightsResponse:
    """
    # Generate insights from conversations.
     - Summary
     - Insights (what went well and what did not)
     - Pending tasks
     - Next best action

    ## Request Body [GenerateConversationsInsightsRequest]:
    **conversations**: *list*
    - Conversations to generate the insights from

    ## Response Body [GenerateConversationsInsightsResponse]:
    **summary**: *string*
    - Summary of the conversations

    **entities**: *list*
    - Entities extracted with Cloud NL API

    **insights**: *string*
    - Insights from the conversations

    **pending_tasks**: *string*
    - Pending tasks from the conversations

    **next_best_action**: *string*
    - Next best action extracted from the conversations
    """
    if len(data.conversations) > 1:
        prompt_summary = config["search-persona5"][
            "prompt_summary_multi_conversations"
        ]
        prompt_insights = config["search-persona5"][
            "prompt_insights_multi_conversations"
        ]
        prompt_tasks = config["search-persona5"][
            "prompt_pending_tasks_multi_conversations"
        ]
        prompt_nbs = config["search-persona5"][
            "prompt_nbs_multi_conversations"
        ]
    else:
        prompt_summary = config["search-persona5"][
            "prompt_summary_conversation"
        ]
        prompt_insights = config["search-persona5"][
            "prompt_insights_conversation"
        ]
        prompt_tasks = config["search-persona5"][
            "prompt_pending_tasks_conversation"
        ]
        prompt_nbs = config["search-persona5"]["prompt_nbs_conversation"]

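    # Serialize all conversations once; each prompt template is expected to
    # contain a single positional {} placeholder filled by .format(input_text).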
    input_text = json.dumps({"conversations": data.conversations})

    try:
        summary = utils_gemini.generate_gemini_pro_text(
            prompt=prompt_summary.format(input_text)
        )
        insights = utils_gemini.generate_gemini_pro_text(
            prompt=prompt_insights.format(input_text)
        )
        pending_tasks = utils_gemini.generate_gemini_pro_text(
            prompt=prompt_tasks.format(input_text)
        )
        next_best_action = utils_gemini.generate_gemini_pro_text(
            prompt=prompt_nbs.format(input_text)
        )
    except GoogleAPICallError as e:
        raise HTTPException(
            status_code=400,
            detail=f"Error calling Vertex AI PaLM API. " f"{str(e)}",
        ) from e

    try:
        entities = utils_cloud_nlp.nlp_analyze_entities(input_text)
    except GoogleAPICallError as e:
        raise HTTPException(
            status_code=400,
            detail=f"Error calling Google Cloud NL API. " f"{str(e)}",
        ) from e

    return GenerateConversationsInsightsResponse(
        summary=summary,
        entities=entities,
        insights=insights,
        pending_tasks=pending_tasks,
        next_best_action=next_best_action,
    )
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
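The Pydantic request and response models referenced in the docstring are defined elsewhere in the repository and are not part of this excerpt. A minimal sketch consistent with the fields the handler reads and returns could look like the following; the field types are assumptions inferred from the endpoint code, not the actual definitions in the app's models module.

from pydantic import BaseModel


class GenerateConversationsInsightsRequest(BaseModel):
    # Assumed: each conversation is an arbitrary JSON-serializable structure.
    conversations: list


class GenerateConversationsInsightsResponse(BaseModel):
    # Assumed: mirrors the keyword arguments used in the endpoint's return statement.
    summary: str
    entities: list
    insights: str
    pending_tasks: str
    next_best_action: str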



backend-apis/app/routers/p6_field_service_agent.py [313:404]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@router.post(path="/generate-conversations-insights")
def generate_insights_conversations(
    data: GenerateConversationsInsightsRequest,
) -> GenerateConversationsInsightsResponse:
    """
    # Generate insights from conversations.
     - Summary
     - Insights (what went well and what did not)
     - Pending tasks
     - Next best action

    ## Request Body [GenerateConversationsInsightsRequest]:
    **conversations**: *list*
    - Conversations to generate the insights from

    ## Response Body [GenerateConversationsInsightsResponse]:
    **summary**: *string*
    - Summary of the conversations

    **entities**: *list*
    - Entities extracted with Cloud NL API

    **insights**: *string*
    - Insights from the conversations

    **pending_tasks**: *string*
    - Pending tasks from the conversations

    **next_best_action**: *string*
    - Next best action extracted from the conversations
    """
    if len(data.conversations) > 1:
        prompt_summary = config["search-persona5"][
            "prompt_summary_multi_conversations"
        ]
        prompt_insights = config["search-persona5"][
            "prompt_insights_multi_conversations"
        ]
        prompt_tasks = config["search-persona5"][
            "prompt_pending_tasks_multi_conversations"
        ]
        prompt_nbs = config["search-persona5"][
            "prompt_nbs_multi_conversations"
        ]
    else:
        prompt_summary = config["search-persona5"][
            "prompt_summary_conversation"
        ]
        prompt_insights = config["search-persona5"][
            "prompt_insights_conversation"
        ]
        prompt_tasks = config["search-persona5"][
            "prompt_pending_tasks_conversation"
        ]
        prompt_nbs = config["search-persona5"]["prompt_nbs_conversation"]

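    # Serialize all conversations once; each prompt template is expected to
    # contain a single positional {} placeholder filled by .format(input_text).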
    input_text = json.dumps({"conversations": data.conversations})

    try:
        summary = utils_gemini.generate_gemini_pro_text(
            prompt=prompt_summary.format(input_text)
        )
        insights = utils_gemini.generate_gemini_pro_text(
            prompt=prompt_insights.format(input_text)
        )
        pending_tasks = utils_gemini.generate_gemini_pro_text(
            prompt=prompt_tasks.format(input_text)
        )
        next_best_action = utils_gemini.generate_gemini_pro_text(
            prompt=prompt_nbs.format(input_text)
        )
    except GoogleAPICallError as e:
        raise HTTPException(
            status_code=400,
            detail=f"Error calling Vertex AI PaLM API. " f"{str(e)}",
        ) from e

    try:
        entities = utils_cloud_nlp.nlp_analyze_entities(input_text)
    except GoogleAPICallError as e:
        raise HTTPException(
            status_code=400,
            detail=f"Error calling Google Cloud NL API. " f"{str(e)}",
        ) from e

    return GenerateConversationsInsightsResponse(
        summary=summary,
        entities=entities,
        insights=insights,
        pending_tasks=pending_tasks,
        next_best_action=next_best_action,
    )
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
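Both routers delegate text generation to utils_gemini.generate_gemini_pro_text, which is not included in this excerpt. Assuming it is a thin wrapper around the Vertex AI Python SDK, a minimal sketch might look like this; the model name, the default parameters, and the assumption that vertexai.init() is called at application startup are all hypothetical.

from vertexai.generative_models import GenerativeModel


def generate_gemini_pro_text(prompt: str, model_name: str = "gemini-1.0-pro") -> str:
    # Single-turn text generation against a Gemini model on Vertex AI.
    # Project/location setup (vertexai.init) is assumed to happen elsewhere at startup.
    model = GenerativeModel(model_name)
    response = model.generate_content(prompt)
    return response.text

A caller would then POST a JSON body such as {"conversations": [...]} to either /generate-conversations-insights route; when the list holds more than one conversation, the multi-conversation prompt templates are selected.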



