in appdev_genai_googlecloud/src/genai-app-firestore/main.py [0:0]
def search():
    """Answer a user's question with RAG over their Firestore embeddings.

    Expects a JSON POST body with non-empty "question" and "user_id" keys.
    Pipeline: embed the question, run a Firestore vector search over the
    user's "embeddings" subcollection, then ask Gemini to answer using the
    closest matching chunk as context.

    Returns:
        flask.Response: JSON with "answer" and "metadata" (page, source)
        on success; otherwise a (message, 400) tuple for invalid input or
        when no relevant documents are found.
    """
    search_id = generate()
    # Validate POST data. get_json() yields None for a missing/non-JSON
    # body; fall back to {} so validation returns 400 instead of raising
    # AttributeError (which would surface as a 500).
    data = request.get_json(silent=True) or {}
    # Truthiness covers both None and empty string.
    if not data.get("question"):
        app.logger.info(f"{search_id}: failed to process search: invalid question")
        return ("A question is invalid (None or length = 0)", 400)
    if not data.get("user_id"):
        app.logger.info(f"{search_id}: failed to process search: invalid user_id")
        return ("Please send a user_id", 400)
    user_id, question = data["user_id"], data["question"]
    app.logger.info(f"{search_id}: start searching by {user_id}: {question}")
    # Generate embedding from question (RETRIEVAL_QUERY task type pairs
    # with document embeddings stored for retrieval).
    app.logger.info(f"{search_id}: start generating embedding from question")
    query_embedding = embed_texts(texts=[question], task="RETRIEVAL_QUERY")[0]
    app.logger.info(f"{search_id}: finished generating embedding from question")
    # Search relevant documents with Firestore Vector Search, scoped to
    # this user's own embeddings subcollection.
    app.logger.info(f"{search_id}: start vector search")
    vector_query = (
        db.collection("users")
        .document(user_id)
        .collection("embeddings")
        .find_nearest(
            vector_field="embedding",
            query_vector=Vector(query_embedding),
            distance_measure=DistanceMeasure.EUCLIDEAN,
            limit=3,
        )
    )
    docs = list(vector_query.stream())
    if not docs:
        app.logger.info(f"{search_id}: no relevant documents found")
        return ("No relevant documents found", 400)
    app.logger.info(f"{search_id}: finished vector search and found {len(docs)} relevant documents")
    # Only the nearest chunk is used as context; its page/source are
    # echoed back to the client as provenance metadata.
    context, page, source = docs[0].get('text'), docs[0].get('page'), docs[0].get('source')
    # Create prompt using the context fetched above to ask Gemini
    template = """
    Answer to the question using the following context.
    If you couldn't find the answer, reply as "I couldn't find the answer."
    Generate the answer in Japanese.
    CONTEXT: {context}
    QUESTION: {question}
    """
    prompt = PromptTemplate(
        input_variables=["context", "question"],
        template=template,
    )
    final_prompt = prompt.format(context=context, question=question)
    # Ask Gemini
    app.logger.info(f"{search_id}: start generating answer by Gemini")
    model = GenerativeModel(model_name=LLM_MODEL)
    response = model.generate_content(final_prompt)
    app.logger.info(f"{search_id}: finished generating answer by Gemini")
    app.logger.info(f"{search_id}: finished searching by {user_id}: {question}")
    # Return the answer from Gemini in JSON format
    return jsonify({
        "answer": response.text,
        "metadata": {
            "page": page,
            "source": source
        }
    })