# genai-for-marketing/backend_apis/app/main.py
def post_text_bison_generate(data: TextGenerateRequest,
) -> TextGenerateResponse:
    """Generate text with the PaLM text-bison model.

    Args:
        data: Request payload carrying:
            model (str): Which model endpoint to use, "latest" | "ga"
                (default "latest").
            prompt (str): Prompt sent to the LLM.
            temperature (float): Sampling temperature (default 0.2).
            top_k (int): Top-k sampling cutoff (default 40).
            top_p (float): Top-p (nucleus) sampling cutoff (default 0.8).
            max_output_tokens (int): Cap on generated tokens (default 1024).

    Returns:
        TextGenerateResponse: The LLM's generated text and the safety
        attributes reported alongside it.

    Raises:
        HTTPException: 400 when the model name is not one of
            "latest" | "ga", or when the prediction call fails.
    """
    # Resolve the requested model to one of the module-level endpoints;
    # reject anything outside the supported set before calling out.
    if data.model == "latest":
        llm = llm_latest
    elif data.model == "ga":
        llm = llm_ga
    else:
        raise HTTPException(
            status_code=400,
            detail="Invalid model name. Options: ga | latest."
        )
    try:
        llm_response = llm.predict(
            prompt=data.prompt,
            max_output_tokens=data.max_output_tokens,
            temperature=data.temperature,
            top_k=data.top_k,
            top_p=data.top_p)
    except Exception as e:
        # Chain the original exception (PEP 3134) so the underlying
        # prediction failure's traceback is preserved in server logs
        # instead of being discarded; the client-visible 400 is unchanged.
        raise HTTPException(status_code=400, detail=str(e)) from e
    else:
        return TextGenerateResponse(
            text=llm_response.text,
            safety_attributes=llm_response.safety_attributes
        )