def _call()

in courses/writing_prompts/streamlit_gemini_text/app.py [0:0]


    def _call(self,
              prompt: str,
              stop: Optional[List[str]] = None,
              run_manager: Optional[CallbackManagerForLLMRun] = None,
              **kwargs: Any,) -> str:
        """Send *prompt* to Gemini 1.5 Pro and return the first text part.

        Args:
            prompt: The text prompt forwarded verbatim to the model.
            stop: Not supported by this wrapper; any value raises ValueError.
            run_manager: LangChain callback manager (accepted but unused here).
            **kwargs: Extra keyword arguments (accepted but unused here).

        Returns:
            The generated text from the first candidate's first part, or a
            fallback message when the response contains no usable content.

        Raises:
            ValueError: If ``stop`` is provided.
        """
        if stop is not None:
            raise ValueError("stop kwargs are not permitted.")

        gemini_pro_model = GenerativeModel("gemini-1.5-pro")

        # NOTE(review): temperature / top_p / top_k / max_output_tokens are
        # read from module scope (presumably Streamlit sidebar controls
        # defined elsewhere in app.py) — confirm they exist before this runs.
        model_response = gemini_pro_model.generate_content(
            prompt,
            generation_config={"temperature": temperature,
                               "top_p": top_p,
                               "top_k": top_k,
                               "max_output_tokens": max_output_tokens}
        )
        print(model_response)

        # Guard both levels: the original indexed candidates[0] without
        # checking, which raised IndexError (instead of returning the
        # fallback) when the model produced no candidates at all, e.g. a
        # fully safety-blocked prompt.
        if model_response.candidates and model_response.candidates[0].content.parts:
            return model_response.candidates[0].content.parts[0].text
        return "There was an issue with returning a response. Please try again."