def predict()

in src/fmeval/model_runners/bedrock_model_runner.py


    def predict(self, prompt: str) -> Union[Tuple[Optional[str], Optional[float]], List[float]]:
        """
        Invoke the Bedrock model and parse the model response.

        :param prompt: Input data for which you want the model to provide inference.
        :return: The extracted embedding (a list of floats) when an embedding JMESPath expression
            is configured; otherwise a tuple of the extracted model output and log probability,
            either of which may be None.
        """
        # Render the prompt into the model-specific request payload and serialize it to JSON.
        composed_data = self._composer.compose(prompt)
        body = json.dumps(composed_data)
        # Call the Bedrock InvokeModel API and parse the JSON response body.
        response = self._bedrock_runtime_client.invoke_model(
            body=body, modelId=self._model_id, accept=self._accept_type, contentType=self._content_type
        )
        model_output = json.loads(response.get("body").read())

        # Embedding models: if an embedding JMESPath expression is configured, extract the
        # embedding vector and return it directly.
        embedding = (
            self._extractor.extract_embedding(data=model_output, num_records=1)
            if self._extractor.embedding_jmespath_expression
            else None
        )
        if embedding:
            return embedding

        # Text models: extract the generated text and the log probability, each only if the
        # corresponding JMESPath expression is configured.
        output = (
            self._extractor.extract_output(data=model_output, num_records=1)
            if self._extractor.output_jmespath_expression
            else None
        )
        log_probability = (
            self._extractor.extract_log_probability(data=model_output, num_records=1)
            if self._extractor.log_probability_jmespath_expression
            else None
        )
        return output, log_probability
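
Usage sketch (not part of this function): the snippet below shows how predict() is typically
reached. The model ID, content template, and "completion" JMESPath expression are illustrative
assumptions; the call assumes valid AWS credentials and the documented BedrockModelRunner
constructor parameters (model_id, content_template, output).

    from fmeval.model_runners.bedrock_model_runner import BedrockModelRunner

    # Illustrative configuration; swap in your own model ID and request template.
    model_runner = BedrockModelRunner(
        model_id="anthropic.claude-v2",
        content_template='{"prompt": $prompt, "max_tokens_to_sample": 500}',
        output="completion",
    )

    # No embedding expression is configured, so predict() returns (output, log_probability).
    output, log_probability = model_runner.predict("What is the capital of France?")

The extraction step is driven by JMESPath expressions. As a rough sketch of what extract_output()
does conceptually (the response payload shown is a hypothetical Claude-style body, not taken from
this code):

    import jmespath

    # Hypothetical parsed Bedrock response body for a text-completion model.
    model_output = {"completion": " Paris.", "stop_reason": "stop_sequence"}

    # The extractor evaluates the configured expression ("completion" here) against the JSON.
    generated_text = jmespath.search("completion", model_output)  # " Paris."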