in microservices/classification_service/src/utils/classification/vertex_predicitons.py [0:0]
def get_classification_predications(
self,
endpoint_id: str,
filename: str,
api_endpoint: str = f"{REGOIN}-aiplatform.googleapis.com"):
""" Get prediction on images.
Returns:
_type_: _description_
Sample JSON request
{
"instances": [
{
"key": "test",
"image_bytes": {
"b64": "<YOUR_BASE64_IMG_DATA>"
}
}
],
"parameters": {
"confidenceThreshold": 0.5,
"maxPredictions": 5
}
}
"""
# The AI Platform services require regional API endpoints.
client_options = {"api_endpoint": api_endpoint}
# Initialize client that will be used to create and send requests.
# This client only needs to be created once, and
# can be reused for multiple requests.
client = aiplatform.gapic.PredictionServiceClient(
client_options=client_options)
with open(filename, "rb") as f:
file_content = f.read()
print(f"filename = {filename}")
# The format of each instance should conform to the deployed
# model's prediction input schema.
encoded_content = base64.b64encode(file_content).decode("utf-8")
print(f"encoded_content size: {len(encoded_content)}")
instances = [{"key": filename, "image_bytes": {"b64": encoded_content}}]
parameters = {"confidenceThreshold": 0.5, "maxPredictions": 5}
endpoint = client.endpoint_path(
project=self.project_id, location=self.loc, endpoint=endpoint_id)
print("endpoint")
print(json.dumps(endpoint))
print("parameters")
print(parameters)
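# Send the prediction request to the model deployed behind the endpoint.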
response = client.predict(
endpoint=endpoint, instances=instances, parameters=parameters)
print("response")
print(" deployed_model_id:", response.deployed_model_id)
predictions = response.predictions
return dict(predictions[0])
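
# --- Usage sketch (illustrative only, not part of the original file) ---
# Shows how the method above might be called. The class name
# `ClassificationPredictionClient` and the way `project_id` / `loc` are
# populated are assumptions for illustration; only the method itself is
# confirmed by the source. Replace the endpoint ID and file path with
# real values before running.
#
#   predictor = ClassificationPredictionClient()      # hypothetical class name
#   prediction = predictor.get_classification_predications(
#       endpoint_id="1234567890123456789",            # placeholder endpoint ID
#       filename="sample_page.png")                   # placeholder image path
#   print(prediction)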