def inference()

in miscellaneous/distributed_tensorflow_mask_rcnn/container-serving/resources/predict.py [0:0]


import base64
import json
import tempfile

import cv2
from flask import Response, request

# Note: the Flask app object, the route wiring for this handler, and the
# MaskRCNNService class are defined elsewhere in predict.py.


def inference():
    # Reject any request that is not JSON.
    if not request.is_json:
        result = {"error": "Content type is not application/json"}
        print(result)
        return Response(response=json.dumps(result), status=415, mimetype="application/json")

    try:
        content = request.get_json()
        img_id = content["img_id"]

        # Decode the base64-encoded image payload into a temporary file,
        # load it with OpenCV, and run the Mask R-CNN prediction.
        with tempfile.NamedTemporaryFile() as fh:
            img_data_string = content["img_data"]
            img_data_bytes = bytearray(img_data_string, encoding="utf-8")
            fh.write(base64.decodebytes(img_data_bytes))
            fh.seek(0)
            img = cv2.imread(fh.name, cv2.IMREAD_COLOR)

            pred = MaskRCNNService.predict(img=img, img_id=img_id)
            return Response(response=json.dumps(pred), status=200, mimetype="application/json")
    except Exception as e:
        print(str(e))
        result = {"error": "Internal server error"}
        return Response(response=json.dumps(result), status=500, mimetype="application/json")
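
The handler expects a JSON body containing an "img_id" and a base64-encoded "img_data" string. Below is a minimal client sketch; the /invocations URL, port, and test image path are assumptions based on the conventional SageMaker serving setup and should be adjusted to the actual deployment.

import base64
import json

import requests

# Read a local image and base64-encode it, matching the format inference() expects.
with open("test.jpg", "rb") as f:  # hypothetical test image path
    img_data = base64.b64encode(f.read()).decode("utf-8")

payload = {"img_id": "test.jpg", "img_data": img_data}

# Assumed endpoint URL; requests sets Content-Type: application/json via json=.
resp = requests.post("http://localhost:8080/invocations", json=payload)
print(resp.status_code)
print(resp.json())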