def inference()

in miscellaneous/distributed_tensorflow_mask_rcnn/container-serving-optimized/resources/predict.py [0:0]


def inference():
    """Handle one JSON prediction request against the Mask R-CNN service.

    Expected JSON body:
        img_id:          identifier passed through to the predictor.
        img_data:        base64-encoded image bytes (utf-8 string).
        rpn:             optional bool, default False.
        score_threshold: optional float, default 0.8.
        mask_threshold:  optional float, default 0.5.

    Returns a Flask ``Response``:
        200 with the JSON-encoded prediction on success,
        415 if the request content type is not application/json,
        500 (with a generic message) on any internal failure.
    """
    if not request.is_json:
        result = {"error": "Content type is not application/json"}
        print(result)
        # Fix: serialize the dict — Response(response=dict) would emit the
        # dict's repr (single-quoted), which is not valid JSON despite the
        # declared mimetype.
        return Response(
            response=json.dumps(result), status=415, mimetype="application/json"
        )

    try:
        content = request.get_json()
        img_id = content["img_id"]

        # Round-trip the base64 payload through a temp file so cv2 can
        # decode it by path; the file is deleted when the `with` exits.
        with tempfile.NamedTemporaryFile() as fh:
            img_data_bytes = bytearray(content["img_data"], encoding="utf-8")
            fh.write(base64.decodebytes(img_data_bytes))
            # Explicit flush: cv2.imread reopens fh.name independently, so
            # buffered bytes must be on disk first (the original relied on
            # seek(0) flushing as a side effect).
            fh.flush()
            img = cv2.imread(fh.name, cv2.IMREAD_COLOR)

        # Optional knobs with defaults (replaces three try/except-KeyError
        # blocks with the idiomatic dict.get).
        rpn = content.get("rpn", False)
        score_threshold = content.get("score_threshold", 0.8)
        mask_threshold = content.get("mask_threshold", 0.5)

        pred = MaskRCNNService.predict(
            img=img,
            img_id=img_id,
            rpn=rpn,
            score_threshold=score_threshold,
            mask_threshold=mask_threshold,
        )

        return Response(
            response=json.dumps(pred), status=200, mimetype="application/json"
        )
    except Exception as e:
        # Boundary handler: log and mask details from the client.
        print(str(e))
        result = {"error": "Internal server error"}
        # Fix: json.dumps here too, for the same reason as the 415 branch.
        return Response(
            response=json.dumps(result), status=500, mimetype="application/json"
        )