def output_fn()

in source/inference.py [0:0]


import logging
import pickle

import d2_deserializer

logger = logging.getLogger(__name__)


def output_fn(prediction, response_content_type):
    """Serialize a Detectron2 prediction into the response format the client asked for."""
    logger.info("Processing output predictions...")
    logger.debug(f"Output object type is {type(prediction)}")

    try:
        logger.info(f"response_content_type: {response_content_type}")
        if "json" in response_content_type:
            logger.debug('JSON processing -- d2_deserializer')
            output = d2_deserializer.d2_to_json(prediction)

        elif "detectron2" in response_content_type:
            logger.debug("check prediction before pickling")
            logger.debug(type(prediction))
            
            instances = prediction['instances']
            rle_masks = d2_deserializer.convert_masks_to_rle(instances.get_fields()["pred_masks"])
            instances.set("pred_masks_rle", rle_masks)
            instances.remove('pred_masks')
            
            pickled_outputs = pickle.dumps(prediction)
            stream = io.BytesIO(pickled_outputs)
            output = stream.getvalue()
            
        else:
            raise ValueError(f"Unsupported response content type: {response_content_type}")
        
    except Exception as e:
        logger.error("Output processing failed...")
        logger.error(e)
        return None
    
    logger.info("Output processing completed")
    logger.debug(f"Predicted output type is {type(output)}")

    return output
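
output_fn follows the SageMaker inference-handler signature, so its return value becomes the HTTP response body of a deployed endpoint. Below is a minimal sketch of how a client might consume the pickled branch, assuming the model is served from a SageMaker endpoint; the endpoint name, input content type, and test image path are illustrative assumptions and are not part of source/inference.py:

import pickle

import boto3

runtime = boto3.client("sagemaker-runtime")

with open("test.jpg", "rb") as f:           # assumption: input_fn accepts raw image bytes
    payload = f.read()

response = runtime.invoke_endpoint(
    EndpointName="detectron2-endpoint",     # assumption: your deployed endpoint name
    ContentType="application/x-image",      # assumption: whatever input_fn expects
    Accept="application/x-detectron2",      # contains "detectron2", so output_fn pickles
    Body=payload,
)

prediction = pickle.loads(response["Body"].read())
rle_masks = prediction["instances"].get_fields()["pred_masks_rle"]

Unpickling the payload requires detectron2 installed on the client, since the prediction dict contains Instances objects. Requesting a JSON Accept type instead routes through d2_deserializer.d2_to_json and returns the JSON representation of the prediction.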