def model_fn()

in src/inference_pytorch.py [0:0]


import torch
import torchvision

# computation device used by the inference handler
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")


def model_fn(model_dir=None):
    '''
    Loads the model into memory from storage and returns the model.
    '''
    # model_dir is unused here because the weights are downloaded pretrained
    model = torchvision.models.detection.fasterrcnn_resnet50_fpn(pretrained=True)
    # move the model onto the computation device and switch to inference mode
    model = model.eval().to(device)
    return model
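
The handler can be exercised locally before deploying. The snippet below is a minimal sketch, not part of the source file; it assumes model_fn from src/inference_pytorch.py is importable and that no artifacts are required from model_dir, since the weights come from torchvision's pretrained download.

# Hypothetical local sanity check for model_fn()
import torch

from src.inference_pytorch import model_fn

model = model_fn()
device = next(model.parameters()).device
dummy = [torch.rand(3, 224, 224).to(device)]   # Faster R-CNN expects a list of CHW tensors
with torch.no_grad():
    predictions = model(dummy)
print(predictions[0].keys())                   # dict_keys(['boxes', 'labels', 'scores'])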