def main()

in tensorflow_script_mode_local_model_inference/tensorflow_script_mode_local_model_inference.py


from sagemaker.local import LocalSession
from sagemaker.tensorflow import TensorFlowModel

# Placeholder execution role ARN. Local mode does not validate the role, so any
# well-formed ARN works; the value below is illustrative.
DUMMY_IAM_ROLE = 'arn:aws:iam::111111111111:role/service-role/AmazonSageMaker-ExecutionRole'


def main():
    session = LocalSession()
    session.config = {'local': {'local_code': True}}

    role = DUMMY_IAM_ROLE
    model_dir = 's3://aws-ml-blog/artifacts/tensorflow-script-mode-local-model-inference/model.tar.gz'

    # Pre-trained model artifact from S3 plus the custom inference handler in ./code
    model = TensorFlowModel(
        entry_point='inference.py',
        source_dir='./code',
        role=role,
        model_data=model_dir,
        framework_version='2.3.0',
        sagemaker_session=session,  # use the LocalSession configured above
    )

    print('Deploying endpoint in local mode')
    print(
        'Note: if launching for the first time in local mode, container image download might take a few minutes to complete.')
    predictor = model.deploy(
        initial_instance_count=1,
        instance_type='local',
    )

    print('Endpoint deployed in local mode')

    # The request payload carries only S3 coordinates; the entry-point script is
    # expected to fetch and deserialize the actual instances (see the sketch below).
    dummy_inputs = {
        'bucket_name': 'aws-ml-blog',
        'object_name': 'artifacts/tensorflow-script-mode-local-model-inference/instances.json'
    }

    predictions = predictor.predict(dummy_inputs)
    print("predictions: {}".format(predictions))

    print('About to delete the endpoint')
    predictor.delete_endpoint()  # the predictor already knows its own endpoint name
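
The entry point inference.py under ./code is not included in this excerpt. With TensorFlowModel, serving is handled by the TensorFlow Serving container, and a custom entry point typically implements input_handler/output_handler. The sketch below is an assumption about what such a handler could look like for this example: it treats the request body as the bucket_name/object_name pair sent above, downloads instances.json from S3, and forwards its contents to TensorFlow Serving. The boto3 call and the assumed JSON layout are illustrative, not taken from the original script.

# code/inference.py -- hypothetical sketch, not the original file
import json

import boto3


def input_handler(data, context):
    # The request body is the small JSON dict sent by predictor.predict();
    # it only points at the real payload, which lives in S3.
    coords = json.loads(data.read().decode('utf-8'))
    s3 = boto3.client('s3')
    obj = s3.get_object(Bucket=coords['bucket_name'], Key=coords['object_name'])
    # instances.json is assumed to already be in TensorFlow Serving's
    # {"instances": [...]} request format, so it can be forwarded as-is.
    return obj['Body'].read().decode('utf-8')


def output_handler(response, context):
    # Pass the TensorFlow Serving response straight back to the caller.
    return response.content, context.accept_header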