# model_fn — model-loading entry point, from models/model-a/src/code/inference.py

def model_fn(model_dir):
    """Load the serialized model and vocabulary from *model_dir*.

    Invoked once per worker process by the hosting container to
    deserialize the model artifacts.

    Args:
        model_dir: Path to the directory containing the saved model
            file (``_model_file_name``) and vocabulary file
            (``_vocab_file_name``).

    Returns:
        dict: ``{'model': <loaded model>, 'dictionary': <loaded vocab>}``.
    """
    # current_thread()/.name replace the camelCase aliases
    # (currentThread()/getName()) deprecated since Python 3.10.
    # Use the module logger (not print) for consistent log routing,
    # with lazy %s args so formatting is skipped when disabled.
    logger.info('model_fn: thread id-%s', threading.current_thread().name)
    logger.info('model_fn: process id-%s', os.getpid())
    logger.info('model_fn: Loading the model-%s', model_dir)

    file_list = os.listdir(model_dir)
    logger.info('model_fn: model_dir list-%s', file_list)

    # map_location lets CPU-only hosts load checkpoints that were
    # saved on a GPU; on GPU hosts tensors go straight to CUDA.
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    model = torch.load(os.path.join(model_dir, _model_file_name),
                       map_location=device)
    dictionary = torch.load(os.path.join(model_dir, _vocab_file_name),
                            map_location=device)

    return {'model': model, 'dictionary': dictionary}