code/inference.py [43:72]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    nlp_model.to(device)
    # Bundle the model and tokenizer together so predict_fn receives both
    model = {'model': nlp_model, 'tokenizer': tokenizer}

    return model

# Deserialize the Invoke request body into an object we can perform prediction on
def input_fn(serialized_input_data, content_type='text/plain'):
    logger.info('Deserializing the input data.')
    if content_type == 'text/plain':
        # The embedding helper expects a list of sentences, so wrap the decoded string
        return [serialized_input_data.decode('utf-8')]
    raise ValueError('Requested unsupported ContentType in content_type: {}'.format(content_type))

# Perform prediction on the deserialized object, with the loaded model
def predict_fn(input_object, model):
    logger.info("Calling model")
    start_time = time.time()
    sentence_embeddings = embed_tformer(model['model'], model['tokenizer'], input_object)
    print("--- Inference time: %s seconds ---" % (time.time() - start_time))
    response = sentence_embeddings[0].tolist()
    return response

# Serialize the prediction result into the desired response content type
def output_fn(prediction, accept):
    logger.info('Serializing the generated output.')
    if accept == 'application/json':
        output = json.dumps(prediction)
        return output
    raise ValueError('Requested unsupported ContentType in Accept: {}'.format(accept))
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
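The handlers above call embed_tformer, which is defined earlier in inference.py and falls outside this excerpt. For context, below is a minimal sketch of what such a helper typically looks like with a Hugging Face model and tokenizer: tokenize, run the model, then mean-pool the token embeddings with the attention mask. The function name matches the excerpt, but the body is an assumption, not the file's actual implementation.

    import torch

    # Hypothetical reconstruction -- the real embed_tformer lives earlier in
    # inference.py. This is the standard mean-pooling recipe for producing
    # sentence embeddings from a Hugging Face transformer.
    def embed_tformer(model, tokenizer, sentences):
        # Tokenize the batch, padding/truncating to a fixed length
        encoded_input = tokenizer(sentences, padding=True, truncation=True,
                                  max_length=128, return_tensors='pt')
        # Move inputs to the same device the model lives on
        encoded_input = encoded_input.to(model.device)
        with torch.no_grad():
            model_output = model(**encoded_input)
        # Mean-pool token embeddings, ignoring padding via the attention mask
        token_embeddings = model_output[0]
        mask = encoded_input['attention_mask'].unsqueeze(-1).float()
        return (token_embeddings * mask).sum(1) / mask.sum(1).clamp(min=1e-9)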

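Once this script is deployed behind a SageMaker endpoint, the handler chain is exercised by sending a text/plain body and accepting application/json, matching the content types checked in input_fn and output_fn. A minimal client sketch with boto3; the endpoint name is a placeholder, not one defined in this repo:

    import json
    import boto3

    runtime = boto3.client('sagemaker-runtime')

    # 'sentence-embedding-endpoint' is a placeholder -- substitute the name
    # of the endpoint this model is actually deployed to.
    response = runtime.invoke_endpoint(
        EndpointName='sentence-embedding-endpoint',
        ContentType='text/plain',       # routed to input_fn
        Accept='application/json',      # routed to output_fn
        Body='A sentence to embed.',
    )
    embedding = json.loads(response['Body'].read())
    print(len(embedding))  # dimensionality of the sentence embedding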