# load_model()
#
# in run.py [0:0]


def load_model(question_generator):
    """
    Build and return the question-generation model for the configured embedding.

    Selection order: the keyword model when ``datasets.use_keyword`` is set,
    otherwise dispatch on which substring ('glove', 'elmo', 'bert') appears in
    ``datasets.embedding_file``. If nothing matches, log an error and exit.

    :param question_generator: Class containing all question generator modules.
        Defined in question_generator_model.py
    :return: model definition file
    """
    # Keyword model takes priority over any embedding-based model.
    if question_generator.datasets.use_keyword:
        return question_generator.build_keyword_model()

    embedding_file = question_generator.datasets.embedding_file
    if 'glove' in embedding_file:
        return question_generator.build_glove_model()
    if 'elmo' in embedding_file:
        return question_generator.build_elmo_model()
    if 'bert' in embedding_file:
        bert_path = "https://tfhub.dev/google/bert_uncased_L-12_H-768_A-12/1"
        # BERT needs its own tokenizer, instantiated from the TF-Hub module
        # before the model itself can be built.
        question_generator.tokenizer = create_tokenizer_from_hub_module(bert_path)
        return question_generator.build_bert_model()

    logging.error('Embedding model not found')
    # sys.exit instead of the builtin exit(): the latter is injected by the
    # site module and is not guaranteed to exist in all runtime environments.
    import sys
    sys.exit(-1)