in train/CustomModel.py
import tensorflow as tf
from tensorflow.keras.layers import Bidirectional, Dense, LSTM, LayerNormalization

def define_network(embedding_layer):
    '''
    Define a bidirectional LSTM network with an attention layer.

    The custom `attention` layer and the MAX_SEQUENCE_LENGTH, MAX_NB_WORDS
    and EMBEDDING_DIM constants are defined elsewhere in this module.
    '''
    sequence_input = tf.keras.Input(shape=(MAX_SEQUENCE_LENGTH,), dtype='int32')
    # If no pretrained embedding layer is given (the "none" sentinel),
    # train an embedding from scratch
    if embedding_layer == "none":
        embedded_sequences = tf.keras.layers.Embedding(
            MAX_NB_WORDS, EMBEDDING_DIM, input_length=MAX_SEQUENCE_LENGTH)(sequence_input)
    else:
        embedded_sequences = embedding_layer(sequence_input)
    # Bidirectional LSTM over the embeddings; return_sequences=True keeps the
    # per-timestep hidden states that the attention layer will pool over
    lstm = Bidirectional(LSTM(100, dropout=0.2, recurrent_dropout=0.2,
                              return_sequences=True))(embedded_sequences)
    lstm = LayerNormalization()(lstm)
    # Attention collapses the sequence of hidden states into a single vector
    attention_lstm = attention(return_sequences=False, activation='tanh')(lstm)
    # Six sigmoid units: one independent probability per output label
    s = Dense(6, activation='sigmoid')(attention_lstm)
    model_LSTM = tf.keras.Model(inputs=[sequence_input], outputs=[s])
    model_LSTM.summary()  # summary() prints itself; wrapping it in print() would print None
    return model_LSTM
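
The `attention` layer used above is a custom Keras layer defined elsewhere in the repo. As a reference point, here is a minimal sketch of the common score-softmax-weighted-sum pattern such a layer typically implements; the weight shapes, initializers, and activation handling below are assumptions, not the repo's actual code:

class attention(tf.keras.layers.Layer):
    '''Hypothetical sketch of the custom attention layer assumed by define_network.'''
    def __init__(self, return_sequences=False, activation='tanh', **kwargs):
        super().__init__(**kwargs)
        self.return_sequences = return_sequences
        self.activation = tf.keras.activations.get(activation)

    def build(self, input_shape):
        # One learned score per timestep: project each hidden state to a scalar
        self.W = self.add_weight(name='att_weight', shape=(input_shape[-1], 1),
                                 initializer='glorot_uniform', trainable=True)
        self.b = self.add_weight(name='att_bias', shape=(input_shape[1], 1),
                                 initializer='zeros', trainable=True)
        super().build(input_shape)

    def call(self, x):
        # e: (batch, timesteps, 1) unnormalized attention scores
        e = self.activation(tf.matmul(x, self.W) + self.b)
        a = tf.nn.softmax(e, axis=1)          # normalize scores over timesteps
        output = x * a                        # weight each hidden state
        if self.return_sequences:
            return output
        return tf.reduce_sum(output, axis=1)  # pool to a single context vector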
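
A minimal usage sketch (not part of the original file): it assumes the module-level constants are in scope, uses a placeholder `embedding_matrix` for the pretrained case, and pairs the six independent sigmoid outputs with a binary cross-entropy loss.

import numpy as np

# Case 1: no pretrained embeddings -- pass the "none" sentinel.
model = define_network("none")
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

# Case 2: wrap a (hypothetical) pretrained weight matrix in a frozen
# Embedding layer and pass that in instead.
embedding_matrix = np.random.rand(MAX_NB_WORDS, EMBEDDING_DIM)  # placeholder weights
pretrained = tf.keras.layers.Embedding(MAX_NB_WORDS, EMBEDDING_DIM,
                                       weights=[embedding_matrix],
                                       input_length=MAX_SEQUENCE_LENGTH,
                                       trainable=False)
model = define_network(pretrained)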