in product_matching/hyperboloid.py [0:0]
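# The imports below are an assumed, minimal set for this excerpt; the file's
# real import block is not shown. ManifoldEmbedding and SelfAttention are
# custom layers defined elsewhere in this repo.
from tensorflow.keras.layers import (
    Input, Dense, TimeDistributed, BatchNormalization, Dropout,
    Lambda, Reshape, Bidirectional, LSTM,
)
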
def build(self):
"""
Build model structure.
Hyperboloid use Joint learning arthitecture.
"""
print("Query")
query_input = Input(
name='text_left',
shape=self._params['input_shapes'][0],
)
asin_input = Input(
name='text_right',
shape=self._params['input_shapes'][1]
)
#query_input = Input(shape=(self.hyper_params["MAX_LEN"],), dtype='float32')
print(query_input)
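    # ManifoldEmbedding is the repo's custom embedding layer; judging by its
    # signature it behaves like keras.layers.Embedding but places the learned
    # vectors on the manifold named in self._params["manifold"].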
    query = ManifoldEmbedding(
        self._params["vocab_size"],
        self._params["emb_dim"],
        trainable=True,
        mask_zero=True,
        manifold=self._params["manifold"],
    )(query_input)
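    # Map every query token embedding to a box_dim-dimensional "box"
    # representation; the sigmoid keeps each coordinate in (0, 1), and
    # TimeDistributed applies the same Dense layer to every timestep.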
    box = TimeDistributed(
        Dense(self._params["box_dim"], activation='sigmoid')
    )(query)
    box_bn = BatchNormalization()(box)
    box_dropout = Dropout(self._params["dropout"])(box_bn)
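    # intersection_layer is a helper defined elsewhere in this class. It is
    # fed the box tensor paired with itself, so it appears to compute the
    # pairwise intersections of the query boxes; the Reshape below suggests
    # it returns them flattened, and restores a (num_boxes, box_dim) layout.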
    intersection = Lambda(self.intersection_layer)([box_dropout, box_dropout])
    reshape_intersection = Reshape(
        (intersection.shape[1] // self._params["box_dim"], self._params["box_dim"])
    )(intersection)
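    # SelfAttention is the repo's custom attention layer;
    # return_sequences=True keeps one attended vector per box rather than
    # pooling the sequence down to a single vector.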
    query_attention = SelfAttention(return_sequences=True)(reshape_intersection)
    query_attention_bn = BatchNormalization()(query_attention)
    query_attention_dropout = Dropout(self._params["dropout"])(query_attention_bn)
    query_boxes = Dense(self._params["box_dim"])(query_attention_dropout)
print("\nASIN")
#asin_input = Input(shape=(self.hyper_params["MAX_LEN"],), dtype='float32')
print(asin_input)
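    # The ASIN side gets its own ManifoldEmbedding instance: the two towers
    # use the same vocab_size and emb_dim but do not share weights.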
    asin = ManifoldEmbedding(
        self._params["vocab_size"],
        self._params["emb_dim"],
        trainable=True,
        mask_zero=True,
        manifold=self._params["manifold"],
    )(asin_input)
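    # Bidirectional LSTM with emb_dim // 2 units per direction, so the
    # concatenated forward/backward output keeps the emb_dim width.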
    lstm = Bidirectional(LSTM(self._params["emb_dim"] // 2, return_sequences=True))(asin)
    attention = SelfAttention(return_sequences=True)(lstm)
    attention_bn = BatchNormalization()(attention)
    attention_dropout = Dropout(self._params["dropout"])(attention_bn)
    dense_attention = Dense(self._params["box_dim"] // 2)(attention_dropout)
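    # crop_box(i, None, None) and hyperbolic_add() are helpers defined
    # elsewhere in this class; reading the expression below, crop_box appears
    # to slice out timestep i, and hyperbolic_add to fold the per-token
    # vectors into a single ASIN embedding with a hyperbolic (Mobius-style)
    # addition rather than a plain Euclidean sum.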
    asin_embedding = self.hyperbolic_add()(
        [self.crop_box(i, None, None)(dense_attention) for i in range(dense_attention.shape[1])]
    )
print("\nJoin")
    merged_layer = Lambda(self.merge_layer)([query_boxes, asin_embedding])
    merged_layer_bn = BatchNormalization()(merged_layer)
    merged_layer_dropout = Dropout(self._params["dropout"])(merged_layer_bn)