in tensorflow_addons/seq2seq/attention_wrapper.py [0:0]
def build(self, input_shape):
    if not self._memory_initialized:
        # This branch sets up the memory, which consists of the memory
        # tensor and an optional memory_sequence_length. Build the
        # memory_layer with the memory shape.
        if self.memory_layer is not None and not self.memory_layer.built:
            if isinstance(input_shape, list):
                self.memory_layer.build(input_shape[0])
            else:
                self.memory_layer.build(input_shape)
    else:
        # The input_shape should be [query.shape, state.shape]. Use the
        # query shape to build the query layer.
        if self.query_layer is not None and not self.query_layer.built:
            self.query_layer.build(input_shape[0])
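
For context, here is a minimal sketch of how the two build paths are exercised in practice. It assumes tfa.seq2seq.BahdanauAttention (defined in this same module), which has both a memory_layer and a query_layer; the batch size, shapes, and variable names are illustrative only.

import tensorflow as tf
import tensorflow_addons as tfa

batch_size, max_time, depth, units = 4, 10, 32, 64

# BahdanauAttention defines both a memory_layer and a query_layer, so it
# exercises both branches of build().
attention = tfa.seq2seq.BahdanauAttention(units=units)

# Path 1: memory not yet initialized. setup_memory() calls the layer on
# the memory tensor, so build() receives the memory shape (not a list,
# since no memory_sequence_length is passed) and builds memory_layer.
memory = tf.random.normal([batch_size, max_time, depth])
attention.setup_memory(memory)

# Path 2: memory is initialized. Calling the mechanism with [query, state]
# means build() receives [query.shape, state.shape] and builds query_layer
# from input_shape[0], the query shape.
query = tf.random.normal([batch_size, depth])
state = attention.initial_alignments(batch_size, tf.float32)
alignments, next_state = attention([query, state])

Both branches can run on the same instance because the memory-setup call resets self.built, so Keras invokes build() a second time once the real [query, state] inputs arrive.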