product_matching/euclidean_intersection.py [18:41]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
class SelfAttention(Layer):
    """Additive self-attention over the time axis of a (batch, time, features) input."""

    # Relies on the file's earlier imports providing `Layer` (tf.keras.layers.Layer)
    # and `K` (tf.keras.backend).
    def __init__(self, return_sequences=True, **kwargs):
        super(SelfAttention, self).__init__(**kwargs)
        self.return_sequences = return_sequences

    def build(self, input_shape):
        # W projects each timestep's features to a scalar score; b is a per-timestep bias.
        self.W = self.add_weight(name="att_weight", shape=(input_shape[-1], 1),
                                 initializer="normal")
        self.b = self.add_weight(name="att_bias", shape=(input_shape[1], 1),
                                 initializer="zeros")
        super(SelfAttention, self).build(input_shape)
    def compute_mask(self, inputs, mask=None):
        # Pass the incoming mask through while the time axis is preserved; once the
        # output is summed over time, the per-timestep mask no longer applies, so drop it.
        return mask if self.return_sequences else None
    def call(self, x, mask=None):
        # Score each timestep, normalise the scores over the time axis, and weight the
        # inputs. Note the incoming mask is not applied to the scores, so padded
        # timesteps still receive attention weight.
        e = K.tanh(K.dot(x, self.W) + self.b)
        a = K.softmax(e, axis=1)
        output = x * a
        if self.return_sequences:
            return output
        return K.sum(output, axis=1)
    def get_config(self):
        # Include return_sequences so the layer can be re-created from its config.
        config = super().get_config().copy()
        config.update({"return_sequences": self.return_sequences})
        return config
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
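
A minimal usage sketch for the layer above (not part of the repository): it assumes
TensorFlow 2.x Keras is available and that SelfAttention is defined as shown; the
vocabulary size, sequence length, and layer widths are illustrative only.
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
import tensorflow as tf

# Hypothetical text encoder: embed token ids, encode with a BiLSTM, then let
# SelfAttention collapse the sequence into a single attention-weighted vector.
inputs = tf.keras.Input(shape=(50,), dtype="int32")
x = tf.keras.layers.Embedding(input_dim=10000, output_dim=64, mask_zero=True)(inputs)
x = tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(32, return_sequences=True))(x)
x = SelfAttention(return_sequences=False)(x)   # (batch, 64) weighted sum over time
outputs = tf.keras.layers.Dense(1, activation="sigmoid")(x)

model = tf.keras.Model(inputs, outputs)
model.compile(optimizer="adam", loss="binary_crossentropy")
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -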



product_matching/hyperboloid.py [38:64]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
class SelfAttention(Layer):
    """
    Self Attention Layer: additive attention over the time axis of a
    (batch, time, features) input.
    """

    # Relies on the file's earlier imports providing `Layer` (tf.keras.layers.Layer)
    # and `K` (tf.keras.backend).
    def __init__(self, return_sequences=True, **kwargs):
        super(SelfAttention, self).__init__(**kwargs)
        self.return_sequences = return_sequences

    def build(self, input_shape):
        # W projects each timestep's features to a scalar score; b is a per-timestep bias.
        self.W = self.add_weight(name="att_weight", shape=(input_shape[-1], 1),
                                 initializer="normal")
        self.b = self.add_weight(name="att_bias", shape=(input_shape[1], 1),
                                 initializer="zeros")
        super(SelfAttention, self).build(input_shape)
    def compute_mask(self, inputs, mask=None):
        # Pass the incoming mask through while the time axis is preserved; once the
        # output is summed over time, the per-timestep mask no longer applies, so drop it.
        return mask if self.return_sequences else None
    def call(self, x, mask=None):
        # Score each timestep, normalise the scores over the time axis, and weight the
        # inputs. Note the incoming mask is not applied to the scores, so padded
        # timesteps still receive attention weight.
        e = K.tanh(K.dot(x, self.W) + self.b)
        a = K.softmax(e, axis=1)
        output = x * a
        if self.return_sequences:
            return output
        return K.sum(output, axis=1)
    def get_config(self):
        # Include return_sequences so the layer can be re-created from its config.
        config = super().get_config().copy()
        config.update({"return_sequences": self.return_sequences})
        return config
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
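
Because the layer defines get_config, a saved model that contains it can be restored
by registering the class as a custom object. A brief sketch (not from the repository;
the file name is hypothetical), again assuming TensorFlow 2.x Keras:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
import tensorflow as tf

# `model` is any compiled tf.keras model that uses SelfAttention; "matcher.h5" is a
# hypothetical path used only for this sketch.
model.save("matcher.h5")

restored = tf.keras.models.load_model(
    "matcher.h5",
    custom_objects={"SelfAttention": SelfAttention},
)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -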



