models/compressive.py [177:191]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
class TransformerSeqLayer(nn.Module):
    def __init__(self, args, layer_ind):
        super(TransformerSeqLayer, self).__init__()
        self.args = args
        self.attn = MultiHeadSeqAttention(args)
        self.ff = FeedForwardLayer(args)
        self.norm1 = nn.LayerNorm(args.hid_sz)
        self.norm2 = nn.LayerNorm(args.hid_sz)

    def forward(self, h, h_memory, c_memory):
        # h = B x M x H
        # h_memory = B x L+M x H

        if self.args.pre_norm:
            h_memory = self.norm1(h_memory)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
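The excerpt ends right after the pre-norm branch, before the attention and feed-forward calls; the extra `c_memory` argument (presumably the compressed memories of the Compressive Transformer, given the file name) is not yet used at this point. For context, a pre-norm layer of this shape normalizes the attention inputs first and adds the attention output back onto the raw `h` as a residual, then repeats the pattern with `norm2` around the feed-forward block. The sketch below illustrates only that flow; the `attn`/`ff` call signatures and the post-norm branch are assumptions, not the repository's exact code.

```python
import torch.nn as nn

# Minimal sketch of a pre-norm residual layer in the same spirit.
# NOTE: the attention/feed-forward signatures and the post-norm branch
# are assumptions, not the exact code from models/compressive.py.
class PreNormLayerSketch(nn.Module):
    def __init__(self, hid_sz, ff_sz, nheads, pre_norm=True):
        super().__init__()
        self.pre_norm = pre_norm
        self.attn = nn.MultiheadAttention(hid_sz, nheads, batch_first=True)
        self.ff = nn.Sequential(
            nn.Linear(hid_sz, ff_sz), nn.ReLU(), nn.Linear(ff_sz, hid_sz)
        )
        self.norm1 = nn.LayerNorm(hid_sz)
        self.norm2 = nn.LayerNorm(hid_sz)

    def forward(self, h, h_memory):
        # h = B x M x H, h_memory = B x L+M x H
        if self.pre_norm:
            q, kv = self.norm1(h), self.norm1(h_memory)
            h = h + self.attn(q, kv, kv)[0]    # residual around attention
            h = h + self.ff(self.norm2(h))     # residual around feed-forward
        else:
            h = self.norm1(h + self.attn(h, h_memory, h_memory)[0])
            h = self.norm2(h + self.ff(h))
        return h
```

Note how the pre-norm variant keeps the residual stream un-normalized, which is what allows `h_memory` to be normalized in place at the top of the forward above.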



models/expire_span.py [287:302]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
class TransformerSeqLayer(nn.Module):
    def __init__(self, args, layer_ind):
        super(TransformerSeqLayer, self).__init__()
        self.args = args
        self.attn = MultiHeadSeqAttention(args)
        self.ff = FeedForwardLayer(args)
        self.norm1 = nn.LayerNorm(args.hid_sz)
        self.norm2 = nn.LayerNorm(args.hid_sz)

    def forward(self, h, h_memory, c_memory):
        # h = B x M x H
        # h_memory = B x L x H
        # c_memory = B x L

        if self.args.pre_norm:
            h_memory = self.norm1(h_memory)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
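The only signature difference from the compressive variant is that `h_memory` no longer includes the current segment and there is an extra `c_memory` tensor of shape B x L, one scalar per memory slot. In the Expire-Span paper this carries per-memory timing information so that attention to memories whose predicted span has run out can be softly masked with a linear ramp, m = max(0, min(1, 1 + r/R)). The sketch below is a hedged illustration of that masking step only; the convention that `c_memory` holds each memory's remaining span r, the ramp length `R`, and the tensor names are assumptions, not the repository's exact code.

```python
import torch

# Soft expiration mask in the spirit of the Expire-Span paper:
# m = clamp(1 + r / R, 0, 1), where r is the remaining span of a memory
# and R is the ramp length. ASSUMPTION for this sketch: c_memory (B x L)
# holds the remaining span r of each memory slot.
def expire_mask(c_memory: torch.Tensor, ramp: float = 32.0) -> torch.Tensor:
    return (1.0 + c_memory / ramp).clamp(min=0.0, max=1.0)

# The mask multiplies the attention weights over the memory, so long-expired
# memories (r << -R) contribute nothing while recent ones pass through.
attn_weights = torch.rand(2, 4, 10)                    # B x M x L (illustrative)
c_memory = torch.linspace(-64, 64, 10).expand(2, 10)   # B x L
masked = attn_weights * expire_mask(c_memory).unsqueeze(1)
```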



