code/src/model/attention.py [151:177]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        assert self.bos_attr in ['', 'avg', 'cross']
        assert self.bias_attr in ['', 'avg', 'cross']

        # indexes
        self.bos_index = params.bos_index
        self.eos_index = params.eos_index
        self.pad_index = params.pad_index

        # attribute embeddings / bias
        if self.bos_attr != '' or self.bias_attr != '':
            # buffers used to map per-attribute label IDs into the flat tables below
            self.register_buffer('attr_offset', params.attr_offset.clone())
            self.register_buffer('attr_shifts', params.attr_shifts.clone())
        if self.bos_attr != '':
            # 'avg': one row per individual label; 'cross': one row per label combination
            n_bos_attr = sum(params.n_labels) if self.bos_attr == 'avg' else reduce(mul, params.n_labels, 1)
            self.bos_attr_embeddings = nn.Embedding(n_bos_attr, self.emb_dim)
        if self.bias_attr != '':
            n_bias_attr = sum(params.n_labels) if self.bias_attr == 'avg' else reduce(mul, params.n_labels, 1)
            # attribute-conditioned bias over the output vocabulary
            self.bias_attr_embeddings = nn.Embedding(n_bias_attr, self.n_words)

        # embedding layers
        if self.share_encdec_emb:
            logger.info("Sharing encoder and decoder input embeddings")
            self.embeddings = encoder.embeddings
        else:
            self.embeddings = nn.Embedding(self.n_words, self.emb_dim, padding_idx=self.pad_index)
            nn.init.normal_(self.embeddings.weight, 0, 0.1)
            # normal_ overwrote the padding row as well, so zero it again
            nn.init.constant_(self.embeddings.weight[self.pad_index], 0)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
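
A note on the table sizes above: with bos_attr == 'avg' the table gets one row per
individual attribute label (the sum of the label counts), while 'cross' gets one row
per joint label combination (the product of the counts). A minimal sketch of that
arithmetic, with made-up label counts; reduce and mul are the functools/operator
imports the excerpts rely on:

    from functools import reduce
    from operator import mul

    # hypothetical setup: 2 attributes, with 2 and 3 possible labels each
    n_labels = [2, 3]

    n_avg = sum(n_labels)               # 5 rows: one per individual label
    n_cross = reduce(mul, n_labels, 1)  # 6 rows: one per (label_1, label_2) pair

    print(n_avg, n_cross)  # 5 6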



code/src/model/seq2seq.py [144:170]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        assert self.bos_attr in ['', 'avg', 'cross']
        assert self.bias_attr in ['', 'avg', 'cross']

        # indexes
        self.bos_index = params.bos_index
        self.eos_index = params.eos_index
        self.pad_index = params.pad_index

        # attribute embeddings / bias
        if self.bos_attr != '' or self.bias_attr != '':
            # buffers used to map per-attribute label IDs into the flat tables below
            self.register_buffer('attr_offset', params.attr_offset.clone())
            self.register_buffer('attr_shifts', params.attr_shifts.clone())
        if self.bos_attr != '':
            # 'avg': one row per individual label; 'cross': one row per label combination
            n_bos_attr = sum(params.n_labels) if self.bos_attr == 'avg' else reduce(mul, params.n_labels, 1)
            self.bos_attr_embeddings = nn.Embedding(n_bos_attr, self.emb_dim)
        if self.bias_attr != '':
            n_bias_attr = sum(params.n_labels) if self.bias_attr == 'avg' else reduce(mul, params.n_labels, 1)
            # attribute-conditioned bias over the output vocabulary
            self.bias_attr_embeddings = nn.Embedding(n_bias_attr, self.n_words)

        # embedding layers
        if self.share_encdec_emb:
            logger.info("Sharing encoder and decoder input embeddings")
            self.embeddings = encoder.embeddings
        else:
            self.embeddings = nn.Embedding(self.n_words, self.emb_dim, padding_idx=self.pad_index)
            nn.init.normal_(self.embeddings.weight, 0, 0.1)
            # normal_ overwrote the padding row as well, so zero it again
            nn.init.constant_(self.embeddings.weight[self.pad_index], 0)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
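
On the non-shared branch of the embedding setup: nn.Embedding's padding_idx zeroes
the pad row at construction, but the normal_ init that follows overwrites every row,
so the pad row must be re-zeroed by hand, exactly as both excerpts do. A self-contained
sketch with hypothetical sizes (n_words, emb_dim and pad_index are placeholders, not
values from the repo):

    import torch
    import torch.nn as nn

    n_words, emb_dim, pad_index = 100, 16, 2  # hypothetical sizes

    embeddings = nn.Embedding(n_words, emb_dim, padding_idx=pad_index)
    nn.init.normal_(embeddings.weight, 0, 0.1)          # fills every row, pad row included
    nn.init.constant_(embeddings.weight[pad_index], 0)  # restore the all-zero pad row

    assert torch.all(embeddings.weight[pad_index] == 0)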



