def __init__()

in NMT/src/model/attention.py


    def __init__(self, params):
        """
        Encoder initialization.
        """
        super(Encoder, self).__init__()

        # model parameters
        self.n_langs = params.n_langs
        self.n_words = params.n_words
        self.share_lang_emb = params.share_lang_emb
        self.emb_dim = params.emb_dim
        self.hidden_dim = params.hidden_dim
        self.dropout = params.dropout
        self.n_enc_layers = params.n_enc_layers
        self.share_enc = params.share_enc
        self.pad_index = params.pad_index
        self.freeze_enc_emb = params.freeze_enc_emb
        self.max_len = params.max_len
        self.dis_input_proj = params.dis_input_proj
        # shared input embeddings require identical vocabulary sizes across languages;
        # share_enc counts shared encoder components from the top (projection layer + LSTM layers)
        assert not self.share_lang_emb or len(set(params.n_words)) == 1
        assert 0 <= self.share_enc <= self.n_enc_layers + 1

        # embedding layers
        if self.share_lang_emb:
            logger.info("Sharing encoder input embeddings")
            layer_0 = nn.Embedding(self.n_words[0], self.emb_dim, padding_idx=self.pad_index)
            nn.init.normal_(layer_0.weight, 0, 0.1)
            nn.init.constant_(layer_0.weight[self.pad_index], 0)
            embeddings = [layer_0 for _ in range(self.n_langs)]
        else:
            embeddings = []
            for n_words in self.n_words:
                layer_i = nn.Embedding(n_words, self.emb_dim, padding_idx=self.pad_index)
                nn.init.normal_(layer_i.weight, 0, 0.1)
                nn.init.constant_(layer_i.weight[self.pad_index], 0)
                embeddings.append(layer_i)
        self.embeddings = nn.ModuleList(embeddings)

        # LSTM layers / shared layers
        lstm = [
            nn.LSTM(self.emb_dim, self.hidden_dim, num_layers=self.n_enc_layers, dropout=self.dropout, bidirectional=True)
            for _ in range(self.n_langs)
        ]
        # share the top LSTM layers across languages; the projection layer below
        # counts as one shared component, hence the "share_enc - 1"
        for k in range(self.n_enc_layers):
            if self.n_enc_layers - k <= self.share_enc - 1:
                logger.info("Sharing encoder bi-LSTM parameters for layer %i" % k)
                for i in range(1, self.n_langs):
                    for name in BILSTM_PARAMS:
                        # tie the layer-k parameters of language i to those of language 0
                        setattr(lstm[i], name % k, getattr(lstm[0], name % k))
        self.lstm = nn.ModuleList(lstm)

        # projection layers
        if self.share_enc >= 1:
            logger.info("Sharing encoder projection layers")
            proj_0 = nn.Linear(2 * self.hidden_dim, self.emb_dim, bias=False)
            proj = [proj_0 for _ in range(self.n_langs)]
        else:
            proj = [nn.Linear(2 * self.hidden_dim, self.emb_dim, bias=False)
                    for _ in range(self.n_langs)]
        self.proj = nn.ModuleList(proj)
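
For reference, `logger` and `BILSTM_PARAMS` come from the surrounding module and are not part of this excerpt. The sketch below is hypothetical, not code from the repository: the first few lines show a plausible definition of `BILSTM_PARAMS` based on the standard parameter names of a bidirectional nn.LSTM (with a `_reverse` suffix for the backward direction), and the rest builds a `params` namespace by hand with illustrative values just to construct the encoder and check which parameters end up shared.

# Minimal usage sketch (hypothetical values; assumes the surrounding module defines
# BILSTM_PARAMS roughly as below and that Encoder is the class whose __init__ is shown above).
import logging
from argparse import Namespace

logger = logging.getLogger(__name__)

# Assumed definition: per-layer parameter name templates of a bidirectional LSTM.
LSTM_PARAMS = ['weight_ih_l%i', 'weight_hh_l%i', 'bias_ih_l%i', 'bias_hh_l%i']
BILSTM_PARAMS = LSTM_PARAMS + [name + '_reverse' for name in LSTM_PARAMS]

# Hand-built hyper-parameters for a two-language setup (illustrative only).
params = Namespace(
    n_langs=2,
    n_words=[30000, 30000],   # must all be equal when share_lang_emb is True
    share_lang_emb=True,
    emb_dim=256,
    hidden_dim=512,
    dropout=0.1,
    n_enc_layers=2,
    share_enc=3,              # projection layer + both LSTM layers shared
    pad_index=2,
    freeze_enc_emb=False,
    max_len=100,
    dis_input_proj=False,
)

encoder = Encoder(params)
# Both languages should now point at the same layer-1 LSTM parameters.
print(encoder.lstm[0].weight_ih_l1 is encoder.lstm[1].weight_ih_l1)  # True

With `share_enc=3` and two encoder layers, the condition `n_enc_layers - k <= share_enc - 1` holds for both k=0 and k=1, so every bi-LSTM layer and the projection layer are tied across the two languages.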