experiments/codes/model/gat/edge_gat.py [115:152]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        ]
        param_names = [self.weight_names[gi] for gi in gat_layer_param_indx]
        param_name_to_idx = {k: full_name_idx[k] for k in param_names}
        return param_name_to_idx

    def forward(self, batch, rel_emb=None):
        # import ipdb; ipdb.set_trace()
        data = batch.graphs
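        # map every weight name to its position in the flat parameter list self.weights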
        param_name_to_idx = {name: idx for idx, name in enumerate(self.weight_names)}
        # initialize random node embeddings

        node_emb = torch.Tensor(
            size=(self.config.model.num_nodes, self.config.model.relation_embedding_dim)
        ).to(self.config.general.device)
        torch.nn.init.xavier_uniform_(node_emb, gain=1.414)
        x = F.embedding(data.x, node_emb)
        # x = F.embedding(data.x, self.weights[self.get_param_id(param_name_to_idx,
        #                                                        'node_embedding')])
        x = x.squeeze(1)
        # x = self.embedding(data.x).squeeze(1) # N x node_dim
        if rel_emb is not None:
            edge_attr = F.embedding(data.edge_attr, rel_emb)
        else:
            edge_attr = F.embedding(
                data.edge_attr,
                get_param(self.weights, param_name_to_idx, "relation_embedding"),
            )
        edge_attr = edge_attr.squeeze(1)
        # edge_attr = self.edge_embedding(data.edge_attr).squeeze(1) # E x edge_dim
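        # propagate through all but the last edge-aware GAT layer,
        # applying dropout before each conv and ELU after it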
        for nr in range(self.config.model.gat.num_layers - 1):
            param_name_dict = self.prepare_param_idx(nr)
            x = F.dropout(x, p=self.config.model.gat.dropout, training=self.training)
            x = self.edgeConvs[nr](
                x, data.edge_index, edge_attr, self.weights, param_name_dict
            )
            x = F.elu(x)
        x = F.dropout(x, p=self.config.model.gat.dropout, training=self.training)
        param_name_dict = self.prepare_param_idx(self.config.model.gat.num_layers - 1)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
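
Both excerpts build on the same parameter-indexing pattern: every learnable tensor sits in one flat list (self.weights) whose order matches self.weight_names, and each GAT layer is handed a small name-to-index dictionary so it can pull only its own parameters out of the shared list. Below is a minimal sketch of that pattern, assuming get_param simply returns weights[name_to_idx[name]]; the real helper's body is not shown above, so the version here is illustrative only, and the weight names are made up.

import torch
import torch.nn as nn

# Illustrative flat parameter store mirroring self.weights / self.weight_names.
weight_names = ["relation_embedding", "gat_layer_0_att", "gat_layer_0_lin"]
weights = nn.ParameterList([nn.Parameter(torch.empty(4, 8)) for _ in weight_names])
for w in weights:
    nn.init.xavier_uniform_(w, gain=1.414)

def get_param(weights, name_to_idx, name):
    # Assumed behaviour of the helper called in forward(): look the name up
    # in the index map and return the matching entry of the flat list.
    return weights[name_to_idx[name]]

# Full map over all weight names, as built at the top of forward().
param_name_to_idx = {name: idx for idx, name in enumerate(weight_names)}

# Per-layer map, as returned by prepare_param_idx(layer): only the names that
# belong to that layer, still pointing at positions in the full flat list.
layer_param_names = [n for n in weight_names if n.startswith("gat_layer_0")]
param_name_dict = {n: param_name_to_idx[n] for n in layer_param_names}

rel_emb_table = get_param(weights, param_name_to_idx, "relation_embedding")
print(rel_emb_table.shape)  # torch.Size([4, 8])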



experiments/codes/model/gat/edge_gat.py [296:332]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        ]
        param_names = [self.weight_names[gi] for gi in gat_layer_param_indx]
        param_name_to_idx = {k: full_name_idx[k] for k in param_names}
        return param_name_to_idx

    def forward(self, batch, rel_emb=None):
        data = batch.graphs
        param_name_to_idx = {name: idx for idx, name in enumerate(self.weight_names)}
        # initialize random node embeddings

        node_emb = torch.Tensor(
            size=(self.config.model.num_nodes, self.config.model.relation_embedding_dim)
        ).to(self.config.general.device)
        torch.nn.init.xavier_uniform_(node_emb, gain=1.414)
        x = F.embedding(data.x, node_emb)
        # x = F.embedding(data.x, self.weights[self.get_param_id(param_name_to_idx,
        #                                                        'node_embedding')])
        x = x.squeeze(1)
        # x = self.embedding(data.x).squeeze(1) # N x node_dim
        if rel_emb is not None:
            edge_attr = F.embedding(data.edge_attr, rel_emb)
        else:
            edge_attr = F.embedding(
                data.edge_attr,
                get_param(self.weights, param_name_to_idx, "relation_embedding"),
            )
        edge_attr = edge_attr.squeeze(1)
        # edge_attr = self.edge_embedding(data.edge_attr).squeeze(1) # E x edge_dim
        for nr in range(self.config.model.gat.num_layers - 1):
            param_name_dict = self.prepare_param_idx(nr)
            x = F.dropout(x, p=self.config.model.gat.dropout, training=self.training)
            x = self.edgeConvs[nr](
                x, data.edge_index, edge_attr, self.weights, param_name_dict
            )
            x = F.elu(x)
        x = F.dropout(x, p=self.config.model.gat.dropout, training=self.training)
        param_name_dict = self.prepare_param_idx(self.config.model.gat.num_layers - 1)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
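
In both excerpts the node side has no learned embedding table: a fresh tensor is allocated and Xavier-initialised on every forward pass, and data.x / data.edge_attr are integer id tensors turned into features via F.embedding followed by squeeze(1). A self-contained sketch of just that lookup step, with toy sizes standing in for the config.model.* values (the trailing singleton dimension on the id tensors is assumed from the squeeze(1) calls above):

import torch
import torch.nn.functional as F

num_nodes, num_relations, emb_dim = 10, 4, 8  # stand-ins for config.model.* values

# Node embeddings are re-sampled on every call, as in forward(): an
# uninitialised table is created and Xavier-initialised, so node ids carry
# no identity that persists across batches.
node_emb = torch.empty(num_nodes, emb_dim)
torch.nn.init.xavier_uniform_(node_emb, gain=1.414)

# Assumed shape (N, 1) for the id tensors, which is what makes squeeze(1) useful.
node_ids = torch.randint(0, num_nodes, (5, 1))
x = F.embedding(node_ids, node_emb).squeeze(1)            # N x emb_dim

# Edge attributes are relation ids looked up either in an externally supplied
# rel_emb or in the learned "relation_embedding" parameter; a random table
# stands in for both here.
rel_table = torch.randn(num_relations, emb_dim)
edge_ids = torch.randint(0, num_relations, (7, 1))
edge_attr = F.embedding(edge_ids, rel_table).squeeze(1)   # E x emb_dim

print(x.shape, edge_attr.shape)  # torch.Size([5, 8]) torch.Size([7, 8])

Because node_emb is rebuilt inside forward() and is not part of self.weights, it receives no gradient updates; only the relation embedding and the per-layer GAT weights are learned. The commented-out lines in both excerpts show the alternative of reading the node embedding from self.weights instead.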



