lmgvp/modules.py [392:414]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        h_V = self.W_v(h_V)
        h_E = self.W_e(h_E)
        # GVP Conv layers
        if not self.residual:
            for layer in self.layers:
                h_V = layer(h_V, edge_index, h_E)
            out = self.W_out(h_V)
        else:
            h_V_out = []  # collect outputs from all GVP Conv layers
            h_V_in = h_V
            for layer in self.layers:
                h_V_out.append(layer(h_V_in, edge_index, h_E))
                h_V_in = h_V_out[-1]
            # concat outputs from GVPConvLayers (separately for s and V)
            h_V_out = (
                torch.cat([h_V[0] for h_V in h_V_out], dim=-1),
                torch.cat([h_V[1] for h_V in h_V_out], dim=-2),
            )
            out = self.W_out(h_V_out)

        # aggregate node vectors to graph
        out = scatter_mean(out, batch.batch, dim=0)
        return self.dense(out).squeeze(-1) + 0.5
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



lmgvp/modules.py [689:709]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        h_V = self.W_v(h_V)
        h_E = self.W_e(h_E)
        if not self.residual:
            for layer in self.layers:
                h_V = layer(h_V, edge_index, h_E)
            out = self.W_out(h_V)
        else:
            h_V_out = []  # collect outputs from GVPConvLayers
            h_V_in = h_V
            for layer in self.layers:
                h_V_out.append(layer(h_V_in, edge_index, h_E))
                h_V_in = h_V_out[-1]
            # concat outputs from GVPConvLayers (separately for s and V)
            h_V_out = (
                torch.cat([h_V[0] for h_V in h_V_out], dim=-1),
                torch.cat([h_V[1] for h_V in h_V_out], dim=-2),
            )
            out = self.W_out(h_V_out)

        out = scatter_mean(out, batch.batch, dim=0)
        return self.dense(out).squeeze(-1) + 0.5
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



