in lib/util.py [0:0]
def forward(self, x):
    """Norm applied to the block's input, i.e. after the previous block's activation
    rather than between this block's layer and its activation. Experimented with this
    for both IAM and BC and it was slightly better."""
    if self.norm is not None:
        x = self.norm(x)  # normalize incoming (post-activation) features
    x = self.layer(x)
    if self.use_activation:
        x = F.relu(x, inplace=True)
    return x
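
For context, a minimal self-contained sketch of how such a block might be wrapped and stacked so the ordering is visible end to end; the class name MLPBlock, the LayerNorm/Linear choices, and the constructor arguments are assumptions for illustration, not the repo's actual module:

import torch
import torch.nn as nn
import torch.nn.functional as F


class MLPBlock(nn.Module):
    # Hypothetical wrapper: norm -> linear -> ReLU, so stacking blocks places each
    # block's norm directly after the previous block's activation.
    def __init__(self, in_dim, out_dim, use_norm=True, use_activation=True):
        super().__init__()
        self.norm = nn.LayerNorm(in_dim) if use_norm else None
        self.layer = nn.Linear(in_dim, out_dim)
        self.use_activation = use_activation

    def forward(self, x):
        if self.norm is not None:
            x = self.norm(x)
        x = self.layer(x)
        if self.use_activation:
            x = F.relu(x, inplace=True)
        return x


# Two stacked blocks: the second block's norm runs right after the first block's ReLU.
stack = nn.Sequential(MLPBlock(16, 32), MLPBlock(32, 8))
out = stack(torch.randn(4, 16))  # shape: (4, 8)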