banding_removal/fastmri/model/classifiers/resnet_r1.py [80:131]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        out = self.pool(out)
        out = out.view(batch_size, self.nf0)
        out = self.fc(actvn(out))
        
        return out


class ResnetBlock(nn.Module):
    """Pre-activation residual block: (GroupNorm -> ReLU -> Conv) twice,
    added to an identity or learned 1x1 shortcut.

    Args:
        fin: number of input channels.
        fout: number of output channels.
        fhidden: hidden channel count; defaults to ``min(fin, fout)``.

    NOTE: relies on the module-level ``kernel_size`` and ``actvn``
    defined elsewhere in this file.
    """
    def __init__(self, fin, fout, fhidden=None):
        super().__init__()
        # A learned 1x1 projection is only needed when channel counts differ.
        self.learned_shortcut = (fin != fout)
        self.fin = fin
        self.fout = fout
        self.fhidden = min(fin, fout) if fhidden is None else fhidden

        # max(1, c // 32): the original `c // 32` yields num_groups=0 for any
        # channel count below 32, which makes nn.GroupNorm raise. Clamping to
        # 1 group is behavior-identical for the intended multiple-of-32
        # channel counts and degrades gracefully (LayerNorm-like) otherwise.
        self.norm_0 = nn.GroupNorm(max(1, self.fin // 32), self.fin)

        self.conv_0 = nn.Conv2d(self.fin, self.fhidden,
            kernel_size, stride=1, padding=kernel_size // 2, bias=False)

        self.norm_1 = nn.GroupNorm(max(1, self.fhidden // 32), self.fhidden)

        self.conv_1 = nn.Conv2d(self.fhidden, self.fout,
            kernel_size, stride=1, padding=kernel_size // 2, bias=False)

        if self.learned_shortcut:
            # 1x1 conv matches channel dimensions on the skip path.
            self.conv_s = nn.Conv2d(self.fin, self.fout,
                1, stride=1, padding=0, bias=False)

    def forward(self, x):
        """Return shortcut(x) + residual(x); spatial size is preserved."""
        x_s = self._shortcut(x)
        dx = self.conv_0(actvn(self.norm_0(x)))
        dx = self.conv_1(actvn(self.norm_1(dx)))
        return x_s + dx

    def _shortcut(self, x):
        # Identity when fin == fout, otherwise the learned 1x1 projection.
        return self.conv_s(x) if self.learned_shortcut else x


def actvn(x):
    """Shared nonlinearity for this module: elementwise ReLU."""
    out = F.relu(x)
    return out
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



banding_removal/fastmri/model/classifiers/resnet_r1_prev.py [77:128]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        out = self.pool(out)
        out = out.view(batch_size, self.nf0)
        out = self.fc(actvn(out))
        
        return out


class ResnetBlock(nn.Module):
    """Pre-activation residual block: (GroupNorm -> ReLU -> Conv) twice,
    added to an identity or learned 1x1 shortcut.

    Args:
        fin: number of input channels.
        fout: number of output channels.
        fhidden: hidden channel count; defaults to ``min(fin, fout)``.

    NOTE: relies on the module-level ``kernel_size`` and ``actvn``
    defined elsewhere in this file.
    """
    def __init__(self, fin, fout, fhidden=None):
        super().__init__()
        # A learned 1x1 projection is only needed when channel counts differ.
        self.learned_shortcut = (fin != fout)
        self.fin = fin
        self.fout = fout
        self.fhidden = min(fin, fout) if fhidden is None else fhidden

        # max(1, c // 32): the original `c // 32` yields num_groups=0 for any
        # channel count below 32, which makes nn.GroupNorm raise. Clamping to
        # 1 group is behavior-identical for the intended multiple-of-32
        # channel counts and degrades gracefully (LayerNorm-like) otherwise.
        self.norm_0 = nn.GroupNorm(max(1, self.fin // 32), self.fin)

        self.conv_0 = nn.Conv2d(self.fin, self.fhidden,
            kernel_size, stride=1, padding=kernel_size // 2, bias=False)

        self.norm_1 = nn.GroupNorm(max(1, self.fhidden // 32), self.fhidden)

        self.conv_1 = nn.Conv2d(self.fhidden, self.fout,
            kernel_size, stride=1, padding=kernel_size // 2, bias=False)

        if self.learned_shortcut:
            # 1x1 conv matches channel dimensions on the skip path.
            self.conv_s = nn.Conv2d(self.fin, self.fout,
                1, stride=1, padding=0, bias=False)

    def forward(self, x):
        """Return shortcut(x) + residual(x); spatial size is preserved."""
        x_s = self._shortcut(x)
        dx = self.conv_0(actvn(self.norm_0(x)))
        dx = self.conv_1(actvn(self.norm_1(dx)))
        return x_s + dx

    def _shortcut(self, x):
        # Identity when fin == fout, otherwise the learned 1x1 projection.
        return self.conv_s(x) if self.learned_shortcut else x


def actvn(x):
    """Shared nonlinearity for this module: elementwise ReLU."""
    out = F.relu(x)
    return out
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



