def __init__()

in models/decoders/mvp.py


    def __init__(self, nprims, primsize, inch, outch, chstart=256, hstart=4,
            texwarp=False, elr=True, norm=None, mod=False, ub=True, upconv=None,
            penultch=None, use3dconv=False, reduced3dch=False):
        super(SlabContentDecoder, self).__init__()

        # texture warping and custom upsampling modules are not supported here
        assert not texwarp
        assert upconv is None

        self.nprims = nprims
        self.primsize = primsize

        # lay the primitives out in a (near-)square nprimy x nprimx grid
        self.nprimy = int(math.sqrt(nprims))
        self.nprimx = nprims // self.nprimy
        assert nprims == self.nprimx * self.nprimy

        # extent of the output "slab": all primitives tiled side by side in
        # x/y, sharing a common depth
        self.slabw = self.nprimx * primsize[0]
        self.slabh = self.nprimy * primsize[1]
        self.slabd =               primsize[2]

        # number of 2x upsampling layers needed to grow an hstart-sized map to
        # the slab resolution; with use3dconv, the last nlayers3d of them also
        # double the depth axis (1 -> slabd)
        nlayers = int(math.log2(min(self.slabw, self.slabh))) - int(math.log2(hstart))
        nlayers3d = int(math.log2(self.slabd))
        nlayers2d = nlayers - nlayers3d

        lastch = chstart
        # current extent as (depth, height, width); the initial width is scaled
        # by the grid aspect ratio so height and width reach slabh and slabw
        # after the same number of doublings
        dims = (1, hstart, hstart * self.nprimx // self.nprimy)

        layers = []
        # project the latent code to the initial feature map and reshape it to
        # (batch, chstart, height, width)
        layers.append(LinearELR(inch, chstart*dims[1]*dims[2], act=nn.LeakyReLU(0.2)))
        layers.append(Reshape(-1, chstart, dims[1], dims[2]))

        for i in range(nlayers):
            # halve the channel count on every other layer
            nextch = lastch if i % 2 == 0 else lastch // 2

            # optionally shrink channels further once the 3D stage begins
            if use3dconv and reduced3dch and i >= nlayers2d:
                nextch //= 2

            # optionally pin the channel count of the penultimate layer
            if i == nlayers - 2 and penultch is not None:
                nextch = penultch

            if use3dconv and i >= nlayers2d:
                # 3D stage: insert a depth dimension on the first 3D layer,
                # then upsample depth, height, and width together; the final
                # layer emits raw outch channels (no norm or activation)
                if i == nlayers2d:
                    layers.append(Reshape(-1, lastch, 1, dims[1], dims[2]))
                layers.append(ConvTranspose3dELR(
                    lastch,
                    (outch if i == nlayers - 1 else nextch),
                    4, 2, 1,
                    ub=(dims[0]*2, dims[1]*2, dims[2]*2) if ub else None,
                    norm=None if i == nlayers - 1 else norm,
                    act=None if i == nlayers - 1 else nn.LeakyReLU(0.2)
                    ))
            else:
                # 2D stage: the final layer folds the depth axis into the
                # channel dimension (outch * primsize[2] output channels)
                layers.append(ConvTranspose2dELR(
                    lastch,
                    (outch * primsize[2] if i == nlayers - 1 else nextch),
                    4, 2, 1,
                    ub=(dims[1]*2, dims[2]*2) if ub else None,
                    norm=None if i == nlayers - 1 else norm,
                    act=None if i == nlayers - 1 else nn.LeakyReLU(0.2)
                    ))

            lastch = nextch
            # track the upsampled extent; depth only grows during the 3D stage
            dims = (dims[0] * (2 if use3dconv and i >= nlayers2d else 1), dims[1] * 2, dims[2] * 2)

        self.mod = nn.Sequential(*layers)
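
A minimal usage sketch, assuming the enclosing SlabContentDecoder class and its layer dependencies (LinearELR, ConvTranspose2dELR, ConvTranspose3dELR, Reshape) are importable from this repository; the argument values are illustrative, not taken from any shipped config. Since the class's forward method is outside this excerpt, the sketch drives the internal self.mod stack directly:

    import torch

    # 256 primitives in a 16 x 16 grid, each covering 32 x 32 x 32 samples
    dec = SlabContentDecoder(nprims=256, primsize=(32, 32, 32), inch=256, outch=4)

    z = torch.randn(2, 256)  # batch of latent codes
    out = dec.mod(z)
    # default 2D path: nlayers = log2(512) - log2(4) = 7 doublings grow the
    # 4 x 4 start map to 512 x 512, and depth is folded into the channels:
    # out.shape == (2, outch * primsize[2], 512, 512) == (2, 128, 512, 512)
    # with use3dconv=True, the last log2(32) = 5 layers would instead be 3D,
    # producing an explicit depth axis: (2, outch, 32, 512, 512)

The trade-off visible in the two branches: the 2D path lets a single 2D transposed convolution emit each primitive's whole volume at the cost of a large final channel count, while the 3D path upsamples depth explicitly over the last nlayers3d layers.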