BigGAN_PyTorch/BigGANdeep.py [328:367]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
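            # fp16 path: utils.Adam16 is the repo's Adam variant that keeps
            # fp32 master copies of the half-precision parameters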
            self.optim = utils.Adam16(
                params=self.parameters(),
                lr=self.lr,
                betas=(self.B1, self.B2),
                weight_decay=0,
                eps=self.adam_eps,
            )
        else:
            self.optim = optim.Adam(
                params=self.parameters(),
                lr=self.lr,
                betas=(self.B1, self.B2),
                weight_decay=0,
                eps=self.adam_eps,
            )

        # LR scheduling, left here for forward compatibility
        # self.lr_sched = {'itr' : 0}# if self.progressive else {}
        # self.j = 0

    # Initialize weights and tally the total parameter count
    def init_weights(self):
        self.param_count = 0
        for module in self.modules():
            if (
                isinstance(module, nn.Conv2d)
                or isinstance(module, nn.Linear)
                or isinstance(module, nn.Embedding)
            ):
                if self.init == "ortho":
                    init.orthogonal_(module.weight)
                elif self.init == "N02":
                    init.normal_(module.weight, 0, 0.02)
                elif self.init in ["glorot", "xavier"]:
                    init.xavier_uniform_(module.weight)
                else:
                    print("Init style not recognized...")
                self.param_count += sum(
                    [p.data.nelement() for p in module.parameters()]
                )
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
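
Both copies of this block select the optimizer the same way: the repo's
fp16-friendly utils.Adam16 when training in half precision, stock
torch.optim.Adam otherwise, always with weight_decay=0 and a configurable
eps. Below is a minimal sketch of that selection logic factored into a free
function; the name make_adam and the explicit fp16 flag are illustrative
stand-ins for the attributes the excerpt reads off self (self.lr, self.B1,
self.B2, self.adam_eps), not names from the repo.

    import torch.nn as nn
    import torch.optim as optim

    def make_adam(model: nn.Module, lr: float, b1: float, b2: float,
                  adam_eps: float, fp16: bool = False) -> optim.Optimizer:
        # Mirrors the excerpt: fp16 -> utils.Adam16, otherwise stock Adam.
        if fp16:
            import utils  # the BigGAN_PyTorch utils module, as in the excerpt
            return utils.Adam16(params=model.parameters(), lr=lr,
                                betas=(b1, b2), weight_decay=0, eps=adam_eps)
        return optim.Adam(params=model.parameters(), lr=lr,
                          betas=(b1, b2), weight_decay=0, eps=adam_eps)

Either branch returns a ready-to-step optimizer, so the surrounding training
loop never needs to know which precision is in use.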



BigGAN_PyTorch/BigGANdeep.py [632:670]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
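            # Same fp16/fp32 branch as in the excerpt above: utils.Adam16
            # for half-precision training, stock Adam otherwise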
            self.optim = utils.Adam16(
                params=self.parameters(),
                lr=self.lr,
                betas=(self.B1, self.B2),
                weight_decay=0,
                eps=self.adam_eps,
            )
        else:
            self.optim = optim.Adam(
                params=self.parameters(),
                lr=self.lr,
                betas=(self.B1, self.B2),
                weight_decay=0,
                eps=self.adam_eps,
            )
        # LR scheduling, left here for forward compatibility
        # self.lr_sched = {'itr' : 0}# if self.progressive else {}
        # self.j = 0

    # Initialize weights and tally the total parameter count
    def init_weights(self):
        self.param_count = 0
        for module in self.modules():
            if (
                isinstance(module, nn.Conv2d)
                or isinstance(module, nn.Linear)
                or isinstance(module, nn.Embedding)
            ):
                if self.init == "ortho":
                    init.orthogonal_(module.weight)
                elif self.init == "N02":
                    init.normal_(module.weight, 0, 0.02)
                elif self.init in ["glorot", "xavier"]:
                    init.xavier_uniform_(module.weight)
                else:
                    print("Init style not recognized...")
                self.param_count += sum(
                    [p.data.nelement() for p in module.parameters()]
                )
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
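
The init_weights method (and the optimizer branch above it) is duplicated
verbatim between the two line ranges, presumably the generator and the
discriminator. One way to consolidate it, sketched here as a mixin rather
than taken from the repo, is to give both networks a common base class that
owns the weight-init dispatch:

    import torch.nn as nn
    import torch.nn.init as init

    class WeightInitMixin:
        # Concrete classes are expected to set self.init to one of "ortho",
        # "N02", or "glorot"/"xavier" before calling init_weights(), exactly
        # as both excerpts already do.
        def init_weights(self):
            self.param_count = 0
            for module in self.modules():
                if isinstance(module, (nn.Conv2d, nn.Linear, nn.Embedding)):
                    if self.init == "ortho":
                        init.orthogonal_(module.weight)
                    elif self.init == "N02":
                        init.normal_(module.weight, 0, 0.02)
                    elif self.init in ("glorot", "xavier"):
                        init.xavier_uniform_(module.weight)
                    else:
                        print("Init style not recognized...")
                    self.param_count += sum(
                        p.numel() for p in module.parameters()
                    )

A class declared as class Generator(WeightInitMixin, nn.Module) would then
inherit the shared method (self.modules() assumes nn.Module is in the MRO),
and the same consolidation applies to the optimizer construction.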



