BigGAN_PyTorch/BigGAN.py [571:615]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            self.lr, self.B1, self.B2, self.adam_eps = D_lr, D_B1, D_B2, adam_eps
            if D_mixed_precision:
                print("Using fp16 adam in D...")
                import utils

                self.optim = utils.Adam16(
                    params=self.parameters(),
                    lr=self.lr,
                    betas=(self.B1, self.B2),
                    weight_decay=0,
                    eps=self.adam_eps,
                )
            else:
                self.optim = optim.Adam(
                    params=self.parameters(),
                    lr=self.lr,
                    betas=(self.B1, self.B2),
                    weight_decay=0,
                    eps=self.adam_eps,
                )
        # LR scheduling, left here for forward compatibility
        # self.lr_sched = {'itr' : 0}# if self.progressive else {}
        # self.j = 0

    # Initialize
    def init_weights(self):
        self.param_count = 0
        for module in self.modules():
            if (
                isinstance(module, nn.Conv2d)
                or isinstance(module, nn.Linear)
                or isinstance(module, nn.Embedding)
            ):
                if self.init == "ortho":
                    init.orthogonal_(module.weight)
                elif self.init == "N02":
                    init.normal_(module.weight, 0, 0.02)
                elif self.init in ["glorot", "xavier"]:
                    init.xavier_uniform_(module.weight)
                else:
                    print("Init style not recognized...")
                self.param_count += sum(
                    p.data.nelement() for p in module.parameters()
                )
        print("Param count for D" "s initialized parameters: %d" % self.param_count)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
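
For reference, the initialization dispatch in init_weights above can be exercised on any module in isolation. The following is a minimal standalone sketch, assuming a generic nn.Module; the function name init_module_weights and the example model in the usage comment are hypothetical, not part of the repository:

import torch.nn as nn
import torch.nn.init as init

def init_module_weights(model, init_style="ortho"):
    # Mirrors the init_weights dispatch above: orthogonal, N(0, 0.02), or
    # Xavier/Glorot uniform, applied to Conv2d/Linear/Embedding weights.
    param_count = 0
    for module in model.modules():
        if isinstance(module, (nn.Conv2d, nn.Linear, nn.Embedding)):
            if init_style == "ortho":
                init.orthogonal_(module.weight)
            elif init_style == "N02":
                init.normal_(module.weight, 0, 0.02)
            elif init_style in ("glorot", "xavier"):
                init.xavier_uniform_(module.weight)
            else:
                print("Init style not recognized...")
            param_count += sum(p.numel() for p in module.parameters())
    return param_count

# Hypothetical usage:
# model = nn.Sequential(nn.Conv2d(3, 64, 3), nn.Flatten(), nn.Linear(64, 10))
# print("Initialized %d parameters" % init_module_weights(model, "N02"))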



BigGAN_PyTorch/BigGANdeep.py [627:671]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        self.lr, self.B1, self.B2, self.adam_eps = D_lr, D_B1, D_B2, adam_eps
        if D_mixed_precision:
            print("Using fp16 adam in D...")
            import utils

            self.optim = utils.Adam16(
                params=self.parameters(),
                lr=self.lr,
                betas=(self.B1, self.B2),
                weight_decay=0,
                eps=self.adam_eps,
            )
        else:
            self.optim = optim.Adam(
                params=self.parameters(),
                lr=self.lr,
                betas=(self.B1, self.B2),
                weight_decay=0,
                eps=self.adam_eps,
            )
        # LR scheduling, left here for forward compatibility
        # self.lr_sched = {'itr' : 0}# if self.progressive else {}
        # self.j = 0

    # Initialize
    def init_weights(self):
        self.param_count = 0
        for module in self.modules():
            if (
                isinstance(module, nn.Conv2d)
                or isinstance(module, nn.Linear)
                or isinstance(module, nn.Embedding)
            ):
                if self.init == "ortho":
                    init.orthogonal_(module.weight)
                elif self.init == "N02":
                    init.normal_(module.weight, 0, 0.02)
                elif self.init in ["glorot", "xavier"]:
                    init.xavier_uniform_(module.weight)
                else:
                    print("Init style not recognized...")
                self.param_count += sum(
                    p.data.nelement() for p in module.parameters()
                )
        print("Param count for D" "s initialized parameters: %d" % self.param_count)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
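
The optimizer branch is identical in both files. A minimal sketch of that selection as a free function is given below; the name build_d_optimizer and the placeholder hyperparameters in the usage comment are illustrative, while utils.Adam16 is the repo-specific fp16 Adam imported in the snippets:

import torch.nn as nn
import torch.optim as optim

def build_d_optimizer(model, D_lr, D_B1, D_B2, adam_eps, D_mixed_precision=False):
    # Same branch as above: the repo's fp16-aware Adam16 when mixed precision
    # is requested, plain torch.optim.Adam otherwise.
    if D_mixed_precision:
        import utils  # BigGAN_PyTorch's Adam16 lives here (repo-specific import)
        return utils.Adam16(params=model.parameters(), lr=D_lr,
                            betas=(D_B1, D_B2), weight_decay=0, eps=adam_eps)
    return optim.Adam(params=model.parameters(), lr=D_lr,
                      betas=(D_B1, D_B2), weight_decay=0, eps=adam_eps)

# Hypothetical usage with placeholder hyperparameters:
# D = nn.Linear(128, 1)  # stand-in for the discriminator
# opt = build_d_optimizer(D, D_lr=2e-4, D_B1=0.0, D_B2=0.999, adam_eps=1e-8)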



