lib/utils.py [29:38]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        return dcgan_loss_dis, dcgan_loss_gen
    elif config.model in ['wgan', 'wgan_gp']:
        return wgan_loss_dis, wgan_loss_gen
    else:
        raise NotImplementedError('%s model is not implemented!' % config.model)

def dcgan_loss_dis(x_real, x_fake, netD, device):
    """Non-saturating DCGAN discriminator loss.

    Runs the discriminator on real and generated batches and combines the
    softplus terms: softplus(-D(real)) pushes real logits up, softplus(D(fake))
    pushes fake logits down.  `device` is accepted for signature parity with
    the other loss functions but is not used here.

    Returns:
        (scalar loss tensor, real logits, fake logits)
    """
    p_real = netD(x_real)
    p_gen = netD(x_fake)
    loss_real = F.softplus(-p_real).mean()
    loss_fake = F.softplus(p_gen).mean()
    return loss_real + loss_fake, p_real, p_gen
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



train_mnist.py [164:197]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        return dcgan_loss_dis, dcgan_loss_gen
    elif config.model in ['wgan', 'wgan_gp']:
        return wgan_loss_dis, wgan_loss_gen
    # elif config.model == 'wgan_gp':
    #     return functools.partial(wgan_loss_dis, grad_penalty=True, gp_lambda=config.gp_lambda), wgan_loss_gen
    else:
        raise NotImplementedError('%s model is not implemented!' % config.model)


# def dcgan_loss_dis(x_real, x_fake, netD, device):
#     p_real, p_gen = netD(x_real), netD(x_fake)

#     criterion = nn.BCELoss()
#     real_label = torch.full((p_real.size(0),), 1, device=device)
#     fake_label = torch.full((p_real.size(0),), 0, device=device)
#     errD_real = criterion(p_real, real_label)
#     errD_gen = criterion(p_gen, fake_label)
#     dis_loss = errD_real + errD_gen
#     return dis_loss, p_real, p_gen


# def dcgan_loss_gen(x_fake, netD, device):
#     p_gen = netD(x_fake)

#     criterion = nn.BCELoss()
#     real_label = torch.full((p_gen.size(0),), 1, device=device)
#     gen_loss = criterion(p_gen, real_label)
#     return gen_loss, p_gen


def dcgan_loss_dis(x_real, x_fake, netD, device):
    """Discriminator loss for the (non-saturating) DCGAN objective.

    Scores both batches with `netD`, then sums the mean softplus penalties:
    real samples are penalised for low logits via softplus(-logit), generated
    samples for high logits via softplus(logit).  The `device` argument is
    unused; it exists so all loss functions share one call signature.

    Returns:
        (scalar loss tensor, logits on x_real, logits on x_fake)
    """
    logits_real, logits_fake = netD(x_real), netD(x_fake)
    real_term = F.softplus(-logits_real).mean()
    fake_term = F.softplus(logits_fake).mean()
    dis_loss = real_term + fake_term
    return dis_loss, logits_real, logits_fake
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



