models/degrader_constant.py [172:189]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        treatments_transformed = torch.clamp(torch.exp(_treatments) - 1.0, 1e-12, 1e6)
        c6a, c12a, ara_a = torch.unbind(treatments_transformed, axis=1)
        c6 = torch.transpose(c6a.repeat([n_iwae, 1]), 0, 1)
        c12 = torch.transpose(c12a.repeat([n_iwae, 1]), 0, 1)

        x0 = torch.stack(
            [
                theta.init_x,
                theta.init_rfp,
                theta.init_yfp,
                theta.init_cfp,
                zero,
                zero,
                theta.init_luxR,
                theta.init_lasR,
                theta.init_aiiA,
                c6,
                c12,
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
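Both excerpts open with the same preprocessing of the treatment inputs. The sketch below is a hypothetical standalone reconstruction of that step only (the shape of _treatments and the value of n_iwae are assumptions, not taken from the file): it inverts a log-scale encoding of the three treatment concentrations and tiles each one across the IWAE samples.

import torch

batch, n_iwae = 4, 10                      # hypothetical sizes
_treatments = torch.rand(batch, 3)         # assumed log-scale (C6, C12, arabinose) inputs

# Undo the log encoding and clamp so concentrations stay positive and bounded.
treatments_transformed = torch.clamp(torch.exp(_treatments) - 1.0, 1e-12, 1e6)

# Split the three treatment columns; dim=1 is the documented spelling of axis=1.
c6a, c12a, ara_a = torch.unbind(treatments_transformed, dim=1)

# repeat turns a [batch] vector into [n_iwae, batch]; the transpose gives
# [batch, n_iwae], i.e. one copy of each treatment per IWAE sample.
c6 = torch.transpose(c6a.repeat([n_iwae, 1]), 0, 1)
c12 = torch.transpose(c12a.repeat([n_iwae, 1]), 0, 1)

assert c6.shape == (batch, n_iwae) and c12.shape == (batch, n_iwae)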



models/degrader_constant.py [231:248]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        treatments_transformed = torch.clamp(torch.exp(_treatments) - 1.0, 1e-12, 1e6)
        c6a, c12a, ara_a = torch.unbind(treatments_transformed, axis=1)
        c6 = torch.transpose(c6a.repeat([n_iwae, 1]), 0, 1)
        c12 = torch.transpose(c12a.repeat([n_iwae, 1]), 0, 1)

        x0 = torch.stack(
            [
                theta.init_x,
                theta.init_rfp,
                theta.init_yfp,
                theta.init_cfp,
                zero,
                zero,
                theta.init_luxR,
                theta.init_lasR,
                theta.init_aiiA,
                c6,
                c12,
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
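The torch.stack call is truncated in both excerpts, so the full species list and the stacking arguments are not visible here. As an illustration only, the sketch below assembles an initial state from a few hypothetical [batch, n_iwae] components in the same style; the stacking dimension and any entries beyond those shown above are assumptions.

import torch

batch, n_iwae = 4, 10
zero = torch.zeros(batch, n_iwae)          # placeholder species, as in the excerpts
init_x = torch.rand(batch, n_iwae)         # stand-in for a sampled theta.init_* value
c6 = torch.rand(batch, n_iwae)             # stand-in for a tiled treatment column

# Each component has shape [batch, n_iwae]; stacking along a new trailing axis
# yields one initial state vector per (batch element, IWAE sample) pair.
x0 = torch.stack([init_x, zero, c6], dim=-1)
assert x0.shape == (batch, n_iwae, 3)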



