def forward()

in ml3/mbrl_utils.py [0:0]


    def forward(self, x):
        # Accept array-like inputs by converting them to a float tensor.
        x = torch.Tensor(x)
        # Query every ensemble member; each returns a (mean, variance) pair.
        means = []
        variances = []
        for eid in range(self.ensemble_size):
            mean_and_var = self.models[eid](x)
            means.append(mean_and_var[0])
            variances.append(mean_and_var[1])

        # Mixture mean: simple average of the per-model means.
        mean = sum(means) / len(means)

        # Mixture variance via the law of total variance:
        #   Var[x] = E_i[var_i + mean_i^2] - mean^2
        second_moment = torch.zeros_like(variances[0])
        for model_mean, model_var in zip(means, variances):
            second_moment += model_var + model_mean * model_mean

        var = (second_moment / len(means)) - (mean * mean)
        # Clip the variance to a minimum of 1e-3; we can interpret this as assuming a minimum
        # level of noise. The clipping here is probably no longer necessary because we are now
        # clipping at the individual model level as well.
        var = var.clamp_min(1e-3)
        return torch.stack((mean, var))
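
A minimal usage sketch of how the stacked output might be consumed downstream. The `ensemble` object, the input sizes, and the per-dimension Gaussian interpretation are assumptions for illustration, not taken from the file:

    import torch

    # Hypothetical usage: `ensemble` is assumed to be an instance of the class that
    # defines forward() above; the batch and feature sizes are made up.
    x = torch.randn(32, 10)                        # a batch of 32 inputs with 10 features
    mean, var = ensemble.forward(x)                # unpack the torch.stack((mean, var)) output
    std = var.sqrt()                               # per-dimension predictive standard deviation
    sample = mean + std * torch.randn_like(std)    # one draw from the moment-matched Gaussian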