in botorch/models/higher_order_gp.py [0:0]
def make_posterior_variances(self, joint_covariance_matrix: LazyTensor) -> Tensor:
    r"""
    Compute the posterior variances at the test points.

    As currently implemented, this relies on a forwards call made with the
    stacked (train + test) data, whose joint covariance across all data points
    is passed in as ``joint_covariance_matrix``. The predictive variances are
    then extracted without forming the full predictive covariance, by
    exploiting the Kronecker structure of the train-train covariance.
    """
    # TODO: use the exposed joint covariances from the prediction strategy
    data_covar = joint_covariance_matrix.lazy_tensors[0].evaluate_kernel()
    n_train = self.train_inputs[0].shape[-2]

    # Partition the data-dimension covariance into its train/test sub-blocks.
    cross_covar = data_covar[..., n_train:, :n_train]
    train_covar = data_covar[..., :n_train, :n_train]
    test_covar = data_covar[..., n_train:, n_train:]

    # The remaining Kronecker factors (latent/output dimensions) are shared
    # between the train and test blocks.
    latent_factors = joint_covariance_matrix.lazy_tensors[1:]
    full_train_covar = KroneckerProductLazyTensor(train_covar, *latent_factors)
    full_test_covar = KroneckerProductLazyTensor(test_covar, *latent_factors)
    cross_factors = [cross_covar, *latent_factors]

    evals, evecs = full_train_covar.symeig(eigenvectors=True)
    # (\kron \Lambda_i + \sigma^2 I)^{-1}
    inv_noisy_evals = DiagLazyTensor(1.0 / (evals + self.likelihood.noise))

    # compute K_i S_i \hadamard K_i S_i
    hadamard_prod = KroneckerProductLazyTensor(
        *[
            factor.matmul(eigvecs_i).evaluate() ** 2
            for factor, eigvecs_i in zip(cross_factors, evecs.lazy_tensors)
        ]
    )

    # and compute the column sums of
    # (\kron K_i S_i * K_i S_i) \tilde{\Lambda}^{-1}
    explained_variance = hadamard_prod.matmul(inv_noisy_evals).sum(dim=-1)
    return full_test_covar.diag() - explained_variance