in theseus/optimizer/dense_linearization.py
def _linearize_jacobian_impl(self):
    err_row_idx = 0
    # Dense Jacobian A of shape (batch_size, num_rows, num_cols) and
    # right-hand side b of shape (batch_size, num_rows) for the whole objective.
    self.A = torch.zeros(
        (self.objective.batch_size, self.num_rows, self.num_cols),
        device=self.objective.device,
        dtype=self.objective.dtype,
    )
    self.b = torch.zeros(
        (self.objective.batch_size, self.num_rows),
        device=self.objective.device,
        dtype=self.objective.dtype,
    )
    for cost_function in self.objective:
        # Weighted Jacobians (one per optimization variable) and weighted error
        # for this cost function.
        jacobians, error = cost_function.weighted_jacobians_error()
        num_rows = cost_function.dim()
        for var_idx_in_cost_function, var_jacobian in enumerate(jacobians):
            # Locate the variable's column block in A via the variable ordering.
            var_idx_in_order = self.ordering.index_of(
                cost_function.optim_var_at(var_idx_in_cost_function).name
            )
            var_start_col = self.var_start_cols[var_idx_in_order]
            num_cols = var_jacobian.shape[2]
            row_slice = slice(err_row_idx, err_row_idx + num_rows)
            col_slice = slice(var_start_col, var_start_col + num_cols)
            # Copy this variable's Jacobian block into its slot in A.
            self.A[:, row_slice, col_slice] = var_jacobian
        # Store the negated weighted error in this cost function's rows of b.
        self.b[:, row_slice] = -error
        err_row_idx += cost_function.dim()
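For context, the assembled A and b define a batched linear least-squares problem, min ||A * delta - b||^2, that a downstream Gauss-Newton-style solver minimizes for the update delta. The sketch below is illustrative only, assuming the shapes produced above; gauss_newton_step and delta are hypothetical names, not Theseus API, and Theseus's actual solvers consume the linearization differently.

import torch

def gauss_newton_step(A: torch.Tensor, b: torch.Tensor) -> torch.Tensor:
    # A: (batch_size, num_rows, num_cols), b: (batch_size, num_rows).
    # Solve the normal equations (A^T A) delta = A^T b for each batch element.
    AtA = A.transpose(1, 2) @ A               # (batch, num_cols, num_cols)
    Atb = A.transpose(1, 2) @ b.unsqueeze(2)  # (batch, num_cols, 1)
    delta = torch.linalg.solve(AtA, Atb)      # (batch, num_cols, 1)
    # Per-variable updates are stacked along the column dimension, in the same
    # order used by var_start_cols above.
    return delta.squeeze(2)                   # (batch, num_cols)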