in benchmarks/run_benchmarks.py [0:0]
def get_config_combinations(self):
    # Algo modes that do not take acceleration parameters.
    non_accel_algo_modes = {"auto", "mom"}
    algo_modes_set = set(self.algo_modes)
    accel_algo_modes = algo_modes_set - non_accel_algo_modes

    # Solvers that do not use a sketching operator.
    non_sketch_solvers = {"cg", "direct"}
    solvers_set = set(self.solvers)
    sketch_solvers = solvers_set - non_sketch_solvers

    # Full sweep over sketch-based solvers and accelerated algo modes.
    combinations = list(
        itertools.product(
            self.sparse_formats,
            self.problems,
            sketch_solvers,
            self.sketch_sizes,
            self.operator_modes,
            accel_algo_modes,
            self.accel_params,
        )
    )

    # Add runs for the non-accelerated modes; the acceleration parameters
    # are fixed to (0.0, 0.0) because these modes do not use them.
    for algo_mode in non_accel_algo_modes:
        if algo_mode in algo_modes_set:
            combinations.extend(
                itertools.product(
                    self.sparse_formats,
                    self.problems,
                    sketch_solvers,
                    self.sketch_sizes,
                    self.operator_modes,
                    (algo_mode,),
                    ((0.0, 0.0),),
                )
            )

    # cg and direct ignore the sketching and acceleration settings, so run
    # them only once per (sparse format, problem), pinning the first sketch
    # size, operator mode, algo mode, and acceleration parameters.
    for solver in non_sketch_solvers:
        if solver in solvers_set:
            combinations.extend(
                itertools.product(
                    self.sparse_formats,
                    self.problems,
                    [solver],
                    [self.sketch_sizes[0]],
                    [self.operator_modes[0]],
                    [self.algo_modes[0]],
                    [self.accel_params[0]],
                )
            )
    return combinations
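
A minimal, self-contained sketch of how the returned combinations might be consumed downstream. The field order mirrors the itertools.product call above; apart from the solver names "cg"/"direct", the algo modes "auto"/"mom", and the (0.0, 0.0) acceleration parameters, every concrete value below is a hypothetical placeholder, not taken from the source file.

import itertools

# Placeholder values standing in for the configuration attributes
# (self.sparse_formats, self.problems, ...); chosen only for illustration.
sparse_formats = ["csr"]
problems = ["problem_a"]
sketch_solvers = ["sketch_solver"]   # any solver other than cg/direct
sketch_sizes = [256]
operator_modes = ["mode_a"]
accel_algo_modes = ["accel"]         # hypothetical accelerated mode name
accel_params = [(0.5, 0.5)]          # hypothetical non-zero parameters

combinations = list(
    itertools.product(
        sparse_formats,
        problems,
        sketch_solvers,
        sketch_sizes,
        operator_modes,
        accel_algo_modes,
        accel_params,
    )
)

# Each combination is a 7-tuple in the same order as the product() arguments.
for fmt, problem, solver, sketch_size, op_mode, algo_mode, accel in combinations:
    print(fmt, problem, solver, sketch_size, op_mode, algo_mode, accel)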