in opacus/optimizers/__init__.py
from .ddp_perlayeroptimizer import DistributedPerLayerOptimizer
from .ddpoptimizer import DistributedDPOptimizer
from .optimizer import DPOptimizer
from .perlayeroptimizer import DPPerLayerOptimizer


def get_optimizer_class(clipping: str, distributed: bool):
    """Map a clipping scheme ("flat" or "per_layer") and a distributed
    flag to the matching DPOptimizer subclass."""
    if clipping == "flat" and not distributed:
        return DPOptimizer
    elif clipping == "flat" and distributed:
        return DistributedDPOptimizer
    elif clipping == "per_layer" and not distributed:
        return DPPerLayerOptimizer
    elif clipping == "per_layer" and distributed:
        return DistributedPerLayerOptimizer
    raise ValueError(
        f"Unexpected optimizer parameters. Clipping: {clipping}, distributed: {distributed}"
    )
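
A minimal usage sketch (not part of the file above): it selects and instantiates an optimizer class for flat clipping on a single worker. The keyword arguments follow DPOptimizer's public constructor (noise_multiplier, max_grad_norm, expected_batch_size); exact signatures may vary across Opacus versions.

import torch
from opacus.optimizers import get_optimizer_class

model = torch.nn.Linear(10, 2)
base_optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

# Flat clipping (one threshold over all parameters), non-distributed
# training -> this returns DPOptimizer.
optimizer_cls = get_optimizer_class(clipping="flat", distributed=False)

# Wrap the base optimizer; the returned class adds per-sample gradient
# clipping and noise injection on top of the base optimizer's step.
dp_optimizer = optimizer_cls(
    base_optimizer,
    noise_multiplier=1.0,    # noise std relative to max_grad_norm
    max_grad_norm=1.0,       # per-sample gradient clipping threshold
    expected_batch_size=32,  # used to rescale gradients under "mean" loss reduction
)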