in src/inplace_abn_cpu.cpp [144:170]
std::tuple<at::Tensor, at::Tensor, at::Tensor, at::Tensor> backward_reduce_cpu(
    const at::Tensor& y_act,
    const at::Tensor& dy_act,
    const c10::optional<at::Tensor>& weight,
    const c10::optional<at::Tensor>& bias,
    float eps,
    Activation activation,
    float activation_param) {
  CHECK_NOT_HALF(y_act);

  // Run templated implementation, specializing on both dtype and activation
  return AT_DISPATCH_FLOATING_TYPES(
      y_act.scalar_type(), "backward_reduce_cpu", [&] {
        switch (activation) {
          case Activation::LeakyReLU:
            return backward_reduce_impl<scalar_t, Activation::LeakyReLU>(
                y_act, dy_act, weight, bias, eps, activation_param);
          case Activation::ELU:
            return backward_reduce_impl<scalar_t, Activation::ELU>(
                y_act, dy_act, weight, bias, eps, activation_param);
          case Activation::Identity:
          default:
            return backward_reduce_impl<scalar_t, Activation::Identity>(
                y_act, dy_act, weight, bias, eps, activation_param);
        }
      });
}
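
// Hedged sketch (not part of the original file): how a caller might invoke the
// dispatcher above. Tensor shapes, the eps/activation_param values, and the
// names given to the four returned tensors are illustrative assumptions only;
// their actual meaning is defined by backward_reduce_impl.
#include <ATen/ATen.h>
#include <tuple>

void backward_reduce_cpu_example() {
  at::Tensor y_act = at::randn({2, 8, 4, 4});  // activation output saved by the forward pass
  at::Tensor dy_act = at::randn_like(y_act);   // incoming gradient w.r.t. y_act
  c10::optional<at::Tensor> weight = at::rand({8}) + 0.1;  // per-channel scale (assumed shape)
  c10::optional<at::Tensor> bias = at::randn({8});         // per-channel shift (assumed shape)

  at::Tensor out0, out1, out2, out3;  // placeholder names for the returned tuple
  std::tie(out0, out1, out2, out3) = backward_reduce_cpu(
      y_act, dy_act, weight, bias, /*eps=*/1e-5f,
      Activation::LeakyReLU, /*activation_param=*/0.01f);
}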