def backward()

in fast_grad_conv.py [0:0]


    def backward(ctx, weight_grad, bias_grad):
        # Recover the original (unexpanded) weight to infer the batch size
        # from the leading dimension of the expanded weight gradient.
        weight_param, = ctx.saved_tensors
        batch_size = weight_grad.size(0) // weight_param.size(0)
        # Split the per-sample dimension back out and sum over the batch to
        # obtain the gradient for the shared weight parameter.
        weight_grad = weight_grad.view(batch_size, -1, weight_grad.size(1),
                weight_grad.size(2), weight_grad.size(3)).sum(0)
        if bias_grad is not None:
            # Apply the same per-sample reduction to the bias gradient.
            bias_grad = bias_grad.view(batch_size, -1).sum(0)
        return weight_grad, bias_grad, None
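
A backward of this shape typically pairs with a forward that tiles the convolution weight (and bias) once per sample, so a grouped convolution produces per-sample outputs and per-sample gradients. The sketch below is a minimal, self-contained illustration of that pairing; the class name ExpandWeights, the explicit batch_size input, and the usage code are assumptions for illustration, not taken from fast_grad_conv.py.

    import torch
    import torch.nn.functional as F

    class ExpandWeights(torch.autograd.Function):
        # Hypothetical companion to the backward documented above: tile the
        # shared weight/bias batch_size times so a grouped convolution gives
        # each sample its own weight copy.

        @staticmethod
        def forward(ctx, weight, bias, batch_size):
            ctx.save_for_backward(weight)
            expanded_weight = weight.repeat(batch_size, 1, 1, 1)
            expanded_bias = bias.repeat(batch_size) if bias is not None else None
            return expanded_weight, expanded_bias

        @staticmethod
        def backward(ctx, weight_grad, bias_grad):
            # Undo the tiling: sum per-sample gradients back into the shape of
            # the shared parameters (same logic as the method shown above).
            weight_param, = ctx.saved_tensors
            batch_size = weight_grad.size(0) // weight_param.size(0)
            weight_grad = weight_grad.view(batch_size, -1, weight_grad.size(1),
                    weight_grad.size(2), weight_grad.size(3)).sum(0)
            if bias_grad is not None:
                bias_grad = bias_grad.view(batch_size, -1).sum(0)
            return weight_grad, bias_grad, None

    # Usage sketch: 8 samples, 3 -> 16 channels, 3x3 kernel.
    x = torch.randn(8, 3, 32, 32)
    weight = torch.randn(16, 3, 3, 3, requires_grad=True)
    bias = torch.randn(16, requires_grad=True)
    w, b = ExpandWeights.apply(weight, bias, x.size(0))
    # Fold the batch into the channel dimension and convolve with
    # groups=batch_size, so each sample sees its own copy of the weight.
    out = F.conv2d(x.reshape(1, -1, 32, 32), w, b, groups=x.size(0))
    out.sum().backward()
    assert weight.grad.shape == weight.shape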