fn apply_gradient_step()

in gad/src/net_ext.rs [47:73]


    /// Performs one gradient-descent step over `batch`: each example is
    /// evaluated on a fresh graph, its gradients are back-propagated, scaled
    /// by the learning rate `lambda`, and summed; the weights are then
    /// updated once and the cumulated output is returned.
    fn apply_gradient_step(&mut self, lambda: T, batch: Vec<Self::Input>) -> Result<T> {
        let mut delta: Option<Self::Weights> = None;
        let mut cumulated_output: Option<T> = None;
        for example in batch {
            // Forward pass: evaluate the example on a fresh graph.
            let mut g = Graph1::new();
            let (output, info) = self.eval_with_gradient_info(&mut g, example)?;
            // Accumulate the scalar output (the per-example error) over the batch.
            match &mut cumulated_output {
                opt @ None => *opt = Some(*output.data()),
                Some(val) => *val += *output.data(),
            }
            // Backward pass: seed the output gradient with one and propagate.
            let store = g.evaluate_gradients_once(output.gid()?, T::one())?;
            // Accumulate the weight gradients, scaled by the learning rate.
            let gradients = self.read_weight_gradients(info, &store)?;
            match &mut delta {
                opt @ None => *opt = Some(gradients.scale(lambda)),
                Some(val) => val.add_assign(gradients.scale(lambda))?,
            }
        }
        // Update weights.
        if let Some(delta) = delta {
            self.update_weights(delta)?;
        }
        // Report the cumulated error; an empty batch yields an error.
        cumulated_output.ok_or_else(|| Error::empty(func_name!()))
    }
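
For context, here is a sketch of how this method might be driven from a
training loop. Everything below is illustrative rather than part of gad:
net is assumed to implement the trait that declares apply_gradient_step,
batches is assumed to hold pre-split training inputs, and the learning rate
of 0.1 is an arbitrary choice.

    // Illustrative only: `net`, `batches`, and the constants are assumptions
    // made for this sketch, not items defined by gad.
    let lambda = 0.1f32; // learning rate
    for epoch in 0..10 {
        let mut epoch_error = 0.0f32;
        for batch in batches.iter().cloned() {
            // One call performs a full forward/backward pass over the batch
            // and applies a single weight update.
            epoch_error += net.apply_gradient_step(lambda, batch)?;
        }
        println!("epoch {}: cumulated error = {}", epoch, epoch_error);
    }

Note that a fresh Graph1 is created per example, so the memory held by the
tape is bounded by a single forward pass rather than by the whole batch.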