in src/nn.ts [282:329]
export function backProp(network: Node[][], target: number,
    errorFunc: ErrorFunction): void {
  // The output node is a special case. We use the user-defined error
  // function for the derivative.
  let outputNode = network[network.length - 1][0];
  outputNode.outputDer = errorFunc.der(outputNode.output, target);
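  // E.g. with squared error E = 0.5 * (output - target)^2, the derivative
  // returned by errorFunc.der above is simply (output - target).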

  // Go through the layers backwards.
  for (let layerIdx = network.length - 1; layerIdx >= 1; layerIdx--) {
    let currentLayer = network[layerIdx];
    // Compute the error derivative of each node with respect to:
    // 1) its total input
    // 2) each of its input weights.
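    // Chain rule: dE/d(totalInput) = dE/d(output) * d(output)/d(totalInput),
    // where the second factor is the activation function's derivative.
    // accInputDer and numAccumulatedDers accumulate these per-example values
    // so that a later weight-update step (not shown in this excerpt) can
    // average them.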
    for (let i = 0; i < currentLayer.length; i++) {
      let node = currentLayer[i];
      node.inputDer = node.outputDer * node.activation.der(node.totalInput);
      node.accInputDer += node.inputDer;
      node.numAccumulatedDers++;
    }

    // Error derivative with respect to each weight coming into the node.
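    // Chain rule again: totalInput is an affine combination of the incoming
    // outputs (bias + sum of w_k * source_k.output), so the derivative of E
    // with respect to a link's weight is inputDer * source.output.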
    for (let i = 0; i < currentLayer.length; i++) {
      let node = currentLayer[i];
      for (let j = 0; j < node.inputLinks.length; j++) {
        let link = node.inputLinks[j];
        if (link.isDead) {
          continue;
        }
        link.errorDer = node.inputDer * link.source.output;
        link.accErrorDer += link.errorDer;
        link.numAccumulatedDers++;
      }
    }
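    // Layer 0 is the input layer: its nodes have no incoming links, so there
    // is nothing further to propagate once the first hidden layer is done.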
    if (layerIdx === 1) {
      continue;
    }
    let prevLayer = network[layerIdx - 1];
    for (let i = 0; i < prevLayer.length; i++) {
      let node = prevLayer[i];
      // Compute the error derivative with respect to each node's output.
      node.outputDer = 0;
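      // A node's output feeds every node in the next layer, so dE/d(output)
      // is the sum over its outgoing links of the link weight times the
      // destination node's dE/d(totalInput), computed in the pass above.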
      for (let j = 0; j < node.outputs.length; j++) {
        let output = node.outputs[j];
        node.outputDer += output.weight * output.dest.inputDer;
      }
    }
  }
}
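
For context, here is a minimal sketch of how backProp fits into one training step. It assumes the surrounding module also exports forwardProp, updateWeights, buildNetwork, Activations, and an Errors.SQUARE error function; those names and signatures are not visible in this excerpt, so treat them as assumptions rather than the module's confirmed API.

import {
  buildNetwork, forwardProp, backProp, updateWeights,
  Errors, Activations
} from "./nn";

// Hypothetical 2-4-1 network: two inputs, one hidden layer of four TANH
// units, and a single output node. No regularization.
let network = buildNetwork([2, 4, 1], Activations.TANH, Activations.TANH,
    null, ["x1", "x2"]);

// One stochastic-gradient step: the forward pass fills output/totalInput on
// every node, backProp accumulates acc*Der on nodes and links, and the
// (assumed) updateWeights call applies and resets those accumulators.
function trainStep(inputs: number[], target: number, learningRate: number) {
  forwardProp(network, inputs);
  backProp(network, target, Errors.SQUARE);
  updateWeights(network, learningRate, /* regularizationRate */ 0);
}

trainStep([0.5, -0.2], 1, 0.03);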