I found an example online that includes a method for back-propagating the error and adjusting the weights. I would like to understand exactly how this works, and which weight-update algorithm is being used. Could it be gradient descent?
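For reference, the textbook update rule I keep seeing for gradient descent with a momentum term is the following (my notation, not taken from the code below):

\[
\Delta w_{ij}(t) = -\eta \, \frac{\partial E}{\partial w_{ij}} + \mu \, \Delta w_{ij}(t-1),
\qquad
w_{ij} \leftarrow w_{ij} + \Delta w_{ij}(t)
\]

where \(\eta\) is the learning rate, \(\mu\) the momentum coefficient, and \(E\) the squared error. I'd like to know whether the code below is doing the same thing.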
/**
 * Propagate the error at all outputs back through the network.
 *
 * @param expectedOutput the target values for the output layer
 *
 * First calculate the partial derivative of the error with
 * respect to each of the weights leading into the output neurons;
 * the bias is also updated here.
 */
public void applyBackpropagation(double expectedOutput[]) {
    // Sanity check: clamp expected outputs into the open interval (0, 1),
    // since a sigmoid output can never reach 0 or 1 exactly.
    for (int i = 0; i < expectedOutput.length; i++) {
        double d = expectedOutput[i];
        if (d < 0 || d > 1) {
            if (d < 0)
                expectedOutput[i] = 0 + epsilon;
            else
                expectedOutput[i] = 1 - epsilon;
        }
    }

    // Update the weights leading into the output layer.
    int i = 0;
    for (Neuron n : outputLayer) {
        ArrayList<Connection> connections = n.getAllInConnections();
        for (Connection con : connections) {
            double ak = n.getOutput();
            double ai = con.leftNeuron.getOutput();
            double desiredOutput = expectedOutput[i];

            // dE/dw for the squared error E = 1/2 * (desired - ak)^2
            // with a sigmoid activation (derivative ak * (1 - ak)).
            double partialDerivative = -ak * (1 - ak) * ai
                    * (desiredOutput - ak);
            double deltaWeight = -learningRate * partialDerivative;
            double newWeight = con.getWeight() + deltaWeight;
            con.setDeltaWeight(deltaWeight);
            // Apply the step plus a momentum term based on the previous update.
            con.setWeight(newWeight + momentum * con.getPrevDeltaWeight());
        }
        i++;
    }

    // Update the weights leading into the hidden layer.
    for (Neuron n : hiddenLayer) {
        ArrayList<Connection> connections = n.getAllInConnections();
        for (Connection con : connections) {
            double aj = n.getOutput();
            double ai = con.leftNeuron.getOutput();

            // Sum the error signals propagated back from every output neuron
            // this hidden neuron feeds into, weighted by the connecting weight.
            double sumKoutputs = 0;
            int j = 0;
            for (Neuron out_neu : outputLayer) {
                double wjk = out_neu.getConnection(n.id).getWeight();
                double desiredOutput = expectedOutput[j];
                double ak = out_neu.getOutput();
                j++;
                sumKoutputs = sumKoutputs
                        + (-(desiredOutput - ak) * ak * (1 - ak) * wjk);
            }

            double partialDerivative = aj * (1 - aj) * ai * sumKoutputs;
            double deltaWeight = -learningRate * partialDerivative;
            double newWeight = con.getWeight() + deltaWeight;
            con.setDeltaWeight(deltaWeight);
            con.setWeight(newWeight + momentum * con.getPrevDeltaWeight());
        }
    }
}
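To check whether this matches stochastic gradient descent with momentum, I tried to reproduce the output-layer update for a single sigmoid neuron in a minimal standalone sketch. Everything here (the `sigmoid` helper, the variable names, the sample numbers) is my own illustration under that assumption, not part of the library above:

import java.lang.Math;

public class BackpropSketch {
    static final double LEARNING_RATE = 0.5;
    static final double MOMENTUM = 0.9;

    // Sigmoid activation; its derivative a * (1 - a) is what appears
    // as ak * (1 - ak) in applyBackpropagation above.
    static double sigmoid(double x) {
        return 1.0 / (1.0 + Math.exp(-x));
    }

    public static void main(String[] args) {
        double ai = 0.8;              // output of the upstream (left) neuron
        double weight = 0.3;          // current weight on the connection
        double bias = 0.1;
        double target = 1.0;          // desired output
        double prevDeltaWeight = 0.0; // no previous step yet

        // Forward pass for one output neuron.
        double ak = sigmoid(weight * ai + bias);

        // dE/dw for E = 1/2 * (target - ak)^2, the same expression the
        // code above computes: -(target - ak) * ak * (1 - ak) * ai.
        double partialDerivative = -(target - ak) * ak * (1 - ak) * ai;

        // Gradient-descent step plus momentum, mirroring
        // newWeight + momentum * con.getPrevDeltaWeight() above.
        double deltaWeight = -LEARNING_RATE * partialDerivative;
        weight = weight + deltaWeight + MOMENTUM * prevDeltaWeight;

        System.out.printf("ak = %.4f, dE/dw = %.4f, new weight = %.4f%n",
                ak, partialDerivative, weight);
    }
}

If I run this, the weight moves in the direction that reduces the squared error, which is what makes me suspect the posted method is plain gradient descent with a momentum term. Is that reading correct?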