I have read a lot about the backpropagation algorithm, and I can't figure out why mine is behaving the way it is.
Output layer error: E(k) = (target(k) - O(k).output) * f'(O(k).output) = (target - actual) * 1, since the linear output activation has a derivative of 1.
Hidden layer error: E(j) = sum(w(kj) * E(k)) * f'(N(j).output) = sum(w(kj) * E(k)) * N(j).output * (1 - N(j).output)
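To make the two formulas concrete, here is a small standalone sketch with made-up numbers (one hidden neuron j feeding one linear output k through weight w(kj); none of these variables come from the real network):

// Toy numbers: one hidden neuron j feeding one linear output k.
var hiddenOutput = 0.6;               // N(j).output
var wkj = 0.5;                        // weight from j to k
var outputK = wkj * hiddenOutput;     // linear output neuron -> 0.3
var target = 1.0;

// Output layer: linear activation, so the derivative factor is 1
var Ek = (target - outputK) * 1;      // 0.7

// Hidden layer: logistic derivative via the output, 0.6 * 0.4 = 0.24
var Ej = (wkj * Ek) * hiddenOutput * (1 - hiddenOutput); // 0.35 * 0.24 = 0.084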
// Output layer (linear activation, so f'(x) = 1 and error = target - actual)
for (var i = 0; i < this._outputs.length; i++) {
    var o = this._outputs[i];
    o.error = targetOutputs[i] - o.output;
    this._mse += o.error * o.error; // accumulate squared error for reporting
}

// Go through hidden layers, back to front, skipping the input layer
for (var cIdx = this._layers.length - 2; cIdx > 0; cIdx--) {
    var curr = this._layers[cIdx];
    var next = this._layers[cIdx + 1];
    // Go through hidden neurons
    for (var hN = 0, hLen = curr.length; hN < hLen; hN++) {
        var h = curr[hN];
        var sum = 0;
        // Sum the errors coming back from the next layer, weighted by the
        // connection from this hidden neuron into each next-layer neuron
        for (var nN = 0, nLen = next.length; nN < nLen; nN++) {
            var n = next[nN];
            sum += n.w[hN] * n.error;
        }
        h.error = sum * h.dActivationFn(h.output);
    }
}
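For reference only — this is not part of the code above, just a rough sketch of the textbook delta-rule update that these error terms usually feed into, assuming each neuron keeps its incoming weights in w, a bias, and a cached inputs array from the forward pass (all assumed names, not from my network):

// Hypothetical update step (assumed names: w, bias, inputs, learningRate).
function updateWeights(layers, learningRate) {
    // Skip the input layer at index 0; it has no incoming weights.
    for (var l = 1; l < layers.length; l++) {
        for (var i = 0; i < layers[l].length; i++) {
            var neuron = layers[l][i];
            for (var j = 0; j < neuron.w.length; j++) {
                // Delta rule: w += rate * error * the input that came through w
                neuron.w[j] += learningRate * neuron.error * neuron.inputs[j];
            }
            neuron.bias += learningRate * neuron.error;
        }
    }
}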
/**
 * The logistic function is 1 / (1 + e^(-x))
 * @param {Number} x
 * @returns {Number}
 */
function logisticFunction(x) {
    return 1 / (1 + Math.exp(-x));
}
/**
 * The derivative of the logistic function, written in terms of the
 * function's output: if y = logistic(x), then dy/dx = y * (1 - y),
 * so this expects the neuron's output y rather than the raw input x.
 * @param {Number} x the logistic output y
 * @returns {Number}
 */
function dLogisticFunction(x) {
    return x * (1 - x);
}

Neuron.dActivation = dLogisticFunction;
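A quick standalone sanity check of how these two functions are meant to be paired (the derivative wants the logistic output, which matches the h.dActivationFn(h.output) call above):

// The logistic derivative at x = 0 should be 0.25.
var y = logisticFunction(0);          // 0.5
console.log(dLogisticFunction(y));    // 0.25 -- pass the *output* y
console.log(dLogisticFunction(0));    // 0    -- passing the raw input x gives the wrong value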
My network just converges to a single answer (a random one), no matter what the input is (as long as it is positive), and that value doesn't change even when I train with 100+ data points...
Any ideas?