这是反向传播的正确实现吗? 网络分为三层,激活功能为relu。
def back_propagation(error, learning_rate, layer, wights, bias):
for i0 in range(len(layer)):
for i1 in range(len(layer[0])):
wights[i0][i1] = wights[i0][i1] - (learning_rate * ((layer[i0][i1] / wights[i0][i1]) * (error / layer[i0][i1])))
bias = bias - (learning_rate * ((layer[i0][i1] / bias) * (error / layer[i0][i1])))