import numpy as np

def cost_function(X, Y, B):
    # Mean squared error cost: J = sum((X.T @ B - Y)^2) / (2 * m)
    J = np.sum((X.T.dot(B) - Y) ** 2) / (2 * len(Y))
    return J

def gradient_descent(X, Y, B, alpha, iterations):
    cost_history = [0] * iterations
    for iteration in range(iterations):
        # Predictions for the current weights
        h = X.T.dot(B)
        # Residuals between predictions and targets
        loss = h - Y
        # Gradient of the cost with respect to B
        gradient = X.dot(loss) / len(Y)
        # Step against the gradient to minimize the cost
        B = B - (alpha * gradient)
        cost = cost_function(X, Y, B)
        cost_history[iteration] = cost
    return B, cost_history
B: weights (2, 1); X: inputs (2, 700); Y: outputs (700, 1); alpha: learning rate (0.001); iterations: 3000. I am using the cost function to compute the error.
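Below is a minimal sketch of how the two functions above could be driven with data of the shapes listed. The randomly generated arrays, the seed, and the variable names are illustrative assumptions standing in for the real dataset, not part of the original code.

import numpy as np

# Illustrative synthetic data matching the stated shapes (assumption, not the real dataset)
rng = np.random.default_rng(0)
X = rng.standard_normal((2, 700))   # inputs, one column per example
Y = rng.standard_normal((700, 1))   # targets
B = np.zeros((2, 1))                # initial weights

alpha = 0.001
iterations = 3000

B, cost_history = gradient_descent(X, Y, B, alpha, iterations)
print("final cost:", cost_history[-1])

With the update stepping against the gradient, cost_history should decrease over the iterations; plotting it is a quick way to check that the learning rate is reasonable.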