def gradient_descent_runner(b, k):
    """Fit the line y = k*x + b to the module-level data by batch gradient descent.

    Reads the module globals ``x_data``, ``y_data`` (the training points),
    ``epochs`` (number of descent iterations) and ``Ir`` (step size).

    Args:
        b: initial intercept.
        k: initial slope.

    Returns:
        Tuple ``(b, k)`` — the intercept and slope after ``epochs`` updates.
    """
    # Number of training points (as float so the gradient average divides cleanly).
    m = float(len(x_data))
    for _ in range(epochs):
        # Residual of the current line at every data point.
        residuals = [(k * xi + b) - yi for xi, yi in zip(x_data, y_data)]
        # Average gradients of the mean-squared-error cost w.r.t. b and k.
        b_grad = sum(residuals) / m
        k_grad = sum(xi * r for xi, r in zip(x_data, residuals)) / m
        # Step both parameters against their gradients.
        b -= Ir * b_grad
        k -= Ir * k_grad
    return b, k
def gradient_descent_runner(b, k):
    """Fit the line y = k*x + b to the module-level data by batch gradient descent.

    Reads the module globals ``x_data``, ``y_data`` (the training points),
    ``epochs`` (number of descent iterations) and ``Ir`` (step size).

    Args:
        b: initial intercept.
        k: initial slope.

    Returns:
        Tuple ``(b, k)`` — the intercept and slope after ``epochs`` updates.
    """
    # Total number of data points (float so the averaging division is exact).
    m = float(len(x_data))
    for _ in range(epochs):
        # BUG FIX: the gradients must be reset to zero at the start of EVERY
        # epoch. The original initialized them once before the loop, so each
        # epoch's gradient was added on top of all previous epochs' gradients,
        # making the step sizes grow without bound and the descent diverge.
        b_grad = 0
        k_grad = 0
        # Accumulate the averaged gradient of the squared-error cost.
        for j in range(len(x_data)):
            error = (k * x_data[j] + b) - y_data[j]
            b_grad += (1 / m) * error
            k_grad += (1 / m) * x_data[j] * error
        # Update b and k by one gradient step.
        b = b - (Ir * b_grad)
        k = k - (Ir * k_grad)
    return b, k