pip install matplotlib
import numpy as np
import matplotlib.pyplot as plt
data = np.loadtxt(r'hw.txt', delimiter='\t', skiprows=1)
plt.scatter(data[:, 0], data[:, 1])
plt.show()
Initialization:
def init_data():
    data = np.loadtxt('hw.txt', delimiter='\t')
    return data
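The code assumes hw.txt is a tab-separated file with one x/y pair per line. If you just want to exercise the code without the original data, a throwaway generator along these lines will produce a compatible file (the path, sample size, and coefficients here are all made up):

import numpy as np

def make_sample_file(path='hw.txt', n=100):
    # Hypothetical helper, not part of the original post: writes n noisy
    # (x, y) pairs drawn around y = 0.9*x - 90 as a tab-separated file.
    rng = np.random.default_rng(0)
    x = rng.uniform(150, 190, n)
    y = 0.9 * x - 90 + rng.normal(0, 5, n)
    np.savetxt(path, np.column_stack([x, y]), delimiter='\t', fmt='%.2f')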
def linear_regression():
    learning_rate = 0.01  # step size (learning rate)
    initial_b = 0
    initial_m = 0
    num_iter = 1000  # number of iterations
    data = init_data()
    [b, m] = optimizer(data, initial_b, initial_m, learning_rate, num_iter)
    plot_data(data, b, m)
    print(b, m)
    return b, m
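Note that plot_data is called here but never defined in the post; a minimal sketch of such a helper (assumed, not taken from the original) might be:

def plot_data(data, b, m):
    # Scatter the raw points and overlay the fitted line y = m*x + b.
    x = data[:, 0]
    y = data[:, 1]
    plt.scatter(x, y)
    plt.plot(x, m * x + b, color='red')
    plt.show()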
def optimizer(data, initial_b, initial_m, learning_rate, num_iter):
    b = initial_b
    m = initial_m
    for i in range(num_iter):
        b, m = compute_gradient(b, m, data, learning_rate)
        # after = compute_error(b, m, data)
        if i % 100 == 0:
            print(i, compute_error(b, m, data))  # loss (mean squared error)
    return [b, m]
def compute_gradient(b_cur, m_cur, data, learning_rate):
    b_gradient = 0
    m_gradient = 0
    N = float(len(data))
    # Accumulate the partial derivatives (the gradient) of the mean squared error.
    for i in range(0, len(data)):
        x = data[i, 0]
        y = data[i, 1]
        b_gradient += -(2 / N) * (y - ((m_cur * x) + b_cur))
        m_gradient += -(2 / N) * x * (y - ((m_cur * x) + b_cur))
    # Take one gradient-descent step along the negative gradient.
    new_b = b_cur - (learning_rate * b_gradient)
    new_m = m_cur - (learning_rate * m_gradient)
    return [new_b, new_m]
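For reference, the terms accumulated above are the partial derivatives of the mean squared error that compute_error evaluates:

E(b, m) = \frac{1}{N} \sum_{i=1}^{N} \bigl(y_i - (m x_i + b)\bigr)^2

\frac{\partial E}{\partial b} = -\frac{2}{N} \sum_{i=1}^{N} \bigl(y_i - (m x_i + b)\bigr), \qquad
\frac{\partial E}{\partial m} = -\frac{2}{N} \sum_{i=1}^{N} x_i \bigl(y_i - (m x_i + b)\bigr)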
def compute_error(b, m, data):
    # Mean squared error of the line y = m*x + b over the dataset.
    x = data[:, 0]
    y = data[:, 1]
    totalError = (y - m * x - b) ** 2
    totalError = np.sum(totalError, axis=0)
    return totalError / len(data)
if __name__ == '__main__':
    linear_regression()
Fix: the data in hw.txt is tab-separated, so the loading call is changed to:
data = np.loadtxt('hw.txt', delimiter='\t')
To suppress warning output while running:
import warnings
warnings.filterwarnings('ignore')
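As an optional sanity check (not part of the original post), the gradient-descent result can be compared against NumPy's closed-form least-squares fit; with enough iterations the two should roughly agree:

data = init_data()
m_ref, b_ref = np.polyfit(data[:, 0], data[:, 1], 1)  # polyfit returns [slope, intercept] for degree 1
print('polyfit reference:', b_ref, m_ref)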