import pandas as pd
import matplotlib.pyplot as plt
import  numpy as np

# Hypothesis (target) function
def function_h(w, x):
    """Return the scalar prediction for one sample: the sum of w * x.

    w is a (1, n) weight row and x an n-element feature vector (bias
    feature included), so this is the linear model's dot product.
    """
    weighted = w * x
    return weighted.sum()

# Loss function (mean squared error with the conventional 1/2 factor)
def function_J(x, y, w):
    """Return the half-mean-squared-error loss J(w).

    Parameters
    ----------
    x : ndarray, shape (m, n) — design matrix (bias column included)
    y : ndarray, shape (1, m) — targets as a row vector
    w : ndarray, shape (1, n) — weight row vector

    Returns
    -------
    float — sum((w·x_i - y_i)^2) / (2m).

    Replaces the original per-sample Python ``while`` loop with one
    vectorized NumPy expression: mathematically identical, but the
    whole computation runs in native code.
    """
    m = x.shape[0]
    residual = w @ x.T - y          # shape (1, m): prediction errors
    return np.sum(residual ** 2) / (2 * m)

# Batch gradient descent: one full-dataset update step
def batchGradientDecent(x, y, w, learningRate):
    """Return the weights after a single batch-gradient-descent step.

    x is the (m, n) design matrix, y the (1, m) target row, w the
    (1, n) weight row; the gradient is averaged over all m samples.
    """
    sampleCount = len(x)
    residual = w @ x.T - y                       # (1, m) prediction errors
    step = learningRate * (residual @ x)         # un-averaged gradient step
    return w - step / sampleCount

# Linear-regression driver: repeated batch-gradient-descent updates
def linearRegression(x, y, w, learningRate, epochs=1000):
    """Train linear-regression weights by batch gradient descent.

    Parameters
    ----------
    x : ndarray, shape (m, n) — design matrix (bias column included)
    y : ndarray, shape (1, m) — targets as a row vector
    w : ndarray, shape (1, n) — initial weight row
    learningRate : float — gradient-descent step size
    epochs : int — number of update steps; defaults to 1000, the value
        that was previously hard-coded, so existing callers are unchanged.

    Returns
    -------
    (w, r) — the trained weights and an array of shape (1, epochs)
    holding the loss recorded after each update.
    """
    r = np.zeros((1, epochs))
    for i in range(epochs):
        w = batchGradientDecent(x, y, w, learningRate)
        r[0, i] = function_J(x, y, w)  # loss AFTER this step's update
    return w, r

# 1. Single-variable linear regression
plt.rcParams['font.sans-serif'] = ['SimHei']   # render CJK text in plots
plt.rcParams['axes.unicode_minus'] = False     # keep minus signs readable

df = pd.read_csv('data/regress_data1.csv')

x = df['人口']
y = df['收益']

# Scatter plot of the raw data.
plt.scatter(x, y)

plt.title('原始数据散点图')
plt.xlabel('人口')
plt.ylabel('收益')

# Build the design matrix [feature, 1]: the appended ones column lets the
# bias be learned as the second weight.
x = x.values.reshape((x.shape[0], 1))
t = np.ones((x.shape[0], 1))
x = np.hstack((x, t))
y = y.values.reshape((1, y.shape[0]))
w = np.zeros((1, 2))
learningRate = 0.01
ret, r = linearRegression(x, y, w, learningRate)

# Overlay the fitted regression line on the scatter plot.
x_line = np.linspace(np.min(x[:, 0]), np.max(x[:, 0]), 100)
y_line = ret[0, 0] * x_line + ret[0, 1]
plt.plot(x_line, y_line, label='回归直线')

# Show the figure
plt.show()
print("参数向量:", ret[0, 0])
print("偏置:", ret[0, 1])
# Bug fix: report the loss of the TRAINED weights `ret`; the original passed
# `w`, which is still the all-zeros initial vector.
print("损失值:", function_J(x, y, ret))

# 2. Multi-variable linear regression
plt.rcParams['font.sans-serif'] = ['SimHei']   # render CJK text in plots
plt.rcParams['axes.unicode_minus'] = False

df = pd.read_csv('data/regress_data2.csv')

x1 = df['面积']
x2 = df['房间数']
y = df['价格']

x1 = x1.values.reshape((x1.shape[0], 1))
x2 = x2.values.reshape((x2.shape[0], 1))
t = np.ones((x1.shape[0], 1))
x = np.hstack((x1, x2))

# Standardize features (z-score) so both columns share a comparable scale;
# without this, gradient descent with one learning rate converges poorly.
mean_values = np.mean(x, axis=0)
std_values = np.std(x, axis=0)
x = (x - mean_values) / std_values
x = np.hstack((x, t))          # append ones column so the bias is the last weight
y = y.values.reshape((1, y.shape[0]))
w = np.zeros((1, 3))
learningRate = 0.01

ret, data = linearRegression(x, y, w, learningRate)
print("参数向量:", ret[0, 0], ret[0, 1])
print("偏置:", ret[0, 2])
# Bug fix: report the loss of the TRAINED weights `ret`; the original passed
# `w`, which is still the all-zeros initial vector.
print("损失值:", function_J(x, y, ret))

# Plot the loss curve over training epochs (1-based epoch index).
indices = np.arange(data.shape[1])
indices = indices + 1
data = data[0, :]
plt.plot(indices, data)
plt.xlim(0, 1100)
plt.ylim(0, 120000000000)
plt.xlabel('学习轮次')
plt.ylabel('损失')
plt.show()

