'''
以ex1data2.txt作为训练库,进行多元线性回归,以及正规方程
正规方程:
    theta = (X.T @ X)^(-1) @ X.T @ Y
'''
import numpy as np
import matplotlib.pyplot as plt

# Path to the training set (comma-separated rows: features..., target).
path = '/home/wdd/Desktop/ML_CODE/Linear_Regression/ex1data2.txt'

# Load the data and split it into the feature matrix and the target column.
data = np.loadtxt(path, delimiter=',')
x = data[:, :-1]
y = data[:, -1].reshape((-1, 1))

# Standardize both feature columns (zero mean, unit variance) so gradient
# descent converges faster, then prepend the bias column of ones.
for col in (0, 1):
    column = x[:, col]
    x[:, col] = (column - column.mean()) / column.std()
x = np.insert(x, 0, values=1, axis=1)

# Initial parameter vector: one zero per column of x (bias + features).
theta = [0] * x.shape[1]


# 计算损失
# 计算损失 (compute the cost)
def computerCross(x, y, theta):
    """Mean-squared-error cost J(theta) = sum((x @ theta - y)^2) / (2m).

    x     : (m, n) design matrix (bias column included).
    y     : (m, 1) target column vector.
    theta : sequence of n parameters (list or array).

    Returns the scalar cost.
    """
    # Size the column vector from x instead of the previous hard-coded
    # (3, 1), so any number of features works.
    theta = np.array(theta).reshape((x.shape[1], 1))
    residual = x @ theta - y
    return np.sum(np.power(residual, 2)) / (2 * len(x))


# 梯度下降
# 梯度下降 (batch gradient descent)
def gradientDescent(x, y, alpha, theta, iters):
    """Fit linear-regression parameters by batch gradient descent.

    x     : (m, n) design matrix (bias column included).
    y     : (m, 1) target column vector.
    alpha : learning rate.
    theta : initial parameters, length n; the caller's object is NOT mutated.
    iters : number of iterations.

    Returns (theta, cost): the learned parameters as a list of length n, and
    the cost history with one entry per iteration.
    """
    m, n = x.shape
    # Work on a private column vector.  The original code did `temp = theta`,
    # which aliased (and mutated) the caller's list, and hard-coded the
    # reshape to 3 parameters.
    w = np.array(theta, dtype=float).reshape((n, 1))
    cost = []
    for _ in range(iters):
        error = x @ w - y
        # Vectorized simultaneous update of all n parameters:
        # w <- w - (alpha/m) * X^T (Xw - y)
        w = w - (alpha / m) * (x.T @ error)
        # Record the cost AFTER the update, matching the original loop.
        cost.append(np.sum(np.power(x @ w - y, 2)) / (2 * m))
    return w.ravel().tolist(), cost


# 正则化
# 正规方程 (closed-form normal equation)
def regularizationCopmuter(x, y):
    """Closed-form solution theta = (X^T X)^-1 X^T y.

    Solves the linear system (X^T X) theta = X^T y with np.linalg.solve
    instead of forming the explicit inverse, which is slower and less
    numerically stable.  Still raises np.linalg.LinAlgError when X^T X is
    singular, just like the previous inv-based version.

    x : (m, n) design matrix (bias column included).
    y : (m, 1) target column vector.

    Returns theta as an (n, 1) column vector.
    """
    return np.linalg.solve(x.T @ x, x.T @ y)


# Normal-equation solution, left disabled (compare against gradient descent):
# theta_ne = regularizationCopmuter(x, y)
# print(theta_ne)

# Fit by gradient descent and report the learned parameters.
theta, cost = gradientDescent(x, y, 0.001, theta, 1000)
print(theta)

# Print each training example's prediction next to its target value.
theta_col = np.array(theta).reshape([3, 1])
for features, target in zip(x, y):
    print(features @ theta_col, target)

# Plot the cost history to confirm convergence.
plt.plot(cost)
plt.show()
