import matplotlib.pyplot as plt
import numpy as np


class Linear_Regression:
    """Linear regression on a CSV dataset, solved two ways:

    - closed-form least squares (normal equation, via pseudo-inverse)
    - batch gradient descent on min-max-normalized data
    """

    def __init__(self):
        # Load comma-separated training data from a.txt-style file as float64.
        self.data = np.loadtxt('ex1data2.txt', delimiter=',', dtype=np.float64)
        # Number of training examples (rows).
        self.m = self.data.shape[0]
        # Min-max normalize every column (features AND target) into [0, 1].
        self.normalization_data = self.maxminnorm(self.data)

    def normal_equation(self, X, y):
        """Solve for theta in closed form (least squares).

        :param X: (m, n) design matrix
        :param y: (m,) target vector
        :return: (n,) parameter vector theta = (X^T X)^+ X^T y
        """
        # pinv (pseudo-inverse) keeps this well-defined even when
        # X.T @ X is singular.
        return np.linalg.pinv(X.T.dot(X)).dot(X.T).dot(y)

    def cost(self, X, theta, y):
        """Mean squared error cost J(theta) = ||X @ theta - y||^2 / (2m).

        :param X: (m, n) design matrix
        :param theta: (n,) parameter vector (the original docstring
            mislabeled this as the step size; `alpha` is the step size)
        :param y: (m,) target vector
        :return: scalar cost value
        """
        m = X.shape[0]
        residual = X.dot(theta) - y
        return residual.T.dot(residual) / (2 * m)

    def gradient_descent(self, X, theta, y, alpha, iterations):
        """Run batch gradient descent and record the cost per iteration.

        :param X: (m, n) design matrix
        :param theta: (n,) initial parameters; NOT modified — the original
            used in-place `-=`, silently clobbering the caller's array
        :param y: (m,) target vector
        :param alpha: learning rate (step size)
        :param iterations: number of iterations to run
        :return: (theta, costs) — final parameters and list of per-iteration
            cost values
        """
        m = X.shape[0]
        # Work on a float64 copy so the caller's array is never mutated.
        theta = np.array(theta, dtype=np.float64)
        costs = []  # cost value after each update, for plotting
        for _ in range(iterations):
            # Simultaneous update of all components of theta.
            theta -= (alpha / m) * X.T.dot(X.dot(theta) - y)
            costs.append(self.cost(X, theta, y))
        return theta, costs

    def maxminnorm(self, array):
        """Min-max normalize each column of a 2-D array into [0, 1].

        :param array: (rows, cols) numeric array to normalize
        :return: normalized float array of the same shape
        """
        maxcols = array.max(axis=0)
        mincols = array.min(axis=0)
        # Guard against a constant column: its range is 0 and the original
        # per-column loop divided by zero (yielding NaN). Substituting a
        # range of 1 maps such a column to all zeros instead.
        ranges = maxcols - mincols
        ranges = np.where(ranges == 0, 1.0, ranges)
        # Broadcasting replaces the original per-column Python loop.
        return (array - mincols) / ranges

    def main(self):
        """Fit the model both ways and plot the gradient-descent cost curve.

        :return: None
        """
        theta = np.zeros((2,))
        print(theta)
        alpha = 0.1
        iterations = 10000
        # First two columns are the features.
        X = self.normalization_data[:, 0:2]
        # Third column is the price (target).
        y = self.normalization_data[:, 2]
        theta, c = self.gradient_descent(X=X,
                                         theta=theta,
                                         y=y,
                                         alpha=alpha,
                                         iterations=iterations)
        # Configure matplotlib so CJK labels render correctly.
        plt.rcParams["font.sans-serif"] = ["SimHei"]
        plt.rcParams["axes.unicode_minus"] = False
        plt.title("损失函数J(θ)")
        # Fixed label: the original said "接待次数" (reception count), an
        # evident typo for "迭代次数" (iteration count) — the x axis is the
        # iteration index.
        plt.xlabel("迭代次数")
        plt.ylabel("损失值")
        plt.plot(range(iterations), c, color="red")
        plt.show()
        print("使用梯度下降:", theta)
        print("使用最小二乘法", self.normal_equation(X, y))


if __name__ == '__main__':
    # Build the model (loads and normalizes the dataset) and run the demo.
    Linear_Regression().main()
