import numpy as np
import matplotlib.pyplot as plt


def mean_square_error(bias, weight, points):
    """
    Compute the mean squared error of the linear model y = weight * x + bias.

    :param bias: intercept (b) of the current model
    :param weight: slope (w) of the current model
    :param points: 2-D array-like of shape (N, 2); column 0 is x, column 1 is y
    :return: mean of the squared residuals under the current (w, b) model
    :raises ZeroDivisionError: if ``points`` is empty (matches original behavior)
    """
    points = np.asarray(points)
    # Vectorized residuals: one NumPy pass instead of a Python-level loop.
    residuals = points[:, 1] - (weight * points[:, 0] + bias)
    # Keep the explicit len() division so empty input still raises, as before.
    return np.sum(residuals ** 2) / len(points)


def step_gradient(current_bias, current_weight, points, learning_rate):
    """
    Perform one batch gradient-descent step for the model y = w * x + b.

    Gradients of the MSE loss:
        dL/db = (2/N) * sum((w*x + b) - y)
        dL/dw = (2/N) * sum(x * ((w*x + b) - y))

    :param current_bias: current intercept (b)
    :param current_weight: current slope (w)
    :param points: 2-D array-like of shape (N, 2); column 0 is x, column 1 is y
    :param learning_rate: step size for the parameter update
    :return: [new_bias, new_weight] after one update step
    """
    points = np.asarray(points)
    x = points[:, 0]
    y = points[:, 1]
    n = float(len(points))

    # Vectorized gradient accumulation replaces the per-row Python loop.
    residual = (current_weight * x + current_bias) - y
    bias_gradient = (2 / n) * np.sum(residual)
    weight_gradient = (2 / n) * np.sum(x * residual)

    # Step against the gradient direction.
    new_bias = current_bias - (learning_rate * bias_gradient)
    new_weight = current_weight - (learning_rate * weight_gradient)

    return [new_bias, new_weight]


# Global log of [iteration, loss] pairs appended by gradient_descent()
# and later rendered by draw_plot().
plot_data = []


def gradient_descent(points, initial_bias, initial_weight, learning_rate, iterations):
    """
    Run batch gradient descent to fit the linear model y = w * x + b.

    Side effect: appends [step, loss] entries to the module-level ``plot_data``
    list for later plotting, and prints progress every 50 iterations.

    :param points: 2-D array-like of shape (N, 2); column 0 is x, column 1 is y
    :param initial_bias: starting intercept (b)
    :param initial_weight: starting slope (w)
    :param learning_rate: step size for each update
    :param iterations: number of gradient-descent steps to perform
    :return: [bias, weight] after the final iteration
    """
    b = initial_bias
    w = initial_weight

    # Convert once, outside the loop — the original rebuilt the array on
    # every iteration, copying the data `iterations` times for no benefit.
    data = np.asarray(points)

    for step in range(iterations):
        b, w = step_gradient(b, w, data, learning_rate)
        loss = mean_square_error(b, w, data)
        plot_data.append([step, loss])
        if step % 50 == 0:
            print(f"iteration:{step}, loss:{loss}, bias:{b}, weight:{w}")
    return [b, w]


def draw_plot():
    """Plot the MSE loss against iteration count from the global plot_data log."""
    # SimHei provides the CJK glyphs used in the axis labels and title.
    plt.rcParams['font.sans-serif'] = ['SimHei']
    history = np.array(plot_data)
    steps, losses = history[:, 0], history[:, 1]
    plt.plot(steps, losses)
    plt.xlabel('迭代次数')
    plt.ylabel('方均误差')
    plt.title("迭代次数和方均误差的关系图")
    plt.show()


if __name__ == '__main__':
    # Load the (x, y) sample points from disk.
    data_points = np.genfromtxt("dataset.csv", delimiter=',')
    # print(data_points)

    # Hyperparameters: start from the zero model and take many small steps.
    start_bias, start_weight = 0, 0
    learning_rate = 0.0001
    iterations = 9000

    b, w = gradient_descent(data_points, start_bias, start_weight, learning_rate, iterations)
    loss = mean_square_error(b, w, data_points)
    print(f"final results: weight:{w}, bias:{b}, loss:{loss}")
    draw_plot()
