from __future__ import print_function
import torch
import time
import random
import numpy as np
import pandas as pd

# Wall-clock timestamp used for the total-runtime report printed at the bottom of the file.
start = time.perf_counter()


# Mean of the total squared error over all points.
def compute_error_for_line_given_points(b, wu, wv, points):
    """Return the mean squared error of the plane y = wu*u + wv*v + b.

    Args:
        b: intercept term.
        wu: weight for the first feature column (u).
        wv: weight for the second feature column (v).
        points: array-like of shape (N, 3) whose columns are (u, v, y).

    Returns:
        float: mean of the squared residuals over all N points.
    """
    pts = np.asarray(points, dtype=float)
    u, v, y = pts[:, 0], pts[:, 1], pts[:, 2]
    # Vectorized form of the original per-row Python loop — same result,
    # but the summation runs in C instead of interpreted bytecode.
    residuals = y - (wu * u + wv * v + b)
    return float(np.mean(residuals ** 2))


# One step of the gradient algorithm.
def step_gradient(b_current, wu_current, wv_current, points, learningRate):
    """Perform a single batch gradient-descent step on the MSE loss.

    Args:
        b_current: current intercept.
        wu_current: current weight for feature u.
        wv_current: current weight for feature v.
        points: array-like of shape (N, 3) with columns (u, v, y).
        learningRate: step size applied to each gradient component.

    Returns:
        list: [new_b, new_wu, new_wv] after one update.
    """
    pts = np.asarray(points, dtype=float)
    N = float(len(pts))
    u, v, y = pts[:, 0], pts[:, 1], pts[:, 2]
    # The residual is shared by all three partial derivatives; the original
    # recomputed it three times per point.
    residual = y - (wu_current * u + wv_current * v + b_current)
    b_gradient = -(2.0 / N) * residual.sum()
    wu_gradient = -(2.0 / N) * (u * residual).sum()
    wv_gradient = -(2.0 / N) * (v * residual).sum()
    new_b = b_current - learningRate * b_gradient
    new_wu = wu_current - learningRate * wu_gradient
    new_wv = wv_current - learningRate * wv_gradient
    return [new_b, new_wu, new_wv]


# Run the iteration loop.
def gradient_descent_runner(points, starting_b, starting_wu, starting_wv, learning_rate, num_iteration):
    """Run `num_iteration` gradient-descent steps from the given start.

    Args:
        points: array-like of shape (N, 3) with columns (u, v, y).
        starting_b, starting_wu, starting_wv: initial parameter values.
        learning_rate: step size forwarded to step_gradient.
        num_iteration: number of update steps to run.

    Returns:
        list: [b, wu, wv] after the final step.
    """
    # Convert once up front — the original rebuilt the array on every
    # iteration (num_iteration array copies for no benefit).
    pts = np.asarray(points)
    b, wu, wv = starting_b, starting_wu, starting_wv
    for _ in range(num_iteration):
        b, wu, wv = step_gradient(b, wu, wv, pts, learning_rate)
    return [b, wu, wv]


def run():
    """Load (u, v, y) samples from the Excel sheet and fit y ≈ wu*u + wv*v + b
    by batch gradient descent, printing the error before and after.

    NOTE(review): the Windows path and hyperparameters below are hard-coded
    for one specific dataset — adjust before reuse.
    """
    x = pd.read_excel("E:\本科教学\大三下\数学建模\新疆大学数学建模校赛2021年度赛题及说明\A题\data.xlsx", header=None)
    x = np.array(x)
    # Columns 1 and 2 are the features (u, v); column 5 is the target y.
    # column_stack replaces the original nested vstack + transpose.
    points = np.column_stack((x[:, 1], x[:, 2], x[:, 5]))
    learning_rate = 0.00000005  # tiny step size; features appear un-normalized
    initial_b = 0
    initial_wu = 0
    initial_wv = 0
    num_iterations = 100000
    print("Starting gradient descent at b = {0}, wu = {1}, wv={2}, error = {3}"
          .format(initial_b, initial_wu, initial_wv,
                  compute_error_for_line_given_points(initial_b, initial_wu, initial_wv, points)))
    print("Running")  # fixed typo: was "Runing"
    [b, wu, wv] = gradient_descent_runner(points, initial_b, initial_wu, initial_wv, learning_rate, num_iterations)
    print(wu)
    print(wv)
    print(b)
    print("After {0} iterations b = {1}, wu = {2}, wv={3}, error = {4}".
          format(num_iterations, b, wu, wv, compute_error_for_line_given_points(b, wu, wv, points)))


if __name__ == '__main__':
    run()

end = time.perf_counter()

# Prints total elapsed wall-clock time ("运行耗时" = "runtime").
# NOTE(review): these two lines execute at import time as well, not only when
# the script is run directly — consider moving them under the __main__ guard.
print("运行耗时：", end - start)
