from numpy import *
import matplotlib.pyplot as plt
import pandas as pd
import time
from IPython import display
import threading

# %%

# %matplotlib inline

# y = mx + b
# m is slope, b is y-intercept
# Error function: mean of the squared differences between predicted and actual y values.
def compute_error_for_line_given_points(b, m, points):
    """Mean squared error of the line y = m*x + b over the given points.

    Args:
        b: y-intercept of the candidate line.
        m: slope of the candidate line.
        points: 2-D array-like where points[i, 0] is x and points[i, 1] is y.

    Returns:
        The average of the squared residuals (y - (m*x + b)) ** 2.
    """
    squared_residuals = [
        (points[i, 1] - (m * points[i, 0] + b)) ** 2
        for i in range(len(points))
    ]
    return sum(squared_residuals) / float(len(points))


# Gradient-descent step: one iteration produces an updated (b, m) parameter pair.
def step_gradient(b_current, m_current, points, learningRate):
    """Perform one gradient-descent update on (b, m).

    Accumulates the (unscaled) gradient of the sum of residuals over all
    points, averages it, and steps against it.  Note the factor of 2 from
    differentiating the squared error is deliberately omitted here (it is
    effectively absorbed into the learning rate).

    Args:
        b_current: current y-intercept.
        m_current: current slope.
        points: 2-D array-like where points[i, 0] is x and points[i, 1] is y.
        learningRate: step size.

    Returns:
        [new_b, new_m] after a single update.
    """
    n = float(len(points))
    grad_b = 0
    grad_m = 0
    for idx in range(len(points)):
        xi = points[idx, 0]
        yi = points[idx, 1]
        residual = yi - ((m_current * xi) + b_current)
        grad_b += -residual
        grad_m += -xi * residual

    updated_b = b_current - (learningRate * grad_b / n)
    updated_m = m_current - (learningRate * grad_m / n)
    return [updated_b, updated_m]


# Shared mutable state between the worker thread (gradient_descent_runner)
# and the plotting loop (refreshGraphic) running on the main thread.
gM = 0       # current slope estimate, updated each iteration by the worker
gB = 0       # current intercept estimate, updated each iteration by the worker
gData = []   # set in gradient_descent_runner; pandas DataFrame with columns x, y
gPoint = []  # set in run(); raw numpy array of the CSV points


def refreshGraphic():
    """Continuously redraw the data scatter and the current fitted line.

    Runs forever on the calling (main) thread, reading the module globals
    gB, gM, gData and gPoint that the worker thread updates.  Never returns;
    the program must be interrupted manually.
    """
    while 1:
        # Report the live parameters.  The original message hard-coded
        # "After 1000 iterations", which was false -- no iteration counter
        # is shared with the worker thread, so no count is printed.
        print("\rb = {1}, m = {2}, error = {3}"
              .format(None, gB, gM, compute_error_for_line_given_points(gB, gM, gPoint)),
              end='', flush=True)

        # plt.pause both sleeps and services the matplotlib GUI event loop,
        # which is what lets the window repaint between frames.
        plt.pause(0.04)

        plt.clf()
        plt.plot(gData.x, gData.y, "bo")
        plt.plot(gData.x, gData.x * gM + gB)
        # In a notebook, keep the animation in a single output cell.
        display.clear_output(wait=True)


def gradient_descent_runner(data, points, starting_b, starting_m, learning_rate, num_iterations):
    """Run gradient descent, publishing intermediate (b, m) for the plot loop.

    Args:
        data: pandas DataFrame with columns x, y (stored in gData for plotting).
        points: 2-D array-like of (x, y) samples fed to step_gradient.
        starting_b: initial y-intercept.
        starting_m: initial slope.
        learning_rate: step size passed to step_gradient.
        num_iterations: number of descent steps to perform.

    Returns:
        [b, m] after num_iterations steps.
    """
    global gM
    global gB
    global gData
    gData = data
    b = starting_b
    m = starting_m

    # Convert once, outside the loop: the original rebuilt array(points)
    # on every single iteration.
    pts = array(points)

    for _ in range(num_iterations):
        b, m = step_gradient(b, m, pts, learning_rate)
        # Publish progress so refreshGraphic (other thread) can draw it.
        gB = b
        gM = m
        # Throttle the worker so the animation can keep up visually.
        time.sleep(0.01)

    return [b, m]


def run(csv_path='/Users/xunxun/Downloads/GradientDescentExample-master/data.csv'):
    """Load the data set and animate gradient descent fitting a line to it.

    Args:
        csv_path: path to a two-column (x, y) CSV file.  Defaults to the
            original hard-coded location so existing callers are unaffected.
    """
    global gPoint
    # The file is read twice on purpose: as a raw numpy array for the math
    # and as a DataFrame (with named columns) for plotting.
    points = genfromtxt(csv_path, delimiter=",")
    gPoint = points
    data = pd.read_csv(csv_path, names=['x', 'y'])
    learning_rate = 0.000001
    initial_b = 0  # initial y-intercept guess
    initial_m = 0  # initial slope guess
    num_iterations = 10000

    print("Starting gradient descent at b = {0}, m = {1}, error = {2}".format(initial_b, initial_m,
                                                                              compute_error_for_line_given_points(
                                                                                  initial_b, initial_m, points)))
    print("Running...")
    # Run the optimisation in a background thread so the main thread is free
    # to drive the matplotlib animation (GUI work must stay on the main thread).
    t = threading.Thread(target=gradient_descent_runner,
                         args=[data, points, initial_b, initial_m, learning_rate, num_iterations])
    t.start()
    # NOTE(review): refreshGraphic loops forever, so t.join() below is never
    # reached -- the program must be interrupted manually.
    refreshGraphic()
    t.join()


# Script entry point: run the demo only when executed directly.
if __name__ == '__main__':
    run()

