from mpl_toolkits import mplot3d
import numpy as np
import matplotlib.pyplot as plt
from python_ai.ML.lin_regression.xlib import *
from scipy.optimize import curve_fit

# Load data
# Each row of ex1data2.txt is "feature1,feature2,target" (comma-separated);
# presumably the classic Ng ex1 housing data (size, bedrooms, price) — TODO confirm.
dataLoaded = np.loadtxt(r'./data/ex1data2.txt', delimiter=',')
m = dataLoaded.shape[0]  # number of training examples
print(f'data len = {m}')
# Reshape each column into an (m, 1) column vector.
x1 = dataLoaded[:, 0].reshape(m, 1)
x2 = dataLoaded[:, 1].reshape(m, 1)
y = dataLoaded[:, 2].reshape(m, 1)

# Scale data
# scale_feature_data (from xlib) appears to return the normalized data plus the
# per-column mean and sigma used for the normalization — TODO confirm in xlib.
dataScaled, x_mu, x_sigma = scale_feature_data(dataLoaded)
x1_scaled = dataScaled[:, 0].reshape(m, 1)
x2_scaled = dataScaled[:, 1].reshape(m, 1)
y_scaled = dataScaled[:, 2].reshape(m, 1)

# data for plot
# Evenly spaced grids over each feature's range, in both scaled and original
# units, used later to draw the fitted line in both coordinate systems.
plt_x_scaled = np.linspace(dataScaled[:, 0].min(), dataScaled[:, 0].max(), m)
plt_y_scaled = np.linspace(dataScaled[:, 1].min(), dataScaled[:, 1].max(), m)
plt_x = np.linspace(dataLoaded[:, 0].min(), dataLoaded[:, 0].max(), m)
plt_y = np.linspace(dataLoaded[:, 1].min(), dataLoaded[:, 1].max(), m)

# gradient descent
# Design matrix: leading bias column of ones, then the two scaled features.
X_scaled = np.c_[np.ones([m, 1]), x1_scaled, x2_scaled]
num_iters_ori = num_iters = 150000
# NOTE(review): 'alpher' is the learning-rate keyword as declared in xlib's
# gradient_descent_algorithm; the misspelling cannot be fixed from this call site.
theta, history, xscores = gradient_descent_algorithm(X_scaled, y_scaled, alpher=0.001, num_iters=num_iters)
# history holds one cost value per executed iteration; a shorter history than
# requested implies the algorithm stopped early (converged).
num_iters = len(history)
if num_iters < num_iters_ori:
    print(f'Convergence at {num_iters}th iteration')
else:
    print('None convergence!!!')
print('THETA', theta)

# descale
# scale_theta_back (from xlib) presumably maps the parameters fitted on scaled
# data back into original feature/target units — TODO confirm in xlib.
x1x2 = np.c_[x1, x2]
theta0 = scale_theta_back(x1x2, y, theta)
print('THETA0', theta0)


def func_fit(X, theta):
    """Linear-model prediction: the matrix product of design matrix X and theta."""
    return X @ theta


def on_ax(ax):
    """Draw the training data and both fitted lines on a 3D axis.

    Reads the module-level globals computed above: the raw and scaled feature
    columns, the plotting grids, and the parameter vectors ``theta`` (scaled
    space) and ``theta0`` (original space).
    """
    ax.set_title('梯度下降算法')

    # Scatter order fixes matplotlib's colour cycle: scaled points first, raw second.
    ax.scatter3D(x1_scaled, x2_scaled, y_scaled)
    ax.scatter3D(x1, x2, y)

    # Fitted line in scaled coordinates (dashed red).
    grid_scaled = np.c_[np.ones([m, 1]), plt_x_scaled.reshape(m, 1), plt_y_scaled.reshape(m, 1)]
    ax.plot3D(plt_x_scaled, plt_y_scaled, func_fit(grid_scaled, theta).ravel(), 'r--')

    # Fitted line in original coordinates (solid red), using the descaled theta0.
    grid_raw = np.c_[np.ones([m, 1]), plt_x.reshape(m, 1), plt_y.reshape(m, 1)]
    ax.plot3D(plt_x, plt_y, func_fit(grid_raw, theta0).ravel(), 'r-')

    for setter, label in ((ax.set_xlabel, 'x'), (ax.set_ylabel, 'y'), (ax.set_zlabel, 'z')):
        setter(label)

plt.ioff()  # make plt.show() block until the windows are closed

# First window: the 3D fit on its own.
fig = plt.figure(figsize=[12, 8])
n_rows, n_cols = 1, 1
plt.rcParams['font.sans-serif'] = ['Simhei']  # SimHei so the Chinese title renders
plt.rcParams['axes.unicode_minus'] = False  # keep the minus sign displayable

ax = fig.add_subplot(n_rows, n_cols, 1, projection='3d')
on_ax(ax)

# Second window: the same 3D fit plus diagnostic curves in a 2x2 grid.
fig = plt.figure(figsize=[12, 8])
n_rows, n_cols = 2, 2
plt.rcParams['font.sans-serif'] = ['Simhei']  # re-applied; harmless repeat
plt.rcParams['axes.unicode_minus'] = False

ax = fig.add_subplot(n_rows, n_cols, 1, projection='3d')
on_ax(ax)

# Full cost history over every executed iteration.
plt.subplot(n_rows, n_cols, 2)
plt.title('Cost function value')
plt.plot(range(num_iters), history)
plt.grid()

# Second half only, to magnify late-convergence behaviour.
half = num_iters // 2
plt.subplot(n_rows, n_cols, 3)
plt.title('Cost function value, 2nd half')
plt.plot(range(half, num_iters), history[half:])
plt.grid()

# Optional score curve — only drawn when the library returned one.
# NOTE(review): assumes len(xscores) == num_iters when non-empty; confirm in xlib.
if len(xscores) > 0:
    plt.subplot(n_rows, n_cols, 4)
    plt.title('Score function value')
    plt.plot(range(num_iters), xscores)
    plt.grid()

plt.show()
