"""Univariate linear regression fitted with batch gradient descent."""
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from sklearn import linear_model  # NOTE(review): imported but never used below

# Render Chinese text in plot titles/labels.
mpl.rcParams['font.sans-serif'] = ['SimHei']
mpl.rcParams['axes.unicode_minus'] = False  # SimHei lacks the Unicode minus glyph

# Load the training data (comma-separated columns: feature, target).
data = np.loadtxt('data/ex1data1.txt', delimiter=',')
print(data)

# Split the two columns into the feature vector X and the target y.
X, y = data[:, 0], data[:, 1]

# Feature scaling, in place: zero mean, unit sample standard deviation.
X -= np.mean(X, 0)
X /= np.std(X, 0, ddof=1)

# Prepend a bias column of ones; reshape y into an (m, 1) column vector.
m = X.shape[0]
X = np.c_[np.ones(m), X]
y = np.c_[y]
print(y)

# Cost function for linear regression
def costFunction(X, y, theta):
    """Return the squared-error cost J(theta) = (1/2m) * ||X.theta - y||^2.

    X is the (m, n) design matrix, y an (m, 1) column vector of targets,
    theta an (n, 1) parameter column vector; the result is a 1x1 array.
    """
    m = X.shape[0]
    residual = X.dot(theta) - y
    return residual.T.dot(residual) / (2 * m)

# Batch gradient descent
def gradDesc(X, y, alpha=0.01, iter_num=15000):
    """Fit linear-regression parameters by batch gradient descent.

    Parameters
    ----------
    X : (m, n) design matrix; the first column is expected to be ones (bias).
    y : (m, 1) column vector of targets.
    alpha : learning rate.
    iter_num : number of gradient-descent iterations.

    Returns
    -------
    theta : (n, 1) fitted parameter vector.
    J_history : (iter_num,) cost recorded before each update.
    """
    m, n = X.shape
    theta = np.zeros((n, 1))
    J_history = np.zeros(iter_num)

    for i in range(iter_num):
        # Compute the predictions once and reuse them for both the cost and
        # the gradient (the original recomputed X.dot(theta) twice per step).
        residual = X.dot(theta) - y
        # .item() extracts the scalar from the 1x1 product: assigning a 1x1
        # array into J_history[i] is deprecated (an error in NumPy >= 2.0).
        J_history[i] = (residual.T.dot(residual) / (2 * m)).item()
        theta -= (alpha / m) * X.T.dot(residual)
    return theta, J_history

theta, J_history = gradDesc(X, y)

#  ----------------- Graph ---------------------
# Cost history: should decrease monotonically if alpha is well chosen.
fig1, ax1 = plt.subplots(num='代价函数')
ax1.set_title('代价曲线')
ax1.set_xlabel('迭代次数')
ax1.set_ylabel('代价')
ax1.plot(J_history)
plt.show()

# Scatter the ground-truth targets against the fitted model's predictions.
h = X.dot(theta)
fig2, ax2 = plt.subplots(num='真实值和预测值')
ax2.set_title('真实值与预测值')
ax2.scatter(y, y, c='r', label='真实值')
ax2.scatter(y, h, marker='x', label='预测值')
ax2.legend(loc='upper left')
plt.show()


