import numpy as np
import matplotlib.pyplot as plt

# One figure, two side-by-side panels (cost curve + prediction scatter).
plt.figure(figsize=[12, 5])
spr = 1  # subplot grid: rows
spc = 2  # subplot grid: columns
spn = 0  # index of the subplot currently being drawn

# Load the datasets. Each row is one sample: feature columns first,
# target value in the last column, space-separated.
train_data = np.loadtxt('train_data.txt', delimiter=' ')
test_data = np.loadtxt('test_data.txt', delimiter=' ')

# Split features/targets and prepend a bias column of ones so the
# intercept can be learned as theta[0].
x = train_data[:, :-1]            # training feature matrix
y = train_data[:, -1]             # training target vector
XX = np.c_[np.ones((len(x), 1)), x]

x_test = test_data[:, :-1]        # test feature matrix
y_test = test_data[:, -1]         # test target vector
XX_test = np.c_[np.ones((len(x_test), 1)), x_test]


# 实现线性回归的代价函数
def model(XX, theta):
    """
    Linear-regression hypothesis: h = XX @ theta.

    :param XX: Feature matrix with a bias column, shape (m, n).
    :param theta: Parameter vector, shape (n,).
    :return: Hypothesis (prediction) vector, shape (m,).
    """
    return XX @ theta


def cost_func(h, y):
    """
    Mean-squared-error cost J = mean((h - y)^2) / 2.

    :param h: Hypothesis values vector.
    :param y: Target values vector.
    :return: Scalar cost value.
    """
    return np.mean((h - y) ** 2) / 2.0


# 实现梯度下降函数
def gradient_descent_algorithm(XX, y, alpha=0.001, iter0=15000):
    """
    Batch gradient descent for linear regression.

    :param XX: Feature matrix with bias column, shape (m, n).
    :param y: Target values vector, shape (m,).
    :param alpha: Learning rate, scalar.
    :param iter0: Number of iterations, non-negative integer.
    :return: Tuple (theta, j_his, h):
        theta -- learned parameter vector, shape (n,);
        j_his -- cost function value at each iteration, shape (iter0,);
        h     -- hypothesis vector of the last iteration (computed before
                 the final parameter update).
    """
    m, n = XX.shape
    theta = np.zeros(n)      # start from the zero parameter vector
    j_his = np.zeros(iter0)  # cost history, one entry per iteration

    # Bug fix: the original bound `h` only inside the loop, so calling
    # with iter0 == 0 raised UnboundLocalError at the return statement.
    # Defining it up front keeps the function well-defined for any
    # non-negative iteration count.
    h = model(XX, theta)

    for i in range(iter0):
        h = model(XX, theta)
        j_his[i] = cost_func(h, y)
        # Gradient of J(theta): (1/m) * X^T (h - y)
        dt = 1.0 / m * XX.T.dot(h - y)
        theta -= alpha * dt

    return theta, j_his, h


# Train the regression model and report the cost at each iteration.
alpha = 0.001
iter0 = 1000
theta, j_his, h = gradient_descent_algorithm(XX, y, alpha, iter0)
print(f'Theta = {theta}')

# Print the cost value at 10 evenly spaced checkpoints.
checks = 10
group = iter0 // checks
print(f'Cost function values in every {group} iterations:')
for idx in range(0, iter0, group):
    print(f'#{idx}: {j_his[idx]}')

# Left panel: cost curve over the iterations.
spn += 1
plt.subplot(spr, spc, spn)
plt.plot(j_his, label='cost function values')
plt.xlabel('iteration numbers')
plt.grid()
plt.legend()

# Predict on the test set with the learned parameters.
h_test = model(XX_test, theta)

# Right panel: scatter of true vs. predicted values over the first feature.
spn += 1
plt.subplot(spr, spc, spn)
plt.scatter(x_test[:, 0], y_test, s=1, label='target values')
plt.scatter(x_test[:, 0], h_test, s=1, label='hypothesis values')
plt.xlabel('X')
plt.ylabel('Y')
plt.grid()
plt.legend()

plt.show()
