import tensorflow as tf
import numpy as np
from sklearn.datasets import load_boston
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt

# Subplot grid layout shared by the plots produced during/after training.
spr = 2  # number of subplot rows
spc = 2  # number of subplot columns
spn = 0  # current subplot index; incremented before each plt.subplot call
plt.figure(figsize=(8, 8))

# (1) Required packages are imported at the top of the file.
# (2) Load the Boston house-price dataset via sklearn.
# NOTE(review): load_boston was removed in scikit-learn 1.2; this line requires
# scikit-learn < 1.2 (or switch to fetch_openml(name="boston", version=1)).
x, y = load_boston(return_X_y=True)
y = y.reshape([-1, 1])  # column vector, matching the (None, 1) placeholder below

# (3) Keep 3 features, apply feature scaling, then shuffle the samples.
x = x[:, :3]  # first three features only
# Feature scaling: use SEPARATE scalers for x and y. The original code refit
# one shared StandardScaler on y, which clobbered the x statistics the object
# held (breaking any later inverse_transform of x). The transformed values
# are numerically identical either way, since fit_transform refits.
std_x = StandardScaler()
x = std_x.fit_transform(x)
m, n = x.shape
std_y = StandardScaler()
y = std_y.fit_transform(y)
# Shuffle rows, applying the same permutation to x and y to keep pairs aligned.
np.random.seed(666)
rand_idx = np.random.permutation(m)
x = x[rand_idx]
y = y[rand_idx]

# (4) Train/test split. (train_test_split shuffles again with a fixed seed,
# so the manual permutation above is redundant but harmless.)
x_train, x_test, y_train, y_test = train_test_split(x, y, train_size=0.7, random_state=666)

# (5) Define the input placeholders.
# Placeholders are TF1 graph-mode constructs: under TensorFlow 2.x they raise
# a RuntimeError unless eager execution is disabled first (BUG FIX — the
# original script crashed here when run on TF2).
tf.compat.v1.disable_eager_execution()
ph_x = tf.compat.v1.placeholder(tf.float32, [None, n], 'ph_x')
ph_y = tf.compat.v1.placeholder(tf.float32, [None, 1], 'ph_y')

# (6) Model parameters: weight vector and bias, randomly initialized.
w = tf.Variable(tf.random.normal([n, 1]), dtype=tf.float32, name='w')
b = tf.Variable(tf.random.normal([1, 1]), dtype=tf.float32, name='b')

# (7) Hypothesis (linear model): h = x @ w + b.
h = tf.matmul(ph_x, w) + b

# (8) Loss: half the mean squared error.
cost = tf.reduce_mean((h - ph_y) ** 2) / 2

# (9) Plain gradient-descent optimizer; learning rate is the hyperparameter.
train = tf.compat.v1.train.GradientDescentOptimizer(learning_rate=0.01)\
    .minimize(cost)

# R^2 score: 1 - MSE / variance of the targets.
r2 = 1 - tf.reduce_mean((h - ph_y) ** 2) / tf.reduce_mean((ph_y - tf.reduce_mean(ph_y)) ** 2)

# (10) Run the graph in a Session; train for 2000 iterations and analyze.
with tf.compat.v1.Session() as sess:
    sess.run(tf.compat.v1.global_variables_initializer())
    iters = 2000
    group = 200  # reporting interval
    cost_arr = np.zeros(iters)  # loss history, plotted below
    for i in range(iters):
        # One optimizer step on the full training set; also fetch the
        # current parameters, predictions, loss and R^2 for reporting.
        wv, bv, h_train, costv, _, r2v = sess.run([w, b, h, cost, train, r2],
                                        feed_dict={ph_x: x_train, ph_y: y_train})
        cost_arr[i] = costv
        # (11) Print the loss every `group` iterations.
        if i % group == 0:
            print(f'#{i + 1} cost = {costv}, r2 score = {r2v}')
    # Print the final loss if the loop didn't end on a reporting boundary.
    if i % group != 0:
        print(f'#{i + 1} cost = {costv}, r2 score = {r2v}')

    # Show how the cost function evolved over the iterations.
    spn += 1
    plt.subplot(spr, spc, spn)
    plt.plot(cost_arr, label='Cost function values')
    plt.legend()

    # (12) Predictions vs. targets on the training set.
    spn += 1
    plt.subplot(spr, spc, spn)
    plt.scatter(y_train, y_train, color='b', s=1, label='Target')
    plt.scatter(y_train, h_train, color='r', s=1, label='Hypothesis')
    plt.title(f'Train: R2 = {r2v:.2f}')
    plt.legend()
    # Validate on the held-out test set.
    h_test = sess.run(h, feed_dict={ph_x: x_test})  # test-set predictions
    r2v_test = sess.run(r2, feed_dict={ph_x: x_test, ph_y: y_test})  # test-set R^2
    spn += 1
    plt.subplot(spr, spc, spn)
    plt.scatter(y_test, y_test, color='b', s=1, label='Target')
    plt.scatter(y_test, h_test, color='r', s=1, label='Hypothesis')
    plt.title(f'Test: R2 = {r2v_test:.2f}')
    plt.legend()

# (13) Comments added throughout the code.
# Display the figure. BUG FIX: plt.show() was missing, so the plots were
# never rendered when the script ran non-interactively.
plt.show()
