'''Binary logistic regression on the iris data set: classify setosa (0) vs.
virginica (2) from sepal length/width, plotting the samples, the learned
decision regions and boundary, and the loss/accuracy training curves.'''
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
import tensorflow as tf
import pandas as pd


# Download the iris training / test CSV files (cached locally by Keras).
TRAIN_URL = "http://download.tensorflow.org/data/iris_training.csv"
train_path = tf.keras.utils.get_file(TRAIN_URL.split('/')[-1],TRAIN_URL)

TEST_URL = "http://download.tensorflow.org/data/iris_test.csv"
test_path = tf.keras.utils.get_file(TEST_URL.split('/')[-1],TEST_URL)
column_names = ['SepalLength','SepalWidth','PetalLength','PetalWidth','Species']
# The first CSV row is metadata, so skip it (header=0) and apply our own
# column names; the original code defined column_names but never used it.
df_iris_train = pd.read_csv(train_path,names=column_names,header=0)
df_iris_test = pd.read_csv(test_path,names=column_names,header=0)
# Convert the DataFrames to plain NumPy arrays.
iris_train = np.array(df_iris_train)
iris_test = np.array(df_iris_test)
# Features: sepal length and sepal width (columns 0-1).
train_x = iris_train[:,0:2]
test_x = iris_test[:,0:2]
# Labels: species code in column 4 (0=setosa, 1=versicolor, 2=virginica).
train_y = iris_train[:,4]
test_y = iris_test[:,4]
# Keep only class 0 (setosa) and class 2 (virginica) so the task is binary.
x_train = train_x[train_y!=1]
x_test = test_x[test_y!=1]
# Matching label vectors for the kept rows.
y_train = train_y[train_y!=1]
y_test = test_y[test_y!=1]

# Relabel virginica (2) as 1 so the targets are {0, 1} for logistic regression.
y_train[y_train==2] = 1
y_test[y_test==2] = 1

num_train = len(x_train)
num_test = len(x_test)
plt.figure(figsize=(10,3))
# Point colors: label 0 -> blue, label 1 -> red.
cm_pt = mpl.colors.ListedColormap(["blue","red"])
# Background colors for the two predicted regions.
cm_bg=mpl.colors.ListedColormap(["#FFD700","#008000"])
# Center the features. The test set is shifted by the TRAINING mean: centering
# it by its own mean (as the original code did) uses test-set statistics and
# makes the train/test feature spaces inconsistent with each other.
train_mean = np.mean(x_train,axis=0)
x_train = x_train - train_mean
x_test = x_test - train_mean


# Preview the centered training (left) and test (right) samples,
# colored by class label.
for pos, (features, labels) in ((121, (x_train, y_train)),
                                (122, (x_test, y_test))):
    plt.subplot(pos)
    plt.scatter(features[:, 0], features[:, 1], marker='.', c=labels, cmap=cm_pt)
plt.show()
# Build the design matrices: prepend a bias column of ones to the two
# features and cast everything to float32 tensors for TensorFlow.
bias_train = np.ones((num_train, 1))
X_train = tf.cast(tf.concat((bias_train, x_train), axis=1), dtype=tf.float32)
Y_train = tf.cast(y_train.reshape(-1, 1), dtype=tf.float32)
bias_test = np.ones((num_test, 1))
X_test = tf.cast(tf.concat((bias_test, x_test), axis=1), dtype=tf.float32)
Y_test = tf.cast(y_test.reshape(-1, 1), dtype=tf.float32)
# --- Hyperparameters ---
learn_rate = 0.2   # gradient-descent step size
iter = 200         # number of training iterations (NOTE: shadows builtin iter)
display_step = 15  # print progress every this many iterations

# --- Model parameters ---
# Fixed seed so the random initial weights are reproducible.
np.random.seed(612)
# Weight vector (bias + 2 features), drawn from a standard normal.
w = tf.Variable(np.random.randn(3,1),dtype=tf.float32)

# --- History buffers, filled once per iteration ---
ce_train, ce_test = [], []    # cross-entropy loss on train / test
acc_train, acc_test = [], []  # accuracy on train / test

for i in range(0,iter+1):
    # Only the training forward pass needs gradient tracking; keep the tape
    # scope minimal so TensorFlow does not record the test-set pass as well
    # (the original code computed the test loss inside the tape for no reason).
    with tf.GradientTape() as tape:
        # Sigmoid of the linear model: predicted P(y=1 | x).
        pred_train = 1/(1+tf.exp(-tf.matmul(X_train,w)))
        # Binary cross-entropy loss on the training set.
        loss_train = -tf.reduce_mean(Y_train*tf.math.log(pred_train)+(1-Y_train)*tf.math.log(1-pred_train))
    # Evaluate the test set outside the tape — no gradients are needed for it.
    pred_test = 1/(1+tf.exp(-tf.matmul(X_test,w)))
    loss_test = -tf.reduce_mean(Y_test*tf.math.log(pred_test)+(1-Y_test)*tf.math.log(1-pred_test))
    # Threshold the probabilities at 0.5 and compare with the true labels.
    accuracy_train = tf.reduce_mean(tf.cast(tf.equal(tf.where(pred_train.numpy()<0.5,0.,1.),Y_train),tf.float32))
    accuracy_test = tf.reduce_mean(tf.cast(tf.equal(tf.where(pred_test.numpy()<0.5,0.,1.),Y_test),tf.float32))
    # Record cross-entropy loss (the original comments mislabeled it as MSE)
    # and accuracy; these histories are plotted as curves later.
    ce_train.append(loss_train)
    ce_test.append(loss_test)
    acc_train.append(accuracy_train)
    acc_test.append(accuracy_test)
    # Gradient-descent update: w <- w - learn_rate * dL/dw.
    dl_dw = tape.gradient(loss_train,w)
    w.assign_sub(learn_rate*dl_dw)
    if i % display_step == 0:
        print("i:",i," ,trainacc:",accuracy_train.numpy()," ,trainloss:",loss_train.numpy()," ,testacc",accuracy_test.numpy(), " ,testloss:",loss_test.numpy())


 # 绘制分类图
# Evaluate the classifier on a dense M-by-M grid over the training-feature
# range and shade the two predicted regions behind the training points.
M = 300
a_min, b_min = x_train.min(axis=0)
a_max, b_max = x_train.max(axis=0)
grid_a, grid_b = np.meshgrid(np.linspace(a_min, a_max, M),
                             np.linspace(b_min, b_max, M))

# Design rows [1, x1, x2] for every grid point.
mesh_X = tf.cast(np.stack((np.ones(M * M), grid_a.reshape(-1), grid_b.reshape(-1)), axis=1),
                 dtype=tf.float32)
# Sigmoid probabilities, thresholded at 0.5 into hard 0/1 labels.
mesh_Y = tf.cast(1 / (1 + tf.exp(-tf.matmul(mesh_X, w))), dtype=tf.float32)
mesh_Y = tf.where(mesh_Y < 0.5, 0, 1)

region = tf.reshape(mesh_Y, grid_a.shape)
plt.subplot(121)
plt.pcolormesh(grid_a, grid_b, region, cmap=cm_bg)
plt.scatter(x_train[:, 0], x_train[:, 1], marker='.', c=y_train, cmap=cm_pt)

# Same decision-region visualization for the test data (right subplot).
M = 300
a_min, b_min = x_test.min(axis=0)
a_max, b_max = x_test.max(axis=0)
grid_a, grid_b = np.meshgrid(np.linspace(a_min, a_max, M),
                             np.linspace(b_min, b_max, M))

# Design rows [1, x1, x2] for every grid point.
mesh_X = tf.cast(np.stack((np.ones(M * M), grid_a.reshape(-1), grid_b.reshape(-1)), axis=1),
                 dtype=tf.float32)
# Sigmoid probabilities, thresholded at 0.5 into hard 0/1 labels.
mesh_Y = tf.cast(1 / (1 + tf.exp(-tf.matmul(mesh_X, w))), dtype=tf.float32)
mesh_Y = tf.where(mesh_Y < 0.5, 0, 1)

region = tf.reshape(mesh_Y, grid_a.shape)
plt.subplot(122)
plt.pcolormesh(grid_a, grid_b, region, cmap=cm_bg)
plt.scatter(x_test[:, 0], x_test[:, 1], marker='.', c=y_test, cmap=cm_pt)
plt.show()
# Plot the per-iteration loss (left) and accuracy (right) curves,
# training history in blue vs. test history in red.
plt.figure(figsize=(10,3))
for pos, train_hist, test_hist, label in ((121, ce_train, ce_test, "Loss"),
                                          (122, acc_train, acc_test, "Accuracy")):
    plt.subplot(pos)
    plt.plot(train_hist, color="blue", label="train")
    plt.plot(test_hist, color="red", label="test")
    plt.ylabel(label)
    plt.legend()
plt.show()
# Scatter each split with the fitted decision boundary
# w0 + w1*x1 + w2*x2 = 0, i.e. x2 = -(w0 + w1*x1) / w2.
for pos, features, labels, xs in ((121, x_train, y_train, [-1.5, 2.5]),
                                  (122, x_test, y_test, [-1.5, 1.5])):
    plt.subplot(pos)
    plt.scatter(features[:, 0], features[:, 1], marker='.', c=labels, cmap=cm_pt)
    ys = -(w[0] + w[1] * xs) / w[2]
    plt.plot(xs, ys, color='g')
plt.show()