import matplotlib.pyplot as plt  
import numpy as np 
import pandas as pd
from sklearn.datasets import load_iris   # import the iris dataset (NOTE: unused here; datasets.load_iris below is what is actually called)
from sklearn import datasets
# Load the iris dataset: 150 samples, 4 features, 3 classes.
iris_data = datasets.load_iris()
input_data = iris_data.data
correct = iris_data.target
n_data = len(correct)  # total number of samples

# Standardize each feature column to zero mean and unit variance.
ave_input = np.average(input_data, axis=0)
std_input = np.std(input_data, axis=0)
input_data = (input_data - ave_input) / std_input

# One-hot encode the class labels into an (n_data, 3) matrix.
correct_data = np.zeros((n_data, 3))
correct_data[np.arange(n_data), correct] = 1.0

# Deterministic train/test split: even indices -> train, odd indices -> test.
index = np.arange(n_data)
index_train = index[index % 2 == 0]
index_test = index[index % 2 != 0]
input_train = input_data[index_train, :]
input_test = input_data[index_test, :]
correct_train = correct_data[index_train, :]
correct_test = correct_data[index_test, :]
    
#2. Network layers are implemented as classes below; hyperparameters first.
n_in=4        # number of input features (iris has 4)
n_mid=2       # number of neurons in each hidden layer
n_out=3       # number of output classes
wb_width=0.01 # scale of the random weight/bias initialization
eta=0.01      # learning rate for gradient descent

n_train=input_train.shape[0]  # number of training samples
n_test=input_test.shape[0]    # number of test samples
class Baselayer:
    """Common base for network layers: owns the weights/bias and the SGD step."""

    def __init__(self, n_upper, n):
        # Small random initialization scaled by the module-level wb_width.
        self.w = wb_width * np.random.randn(n_upper, n)  # weight matrix
        self.b = wb_width * np.random.randn(n)           # bias vector

    def update(self, eta):
        # Plain gradient-descent step using gradients stored by backward().
        self.w = self.w - eta * self.grad_w
        self.b = self.b - eta * self.grad_b
class MiddleLayer(Baselayer):
    """Hidden layer with ReLU activation."""

    def forward(self, x):
        """Compute y = ReLU(x·w + b) and cache x and u for backpropagation."""
        self.x = x
        self.u = np.dot(x, self.w) + self.b
        self.y = np.where(self.u < 0, 0, self.u)  # ReLU

    def backward(self, grad_y):
        """Backpropagate grad_y; store grad_w / grad_b and grad_x for the layer below.

        BUG FIX: the ReLU derivative is 1 (not u) where u > 0. The original
        multiplied by np.where(self.u <= 0, 0, self.u), i.e. by the
        pre-activation value itself, which scales every gradient by u.
        """
        delta = grad_y * np.where(self.u <= 0, 0, 1)
        self.grad_w = np.dot(self.x.T, delta)  # gradient w.r.t. weights
        self.grad_b = np.sum(delta, axis=0)    # gradient w.r.t. bias
        self.grad_x = np.dot(delta, self.w.T)  # gradient passed to the previous layer
class Outputlayer(Baselayer):
    """Output layer with softmax activation; paired with a cross-entropy loss."""

    def forward(self, x):
        # Cache the input, apply the affine map, then row-wise softmax.
        self.x = x
        u = np.dot(x, self.w) + self.b
        exp_u = np.exp(u)
        self.y = exp_u / np.sum(exp_u, axis=1, keepdims=True)  # keepdims keeps shape (batch, 1)

    def backward(self, t):
        # Softmax combined with cross-entropy yields the simple gradient y - t.
        delta = self.y - t
        self.grad_w = np.dot(self.x.T, delta)  # gradient w.r.t. weights
        self.grad_b = np.sum(delta, axis=0)    # gradient w.r.t. bias
        self.grad_x = np.dot(delta, self.w.T)  # gradient passed to the previous layer
        
        
#3. Build the network: two hidden layers and one output layer.
middleLayer_1=MiddleLayer(n_in,n_mid)   # 4 inputs -> 2 hidden units
middleLayer_2=MiddleLayer(n_mid,n_mid)  # 2 hidden -> 2 hidden
outputLayer_1=Outputlayer(n_mid,n_out)  # 2 hidden -> 3 class scores

def forward_propagation(x):
    """Run x through both hidden layers, then the output layer."""
    middleLayer_1.forward(x)
    hidden_1 = middleLayer_1.y
    middleLayer_2.forward(hidden_1)
    hidden_2 = middleLayer_2.y
    outputLayer_1.forward(hidden_2)

def backward_propagation(t):
    """Propagate the error signal from the output layer back toward the input."""
    outputLayer_1.backward(t)
    grad = outputLayer_1.grad_x
    middleLayer_2.backward(grad)
    grad = middleLayer_2.grad_x
    middleLayer_1.backward(grad)
    
def update_wb():
    """Apply one gradient-descent step to every layer's weights and biases."""
    for layer in (middleLayer_1, middleLayer_2, outputLayer_1):
        layer.update(eta)

# Cross-entropy error of the network's most recent forward pass.
def get_error(t, batch_size):
    """Return the mean cross-entropy between targets t and outputLayer_1.y.

    The 1e-7 offset guards against log(0) for saturated softmax outputs.
    """
    log_y = np.log(outputLayer_1.y + 1e-7)
    return -np.sum(t * log_y) / batch_size

# Error history for plotting (x = epoch index, y = error value).
train_error_x=[]
train_error_y=[]
test_error_x=[]
test_error_y=[]
# forward_propagation(input_train)

#4. Mini-batch learning settings.
epoch = 1000  # number of training epochs
interval=100  # print/plot progress every `interval` epochs
batch_size=5  # samples per mini-batch
n_batch=n_train//batch_size  # mini-batches per epoch
for i in range(epoch):
    # Record full-set errors with the current weights (before this epoch's updates).
    forward_propagation(input_train)
    error_train=get_error(correct_train,n_train)
    forward_propagation(input_test)
    error_test=get_error(correct_test,n_test)
    train_error_x.append(i)
    train_error_y.append(error_train)
    test_error_x.append(i)
    test_error_y.append(error_test)
    index_random=np.arange(n_train)  # reshuffle the sample order each epoch (stochastic gradient descent)
    np.random.shuffle(index_random)
    # tb_index=index_random[j*batch_size:(j+1)*batch_size]
    # print(index_random)
    if i % interval==0:
        print("epoch:"+str(i)+"/"+str(epoch),
              "error_train:"+str(error_train),
              "error_test:"+str(error_test))
    # Mini-batch loop: one forward/backward/update per batch of `batch_size` samples.
    for j in range(n_batch):
        tb_index = index_random[j * batch_size:(j+1)*batch_size]
        # print(tb_index)
        x=input_train[tb_index,:]
        t=correct_train[tb_index,:]
        forward_propagation(x)
        backward_propagation(t)
        update_wb()
    # NOTE(review): plt.show() inside the loop blocks training every `interval`
    # epochs until the figure window is closed — consider plotting once after
    # the loop instead; confirm whether the repeated display is intentional.
    if i % interval ==0:
        plt.plot(train_error_x,train_error_y,label="train")
        plt.plot(test_error_x,test_error_y,label="test")
        plt.legend()
        plt.xlabel("epochs")
        plt.ylabel("error")
        plt.show()