import numpy as np
from sklearn import datasets, linear_model
import matplotlib.pyplot as plt
from sklearn.metrics import accuracy_score
# Initialize data: two interleaving half-moons, 200 points, 2 classes.
np.random.seed(0)
x, y = datasets.make_moons(200, noise=0.20)
y_true = np.array(y).astype(float)  # NOTE(review): unused below; kept for compatibility
# Generate one-hot network targets: row i has a 1 in column y[i] (y is 0/1).
t = np.zeros((x.shape[0], 2))
t[np.arange(x.shape[0]), y] = 1
# Plot the raw data colored by class label.
plt.scatter(x[:, 0], x[:, 1], c=y, cmap=plt.cm.Spectral)
plt.title("original data")
plt.show()
# define the sigmoid activation function
def sigmoid(X):
    """Element-wise logistic function: 1 / (1 + e^(-x)).

    Accepts a scalar or numpy array; returns values in (0, 1).
    """
    neg_exp = np.exp(-X)
    return 1.0 / (1.0 + neg_exp)
# define the neural network model
class NN_Model:
    """Small fully-connected neural network with sigmoid activations on
    every layer, trained by full-batch gradient descent on a squared-error
    loss. Layer sizes (input -> hidden... -> output) are given by `nodes`.
    """

    def __init__(self, nodes=None):
        """Configure hyper-parameters and architecture.

        nodes: list of layer sizes from input to output; defaults to
               [2, 8, 2] when omitted or empty.
        """
        self.epsilon = 0.01                 # learning rate
        self.n_epoch = 2000                 # default number of training iterations
        # Default architecture: 2 inputs -> 8 hidden -> 2 outputs.
        self.nodes = nodes if nodes else [2, 8, 2]

    def init_weight(self):
        """Randomly initialize one weight matrix and one bias row per layer.

        W[i] has shape (nodes[i], nodes[i+1]); B[i] has shape (1, nodes[i+1]).
        Weights are Gaussian scaled by 1/sqrt(fan_in) to keep the sigmoid
        inputs away from the saturated regions at the start of training.
        """
        W = []
        B = []
        for i in range(len(self.nodes) - 1):
            w = np.random.randn(self.nodes[i], self.nodes[i+1]) / np.sqrt(self.nodes[i])
            b = np.random.randn(1, self.nodes[i+1])
            W.append(w)
            B.append(b)
        self.W = W
        self.B = B

    def Reverse(self, lst):
        """Reverse `lst` in place and return it (kept for API compatibility)."""
        lst.reverse()
        return lst

    def forward(self, X):
        """Forward pass: record every layer's activation in self.Z and
        return the output layer's activation, shape (n_samples, nodes[-1]).
        """
        Z = []
        a = X
        for w, b in zip(self.W, self.B):
            a = sigmoid(np.dot(a, w) + b)
            Z.append(a)
        self.Z = Z
        return Z[-1]

    def backpropagation(self, X, y, n_epoch=None, epsilon=None):
        """Train with full-batch gradient descent.

        X: input samples, shape (n_samples, nodes[0]).
        y: one-hot targets, shape (n_samples, nodes[-1]).
        n_epoch, epsilon: override the instance defaults when given
        (tested with `is None` so an explicit 0 is honored).
        """
        if n_epoch is None:
            n_epoch = self.n_epoch
        if epsilon is None:
            epsilon = self.epsilon
        self.X = X
        self.Y = y
        for i in range(n_epoch):
            self.forward(X)
            self.epoch = i
            self.evaluate()
            W = self.W
            B = self.B
            Z = self.Z

            # Build the per-layer error terms (deltas), output layer first.
            # Output delta: f'(z) * (target - output); the '+' updates below
            # then descend the squared-error loss.
            De = [Z[-1] * (1 - Z[-1]) * (y - Z[-1])]
            # Hidden deltas, propagated backwards through the weights.
            for j in range(len(self.nodes) - 2, 0, -1):
                De.append(Z[j-1] * (1 - Z[j-1]) * np.dot(De[-1], W[j].T))
            De.reverse()  # now De[j] is the delta of layer j

            # Update weights/biases layer by layer.
            for j in range(len(self.nodes) - 2, 0, -1):
                # BUG FIX: the gradient w.r.t. W[j] is
                # (input activation)^T @ (this layer's delta), i.e.
                # Z[j-1].T @ De[j] — matching the input-layer update below.
                # The old code used De[j-1].T @ Z[j], which happens to have
                # the same shape but is not the gradient.
                W[j] += epsilon * np.dot(Z[j-1].T, De[j])
                B[j] += epsilon * np.sum(De[j], axis=0)

            W[0] += epsilon * np.dot(X.T, De[0])  # input layer
            B[0] += epsilon * np.sum(De[0], axis=0)

    def evaluate(self):
        """Print squared-error loss and accuracy every 100 epochs.

        Assumes self.Z, self.Y and self.epoch were set by backpropagation().
        """
        z = self.Z[-1]
        L = np.sum((z - self.Y)**2)
        y_pred = np.argmax(z, axis=1)
        y_true = np.argmax(self.Y, axis=1)
        acc = accuracy_score(y_true, y_pred)
        if self.epoch % 100 == 0:
            print("L = %f, acc = %f" % (L, acc))
            

# Use the NN model and train it (architecture: 2 -> 8 -> 7 -> 2).
nn = NN_Model([2, 8, 7, 2])
nn.init_weight()
# NOTE: no separate nn.forward(x) needed here — backpropagation() runs a
# forward pass at the start of every epoch.
nn.backpropagation(x, t, 4000)

# Predict results & plot the decision each point was assigned to.
y_res = nn.forward(x)
y_pred = np.argmax(y_res, axis=1)
plt.scatter(x[:, 0], x[:, 1], c=y_pred, cmap=plt.cm.Spectral)
plt.title("predicted")
plt.show()
