import numpy as np
import matplotlib.pyplot as plt 
import pandas as pd
import random
import copy
import math
from sklearn.datasets import load_digits 
from sklearn.metrics import confusion_matrix
from sklearn import datasets, linear_model
from sklearn.metrics import accuracy_score
# ---- data loading ----
# sklearn's digits bunch is used only for plotting the raw 8x8 images later.
digits = load_digits()
# Pre-saved arrays: x = feature matrix, z = integer ground-truth labels,
# t = training targets (presumably one-hot — backpropagation argmaxes it;
# TODO confirm against how t.npy was produced).
x, z, t = (np.load(fname) for fname in ("x.npy", "z.npy", "t.npy"))



# define sigmoid (logistic) activation
def sigmoid(X):
    """Element-wise logistic sigmoid, 1 / (1 + exp(-X)).

    The input is clipped to [-500, 500] before exponentiation so very
    negative values cannot overflow np.exp (which warns and yields inf
    near -710). Sigmoid is already saturated at 0/1 to machine precision
    far before |x| = 500, so the clip does not change returned values.
    """
    return 1.0 / (1 + np.exp(-np.clip(X, -500.0, 500.0)))

# generate the NN model
class NN_Model:
    def __init__(self, nodes=None):
        self.epsilon = 0.01                 # learning rate
        self.n_epoch = 1000                 # iterative number
        
        if not nodes:
            self.nodes = [64,32,32,10]          # default nodes size (from input -> output)
        else:
            self.nodes = nodes
            
    def init_weight(self):
        W = []
        B = []
        n_layer = len(self.nodes)
        for i in range(n_layer-1):
            w = np.random.randn(self.nodes[i], self.nodes[i+1]) / np.sqrt(self.nodes[i])
            b = np.random.randn(1, self.nodes[i+1])
            W.append(w)
            B.append(b)
        self.W = W
        self.B = B
        
    def forward(self,X):
        Z = []
        x0 = X #1797,64
        for i in range(len(self.nodes)-1):
            z = sigmoid(np.dot(x0, self.W[i]) + self.B[i])
            x0 = z
            
            Z.append(z)
        
        self.Z = Z
        return Z[-1],Z
    
    # back-propagation
    def backpropagation(self, X, y, n_epoch=None, epsilon=None):
        if not n_epoch: n_epoch = self.n_epoch
        if not epsilon: epsilon = self.epsilon
        
        self.X = X
        self.Y = y
        #print('yshape',np.shape(self.Y)) #yshape (399, 2)
        for i in range(n_epoch):
            # forward to calculate each node's output
            self.forward(X)

            self.epoch = i
            self.evaluate()
            
            # calc weights update
            W = self.W
            B = self.B
            Z = self.Z
            
            D = []
            d0 = y
            n_layer = len(self.nodes)
            for j in range(n_layer-1, 0, -1):
                jj = j - 1
                z = self.Z[jj]
                
                if j == n_layer - 1:
                    d = z*(1-z)*(d0 - z)
                else:
                    d = z*(1-z)*np.dot(d0, W[j].T)
                    
                d0 = d
                # print('d0shape',np.shape(d0)) #399*(2,7,8,2)
                D.insert(0, d)
            
            # update weights
            for j in range(n_layer-1, 0, -1):
                jj = j - 1
                
                if jj != 0:
                    W[jj] += epsilon * np.dot(Z[jj-1].T, D[jj])
                else:
                    W[jj] += epsilon * np.dot(X.T, D[jj])
                    
                B[jj] += epsilon * np.sum(D[jj], axis=0)
        return D,W,B
    def evaluate(self):
        z = self.Z[-1]
        
        # print loss, accuracy
        L = np.sum((z - self.Y)**2)
            
        y_pred = np.argmax(z, axis=1)
        y_true = np.argmax(self.Y, axis=1)
        acc = accuracy_score(y_true, y_pred)
        
        if self.epoch % 100 == 0:
            print("L = %f, acc = %f" % (L, acc))
            
    def plotl(self,labels):
        # plot the digits
        fig = plt.figure(figsize=(6, 6))  # figure size in inches
        fig.subplots_adjust(left=0, right=1, bottom=0, top=1, hspace=0.05, wspace=0.05)
        len=50
        # plot the digits: each image is 8x8 pixels
        for i in range(len):
            ax = fig.add_subplot(8, 8, i + 1, xticks=[], yticks=[])
            ax.imshow(digits.images[i], cmap=plt.cm.binary)
            # label the image with the target value
            ax.text(0, 7, str(digits.target[i]))
            if labels[i]==z[i]:
                ax.text(4, 7, str(labels[i]),color='green',size=38)
            else:
                ax.text(4, 7, str(labels[i]),color='red',size=38)
        plt.show()
    def confusion(self,y,z):
        cm = confusion_matrix(y,z)#前一个是参考值，后一个是预测值
        plt.matshow(cm)
        plt.title('Confusion Matrix')
        plt.colorbar()
        plt.ylabel('Groundtruth')
        plt.xlabel(u'Predict')
        #plt.savefig('logistic_confusion_matrix.pdf')
        plt.show()        
    

# ---- train & evaluate ----
# A deeper topology than the class default (two extra hidden layers).
nn = NN_Model([64, 40, 20, 10, 10])
nn.init_weight()
D, W, B = nn.backpropagation(x, t, 800)

# predict results & plot results
y_res, Z = nn.forward(x)
y_pred = np.argmax(y_res, axis=1)

# confusion() expects (ground truth, prediction); the original call passed
# them swapped, which transposed the matrix relative to its axis labels.
nn.confusion(z, y_pred)
nn.plotl(y_pred)

