import numpy as np
from sklearn import datasets, linear_model
from sklearn.metrics import accuracy_score
import matplotlib.pyplot as plt
from sklearn.datasets import load_digits
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import adjusted_rand_score
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report, confusion_matrix

def sigmoid(X):
    """Element-wise logistic sigmoid: 1 / (1 + e^-X).

    The argument is clipped to [-500, 500] before exponentiating so that
    np.exp never overflows to inf (which raised a RuntimeWarning in the
    original); the returned values are unchanged to within float64
    precision, since sigmoid saturates to 0/1 far before |X| = 500.
    """
    return 1.0 / (1.0 + np.exp(-np.clip(X, -500, 500)))

def softmax(a):
    """Numerically stable softmax over the last axis.

    Subtracting the row-wise maximum before exponentiating prevents
    overflow.  Fix/generalization: the original used a *global* max and a
    *global* sum, which is only correct for a single sample; normalizing
    along the last axis gives identical results for 1-D or single-row
    input (the only shapes this file passes in) and a proper per-row
    softmax for batched 2-D input.
    """
    a = np.asarray(a)
    shifted = a - np.max(a, axis=-1, keepdims=True)  # stability shift
    exp_a = np.exp(shifted)
    return exp_a / np.sum(exp_a, axis=-1, keepdims=True)

class Neural_Network:
    """Fully-connected feed-forward network with sigmoid activations,
    trained by full-batch gradient descent on a squared-error loss.

    Attributes set during use:
        W, B -- per-layer weight matrices / bias rows (set by init_WB)
        A    -- per-layer activations of the most recent forward pass
    """

    def __init__(self, layers=None, step=0.001, times=2000):
        # layers[i] is the width of layer i (input layer included).
        # `None` sentinel avoids the shared-mutable-default pitfall while
        # keeping the historical default architecture [2, 8, 2].
        self.layers = [2, 8, 2] if layers is None else layers
        self.step = step    # learning rate
        self.times = times  # number of full-batch training epochs

    def init_WB(self):
        """Initialize small random weights and zero biases per layer pair."""
        W, B = [], []
        for i in range(len(self.layers) - 1):
            # Small positive weights (uniform / 50) keep sigmoids
            # away from saturation at the start of training.
            W.append(np.random.rand(self.layers[i], self.layers[i + 1]) / 50)
            B.append(np.zeros((1, self.layers[i + 1])))
        self.W = W
        self.B = B

    def forward(self, X):
        """Propagate X through every layer; store activations in self.A.

        Fix: the original wrote into a pre-allocated self.A that only
        backward() created, so calling forward() first crashed with an
        AttributeError.  The activation list is now built locally and
        assigned at the end, making forward() self-contained.
        """
        A = []
        out = X
        for W, B in zip(self.W, self.B):
            out = sigmoid(np.dot(out, W) + B)
            A.append(out)
        self.A = A

    def backward(self, X, y, y_true):
        """Train for self.times epochs of full-batch gradient descent.

        X      -- input samples, shape (n, layers[0])
        y      -- one-hot targets, shape (n, layers[-1])
        y_true -- integer class labels, used only for the printed accuracy
        """
        self.y_true = y_true
        n_steps = len(self.layers) - 1
        for epoch in range(self.times):
            self.forward(X)
            # Squared-error loss and training accuracy, for monitoring only.
            L = np.sum((self.A[-1] - y) ** 2)
            y_pred = np.argmax(self.A[-1], axis=1)
            acc = accuracy_score(self.y_true, y_pred)
            print("epoch [%4d] L = %f, acc = %f" % (epoch, L, acc))
            # Backpropagate deltas from the output layer inwards.
            # d carries (y - a) * a * (1 - a), i.e. the *negative* gradient,
            # so the parameter update below uses +=.
            d = [None] * n_steps
            d[-1] = (y - self.A[-1]) * self.A[-1] * (1 - self.A[-1])
            for i in range(1, n_steps):
                d[-1 - i] = (self.A[-1 - i] * (1 - self.A[-1 - i])
                             * np.dot(d[-i], self.W[-i].T))
            # Gradient step; the first layer uses X itself as its input.
            for i in range(n_steps):
                if i != n_steps - 1:
                    self.W[-i - 1] += self.step * np.dot(self.A[-i - 2].T, d[-i - 1])
                    self.B[-i - 1] += self.step * np.sum(d[-i - 1], axis=0)
                else:
                    self.W[0] += self.step * np.dot(X.T, d[0])
                    self.B[0] += self.step * np.sum(d[0], axis=0)
        

# Crescent-moon (make_moons) classification demo.
# NOTE: disabled — kept as a triple-quoted string literal so it does not run
# on import; remove the surrounding quotes to execute it.
'''
layers=[2,8,2]

np.random.seed(0)
X, y1 = datasets.make_moons(300, noise=0.20)
print(X)
y_true = np.array(y1).astype(float)
y = np.zeros((X.shape[0], layers[-1]))
for i in range(layers[-1]):
    y[np.where(y1==i), i] = 1

nn=Neural_Network(layers,0.1,1000)
nn.init_WB()
nn.backward(X,y,y_true)

nn.forward(X)
y_pred = np.argmax(nn.A[-1], axis=1)
cm = confusion_matrix(y_true,y_pred)
plt.matshow(cm)
plt.title(u'Confusion Matrix')
plt.colorbar()
plt.ylabel(u'Groundtruth')
plt.xlabel(u'Predict')
plt.show()

plt.scatter(X[:,0],X[:,1],c=y_true)
plt.title("truth")
plt.show()
plt.scatter(X[:,0],X[:,1],c=y_pred)
plt.title("predict")
plt.show()

nn.forward(X[1])
col0=[0,1]
col1=np.around(softmax(nn.A[-1]),3)

plt.figure(figsize=(8,5))
tab = plt.table(cellText=col1, 
              colLabels=col0, 
              loc='center', 
              cellLoc='center',
              rowLoc='center')
tab.scale(1,1) 
plt.axis('off')
plt.show()'''

# Concentric-circles classification demo (reads dataset_circles.csv).
# NOTE: disabled — kept as a triple-quoted string literal so it does not run
# on import; remove the surrounding quotes to execute it.
'''
layers=[2,7,2]

data = np.genfromtxt('dataset_circles.csv',delimiter=',',skip_header=0)
np.random.shuffle(data)
N = len(data)
N_train = int(N*0.5)

x_train = data[:N_train,:2]
x_test  = data[N_train:,:2]
x_all=data[:,:2]
y_train = data[:N_train,2]
y_test  = data[N_train:,2]
y_all=data[:,2]
y_true_train = np.array(y_train).astype(float)
y_true_test = np.array(y_test).astype(float)
y_true_all=np.array(y_all).astype(float)
y = np.zeros((x_train.shape[0], layers[-1]))
for i in range(layers[-1]):
    y[np.where(y_train==i), i] = 1

nn=Neural_Network(layers,0.0015,3000)
nn.init_WB()
nn.backward(x_train,y,y_true_train)

nn.forward(x_all)
y_pred_all = np.argmax(nn.A[-1], axis=1)
cm = confusion_matrix(y_true_all,y_pred_all)
plt.matshow(cm)
plt.title(u'Confusion Matrix')
plt.colorbar()
plt.ylabel(u'Groundtruth')
plt.xlabel(u'Predict')
plt.show()

plt.scatter(data[:,0],data[:,1],c=y_true_all)
plt.title("truth")
plt.show()
plt.scatter(data[:,0],data[:,1],c=y_pred_all)
plt.title("predict")
plt.show()

nn.forward(x_test[1])
col0=[0,1]
col1=np.around(softmax(nn.A[-1]),3)

plt.figure(figsize=(8,5))
tab = plt.table(cellText=col1, 
              colLabels=col0, 
              loc='center', 
              cellLoc='center',
              rowLoc='center')
tab.scale(1,1) 
plt.axis('off')
plt.show()'''

# Handwritten-digit classification demo (sklearn load_digits, 64->30->10).
# NOTE: disabled — kept as a triple-quoted string literal so it does not run
# on import; remove the surrounding quotes to execute it.
'''
layers=[64,30,10]
N=len(layers)-1
digits = load_digits()
target=digits.target
data=digits.data
N_train=int(data.shape[0]*0.5)
X=data[:N_train]
y1=target[:N_train]

y_true = np.array(y1).astype(float)
y_true_all = np.array(target).astype(float)

t = np.zeros((X.shape[0], layers[-1]))
for i in range(layers[-1]):
    t[np.where(y1==i), i] = 1
y=t

nn=Neural_Network(layers,0.001,1500)
nn.init_WB()
nn.backward(X,y,y_true)

nn.forward(data)
y_pred_all = np.argmax(nn.A[-1], axis=1)
cm = confusion_matrix(y_true_all,y_pred_all)
plt.matshow(cm)
plt.title(u'Confusion Matrix')
plt.colorbar()
plt.ylabel(u'Groundtruth')
plt.xlabel(u'Predict')
plt.show()

nn.forward(data[1])
col0=[0,1,2,3,4,5,6,7,8,9]
col1=np.around(softmax(nn.A[-1]),3)

plt.figure(figsize=(10,5))
tab = plt.table(cellText=col1, 
              colLabels=col0, 
              loc='center', 
              cellLoc='center',
              rowLoc='center')
tab.scale(1,1) 
plt.axis('off')
plt.show()'''

