import numpy as np
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import OneHotEncoder
from sklearn.metrics import *
# --- Data preparation: load, shuffle, one-hot encode, train/test split ---
x = np.loadtxt('PEPX.txt', delimiter=',')
y = np.loadtxt('PEPL.txt', delimiter=',')
m = len(x)

# Shuffle the samples reproducibly so the split below is unbiased.
np.random.seed(123)
order = np.random.permutation(m)
x, y = x[order], y[order]

# One-hot encode the integer class labels (one column per class).
encoder = OneHotEncoder()
y_onehot = encoder.fit_transform(y.reshape(-1, 1)).toarray()

# 75% / 25% train/test split.
num = int(0.75 * m)
tr_x, te_x = x[:num], x[num:]
tr_y, te_y = y_onehot[:num], y_onehot[num:]

def model(x, theta):
    """Linear layer: return the matrix product of inputs `x` and weights `theta`."""
    return x @ theta
def sigmoid(z, a=False):
    """Logistic sigmoid activation, or its derivative.

    Parameters
    ----------
    z : ndarray or scalar
        Pre-activation values — or, when ``a`` is True, values that have
        ALREADY been passed through the sigmoid (the derivative is computed
        as s*(1-s) from the activated output, not from raw inputs).
    a : bool, default False
        If True, return the derivative ``z*(1-z)`` instead of the activation.
    """
    if a:  # idiomatic truth test instead of `== True`
        return z * (1 - z)
    return 1 / (1 + np.exp(-z))
def costfunc(h, y):
    """Mean binary cross-entropy between predictions `h` and one-hot labels `y`.

    Bug fix: the original normalized by the module-level global ``m`` (the
    FULL dataset size) even when called on the 75% training subset; the mean
    is now taken over the rows actually present in ``y``. Predictions are
    clipped away from 0/1 so saturated outputs cannot produce log(0) = -inf.
    """
    n = y.shape[0]
    h = np.clip(h, 1e-12, 1 - 1e-12)
    return -np.sum(y * np.log(h) + (1 - y) * np.log(1 - h)) / n
def FP(x, theta1, theta2):
    """Forward pass through the two-layer network.

    Returns the hidden-layer activations and the output-layer activations
    (both sigmoid), in that order.
    """
    a2 = sigmoid(model(x, theta1))
    a3 = sigmoid(model(a2, theta2))
    return a2, a3
def BP(x, y, theta1, theta2, a2, a3, alpha):
    """One batch gradient-descent step of backpropagation.

    Parameters: inputs ``x``, one-hot targets ``y``, current weights
    ``theta1``/``theta2``, hidden/output activations ``a2``/``a3`` from the
    forward pass, and learning rate ``alpha``.

    Bug fix: gradients were averaged by the module-level global ``m`` (the
    FULL dataset size) rather than the number of samples actually in this
    batch; they are now divided by ``x.shape[0]``.

    Returns the updated ``(theta1, theta2)``. Note the update is in place:
    the arrays passed in are mutated.
    """
    n = x.shape[0]
    s3 = a3 - y                                   # output error (cross-entropy + sigmoid)
    s2 = s3.dot(theta2.T) * a2 * (1 - a2)         # hidden error via sigmoid derivative

    dt2 = a2.T.dot(s3) / n
    dt1 = x.T.dot(s2) / n

    theta1 -= dt1 * alpha
    theta2 -= dt2 * alpha
    return theta1, theta2
def gard(x, y, iter0=500, alpha=0.2, hidden=100):
    """Train the one-hidden-layer network by batch gradient descent.

    Parameters
    ----------
    x : ndarray, shape (n_samples, n_features)
        Training inputs.
    y : ndarray, shape (n_samples, n_classes)
        One-hot training targets.
    iter0 : int, default 500
        Number of gradient-descent iterations.
    alpha : float, default 0.2
        Learning rate.
    hidden : int, default 100
        Hidden-layer width (generalized: was hard-coded to 100).

    Returns
    -------
    (J, theta1, theta2, a3) : per-iteration cost history, trained weight
    matrices, and the final training-set output activations.
    """
    n_samples, n_features = x.shape
    n_out = y.shape[1]  # generalized: output size was hard-coded to 3
    theta1 = np.random.randn(n_features, hidden)
    theta2 = np.random.randn(hidden, n_out)
    J = np.zeros(iter0)
    for i in range(iter0):
        a2, a3 = FP(x, theta1, theta2)
        J[i] = costfunc(a3, y)
        theta1, theta2 = BP(x, y, theta1, theta2, a2, a3, alpha)
    return J, theta1, theta2, a3
def score(h, y):
    """Accuracy: fraction of rows whose argmax prediction matches the argmax label."""
    predicted = h.argmax(axis=1)
    actual = y.argmax(axis=1)
    return (predicted == actual).mean()
if __name__ == '__main__':
    # Train the hand-rolled network and report its test-set accuracy.
    J, theta1, theta2, a3 = gard(tr_x, tr_y)
    h1, te_h = FP(te_x, theta1, theta2)
    print(score(te_h, te_y))

    # Baseline with sklearn's MLP. Bug fix: the original bound this to the
    # name `model`, clobbering the module-level model() function used by
    # FP/BP; use `clf` instead.
    clf = MLPClassifier()
    # NOTE(review): fits and evaluates on the FULL dataset, so these metrics
    # are training-set metrics, not a fair comparison with the test score
    # printed above — confirm whether that is intended.
    clf.fit(x, y)
    h = clf.predict(x)
    print(confusion_matrix(y, h))
    print(classification_report(y, h))