import numpy as np
import matplotlib.pyplot as plt

def model(x, theta):
    """Linear layer: return the matrix product of inputs `x` and weights `theta`."""
    return np.matmul(x, theta)

def sigmoid(z):
    """Numerically stable logistic sigmoid, 1 / (1 + exp(-z)).

    Fix: the naive form `1/(1+np.exp(-z))` overflows in `np.exp` for large
    negative z (RuntimeWarning, inf in the denominator). Here we always
    exponentiate a non-positive argument: for z >= 0 use 1/(1+e^{-z}),
    for z < 0 use the equivalent e^{z}/(1+e^{z}).
    """
    z = np.asarray(z, dtype=float)
    e = np.exp(-np.abs(z))  # always in (0, 1], never overflows
    return np.where(z >= 0, 1.0 / (1.0 + e), e / (1.0 + e))

def cost(h, y):
    """Mean binary cross-entropy between predictions `h` and labels `y`.

    Fix: when a prediction saturates at exactly 0 or 1, `np.log` returns
    -inf and the cost becomes inf/nan. Clip `h` into (eps, 1-eps) first;
    interior values are unchanged, so the cost is identical for normal
    inputs.
    """
    m = len(h)
    eps = 1e-15  # smallest clip that still keeps log() finite in float64
    h = np.clip(h, eps, 1 - eps)
    return -1 / m * np.sum(y * np.log(h) + (1 - y) * np.log(1 - h))

def FP(x, theta1, theta2):
    """Forward pass through the two-layer network.

    Returns a pair (hidden, output): the hidden-layer activations and the
    final output activations, both after the sigmoid nonlinearity.
    """
    hidden = sigmoid(model(x, theta1))
    output = sigmoid(model(hidden, theta2))
    return hidden, output

def BP(x, y, theta1, theta2, a2, a3, alpha):
    """One step of backpropagation + gradient descent.

    Computes the layer errors from the forward-pass activations `a2`/`a3`,
    averages the gradients over the batch, and updates both weight
    matrices in place (they are also returned for convenience).
    """
    m = len(y)

    # Output-layer error, then hidden-layer error via the sigmoid derivative.
    delta3 = a3 - y
    delta2 = delta3.dot(theta2.T) * (a2 * (1 - a2))

    # Batch-averaged gradients for each layer.
    grad2 = 1 / m * a2.T.dot(delta3)
    grad1 = 1 / m * x.T.dot(delta2)

    theta1 -= alpha * grad1
    theta2 -= alpha * grad2
    return theta1, theta2

def grad(x, y, alpha=0.1, iter0=2000, hidden=100):
    """Train the two-layer sigmoid network by batch gradient descent.

    Parameters
    ----------
    x : (m, n) design matrix (caller is expected to include a bias column).
    y : (m, 1) binary labels.
    alpha : learning rate.
    iter0 : number of gradient-descent iterations.
    hidden : hidden-layer width (generalized; was hard-coded to 100).

    Returns
    -------
    theta1, theta2 : trained weight matrices.
    J : (iter0,) cost history, one entry per iteration.
    a3 : output activations from the last forward pass (computed just
         before the final parameter update).
    """
    m, n = x.shape
    # Random initialization breaks hidden-unit symmetry.
    theta1 = np.random.randn(n, hidden)
    theta2 = np.random.randn(hidden, 1)
    J = np.zeros(iter0)
    for i in range(iter0):
        a2, a3 = FP(x, theta1, theta2)
        J[i] = cost(a3, y)
        theta1, theta2 = BP(x, y, theta1, theta2, a2, a3, alpha)
    return theta1, theta2, J, a3

def score(h, y):
    """Classification accuracy: fraction of thresholded predictions matching `y`.

    Fix: the original compared `y == [h > 0.5]`, wrapping the predictions
    in a list and adding a leading axis. For (m, 1) inputs the broadcast
    happened to give the right answer, but for 1-D inputs it compared
    against a transposed axis and returned a wrong mean. Comparing the
    arrays directly is correct for both shapes.
    """
    return np.mean((h > 0.5) == y)

if __name__ == '__main__':
    # Load the dataset: every column but the last is a feature, the last is the label.
    data = np.loadtxt('egg.txt', delimiter=',')
    features = data[:, :-1]
    labels = data[:, -1:]

    # Standardize features to zero mean / unit variance.
    mean = np.mean(features, axis=0)
    std = np.std(features, axis=0)
    features = (features - mean) / std

    # Shuffle with a fixed seed so the split is reproducible.
    np.random.seed(666)
    order = np.random.permutation(len(features))
    features = features[order]
    labels = labels[order]

    # Prepend a bias column of ones to form the design matrix.
    design = np.c_[np.ones(len(features)), features]

    # 70/30 train/test split.
    split = int(0.7 * len(features))
    train_x, test_x = np.split(design, [split])
    train_y, test_y = np.split(labels, [split])

    # Train, then show the cost curve.
    theta1, theta2, J, train_h = grad(train_x, train_y)
    plt.plot(J)
    plt.show()

    # Evaluate accuracy on the held-out split.
    _, test_h = FP(test_x, theta1, theta2)

    print(score(test_h, test_y))