import numpy as np
import matplotlib.pyplot as plt
#线性模型
# Linear model: affine-free matrix product of inputs and weights.
def model(x, theta):
    """Return the linear combination x @ theta."""
    return x @ theta
#sigmoid函数、激活函数
# Logistic (sigmoid) activation, applied elementwise.
def sigmoid(z):
    """Map z to (0, 1) via the logistic function 1 / (1 + e^-z)."""
    return 1.0 / (np.exp(-z) + 1.0)
#代价函数
# Binary cross-entropy cost.
def cost(h, y):
    """Return the mean binary cross-entropy between predictions h and labels y.

    h : array of predicted probabilities in [0, 1], same shape as y.
    y : array of 0/1 labels.

    Predictions are clipped away from 0 and 1 so that np.log never
    produces -inf/NaN when the network saturates.
    """
    m = len(y)
    eps = 1e-12  # guard against log(0) for saturated predictions
    h = np.clip(h, eps, 1.0 - eps)
    return -np.sum(y * np.log(h) + (1 - y) * np.log(1 - h)) / m
#正向传播
# Forward propagation through one hidden layer.
def FP(theta1, theta2, x):
    """Run a forward pass; return (hidden activations, output activations).

    theta1 : input-to-hidden weights.
    theta2 : hidden-to-output weights.
    x      : design matrix (one sample per row).
    """
    hidden = sigmoid(model(x, theta1))
    output = sigmoid(model(hidden, theta2))
    return hidden, output
#反向传播
# Backpropagation: one batch gradient-descent step.
def BP(x, y, theta1, theta2, a2, a3, alpha):
    """Update both weight matrices in place and return them.

    x, y   : training inputs and 0/1 labels.
    a2, a3 : hidden and output activations from the forward pass.
    alpha  : learning rate.
    """
    # Output-layer error, then hidden-layer error via the sigmoid derivative.
    delta_out = a3 - y
    delta_hid = delta_out.dot(theta2.T) * (a2 * (1 - a2))

    # Average the gradients over the batch.
    batch = len(x)
    grad2 = a2.T.dot(delta_out) / batch
    grad1 = x.T.dot(delta_hid) / batch

    # In-place gradient-descent step (callers may rely on the mutation).
    theta2 -= alpha * grad2
    theta1 -= alpha * grad1
    return theta1, theta2
#梯度下降
# Gradient descent training loop.
def grad(x, y, alpha=0.1, iter0=5000, hidden=100):
    """Train a one-hidden-layer sigmoid network with batch gradient descent.

    x      : design matrix of shape (m, n), bias column included by the caller.
    y      : 0/1 label column of shape (m, 1).
    alpha  : learning rate.
    iter0  : number of gradient-descent iterations.
    hidden : hidden-layer width (was hard-coded to 100; default preserves
             the original behaviour).

    Returns (theta1, theta2, final output activations, per-iteration cost).
    Weights are initialised from the global NumPy RNG; seed beforehand for
    reproducibility.
    """
    m, n = x.shape
    theta1 = np.random.randn(n, hidden)
    theta2 = np.random.randn(hidden, 1)
    J = np.zeros(iter0)
    for i in range(iter0):
        a2, a3 = FP(theta1, theta2, x)
        J[i] = cost(a3, y)  # record cost before the update
        theta1, theta2 = BP(x, y, theta1, theta2, a2, a3, alpha)
    return theta1, theta2, a3, J
#准确率
# Classification accuracy.
def score(h, y):
    """Return the fraction of predictions matching labels at a 0.5 threshold.

    h : predicted probabilities, same shape as y (e.g. (m, 1)).
    y : 0/1 labels.

    Fix: the original `y == [h > 0.5]` wrapped the boolean array in a Python
    list, creating a spurious leading axis that only worked by broadcasting
    accident; compare elementwise directly instead.
    """
    return np.mean((h > 0.5) == y)
#主程序
# Main program: load data, train, report accuracy, plot decision boundary.
if __name__ == '__main__':
    # Load the comma-separated dataset; last column is the 0/1 label.
    data = np.loadtxt('logist.txt', delimiter=',')
    feats, labels = data[:, :-1], data[:, -1:]

    # Standardize each feature (z-score) so gradient descent behaves well.
    mean = np.mean(feats, axis=0)
    std = np.std(feats, axis=0)
    feats = (feats - mean) / std

    # Prepend a bias column of ones to form the design matrix.
    design = np.c_[np.ones(len(feats)), feats]

    # Deterministic shuffle before splitting (seed also fixes grad's init).
    np.random.seed(666)
    order = np.random.permutation(len(feats))
    design, labels = design[order], labels[order]

    # 70/30 train/test split.
    split_at = int(0.7 * len(feats))
    train_x, test_x = np.split(design, [split_at])
    train_y, test_y = np.split(labels, [split_at])

    # Train the network and show the learning curve.
    theta1, theta2, train_h, J = grad(train_x, train_y)
    plt.plot(J)
    plt.show()

    # Training-set accuracy.
    print(score(train_h, train_y))

    # Build a 200x200 grid over the standardized feature plane.
    x1_lo, x1_hi = np.min(feats[:, 0]), np.max(feats[:, 0])
    x2_lo, x2_hi = np.min(feats[:, 1]), np.max(feats[:, 1])
    grid_x, grid_y = np.mgrid[x1_lo:x1_hi:200j, x2_lo:x2_hi:200j]
    grid_pts = np.c_[np.ones(40000), grid_x.ravel(), grid_y.ravel()]

    # Predict over the grid and draw the decision regions plus the data.
    _, h = FP(theta1, theta2, grid_pts)
    plt.contourf(grid_x, grid_y, h.reshape(200, 200) > 0.5)
    plt.scatter(feats[:, 0], feats[:, 1], c=labels.ravel(), cmap=plt.cm.Paired)
    plt.show()