import numpy as np
import matplotlib.pyplot as plt

def model(x, theta):
    """Linear model: return the matrix-vector product x @ theta.

    x is the (m, n) design matrix, theta the (n,) weight vector;
    the result is the (m,) vector of linear scores.
    """
    return x @ theta

def sigmoid(z):
    """Numerically stable logistic function 1 / (1 + exp(-z)).

    The naive form overflows in np.exp(-z) for large negative z
    (RuntimeWarning, inf in the denominator).  Using e = exp(-|z|),
    which is always <= 1, both branches stay in range:
      z >= 0:  1 / (1 + e)
      z <  0:  e / (1 + e)
    """
    e = np.exp(-np.abs(z))
    return np.where(z >= 0, 1 / (1 + e), e / (1 + e))

def cost(h, y, theta, lamda):
    """Regularized logistic-regression cost (mean cross-entropy + L2 penalty).

    h:     (m,) predicted probabilities in (0, 1)
    y:     (m,) binary labels in {0, 1}
    theta: (n,) weights; theta[0] is the bias (the design matrix has a
           leading column of ones)
    lamda: L2 regularization strength

    Returns the scalar cost J.
    """
    m = len(y)
    # Exclude the bias weight theta[0] from the penalty -- standard
    # convention; the original penalized the bias as well.
    r = lamda / (2 * m) * np.sum(theta[1:] ** 2)
    # Clip probabilities away from 0/1 so np.log never produces -inf.
    h = np.clip(h, 1e-15, 1 - 1e-15)
    j = -1 / m * np.sum(y * np.log(h) + (1 - y) * np.log(1 - h)) + r
    return j

def grad(x, y, lamda, iter0=5000, alpha=0.1):
    """Fit L2-regularized logistic regression by batch gradient descent.

    x:     (m, n) design matrix, bias column of ones first
    y:     (m,) binary labels in {0, 1}
    lamda: L2 regularization strength
    iter0: number of gradient-descent iterations
    alpha: learning rate

    Returns (theta, h, J):
      theta: (n,) fitted weights
      h:     (m,) training probabilities from the last iteration
      J:     (iter0,) per-iteration cost history
    """
    m, n = x.shape
    theta = np.zeros(n)
    J = np.zeros(iter0)
    # Ensure h is defined even when iter0 == 0 (the original left it unbound).
    h = sigmoid(model(x, theta))
    for i in range(iter0):
        z = model(x, theta)
        h = sigmoid(z)
        J[i] = cost(h, y, theta, lamda)
        # L2 gradient term; do not regularize the bias weight theta[0],
        # matching the penalty in cost().
        r = lamda / m * theta
        r[0] = 0
        dt = 1 / m * x.T.dot(h - y) + r
        theta -= alpha * dt
    return theta, h, J
def score(h, y):
    """Classification accuracy of probabilities h against labels y.

    Predictions are h thresholded at 0.5; returns the fraction of
    matches.  The original wrapped the prediction in a list, `[h > 0.5]`,
    which only worked through an accidental (1, m) broadcast.
    """
    return np.mean(y == (h > 0.5))

if __name__ == '__main__':
    # CSV files: each row is feature columns followed by a label column.
    train = np.loadtxt('train.txt', delimiter=',')
    test = np.loadtxt('test.txt', delimiter=',')

    train_x, train_y = train[:, :-1], train[:, -1]
    test_x, test_y = test[:, :-1], test[:, -1]

    # Standardize with the TRAINING set's statistics only.  The original
    # standardized the test set with its own mean/std, which makes the two
    # feature spaces inconsistent and leaks test-set information into
    # preprocessing.
    miu = np.mean(train_x, axis=0)
    sigma = np.std(train_x, axis=0)
    train_x = (train_x - miu) / sigma
    test_x = (test_x - miu) / sigma

    # Prepend the bias column of ones.
    train_X = np.c_[np.ones(len(train_x)), train_x]
    test_X = np.c_[np.ones(len(test_x)), test_x]

    theta, train_h, J = grad(train_X, train_y, lamda=0)
    # Cost history: should decrease if alpha is well chosen.
    plt.plot(J)
    plt.show()

    z = model(test_X, theta)
    test_h = sigmoid(z)

    print(score(test_h, test_y))

    # NOTE(review): plots feature columns 1 and 2 of the bias-free test_x,
    # i.e. assumes the data has at least 3 features -- confirm against the
    # data files (columns 0 and 1 may have been intended).
    plt.scatter(test_x[:, 1], test_x[:, 2], c=test_y)
    plt.show()