import numpy as np
import matplotlib.pyplot as plt

def model(x, theta):
    """Linear hypothesis: return the matrix-vector product x @ theta."""
    return np.dot(x, theta)

def sigmoid(z):
    """Logistic function 1 / (1 + e^(-z)), applied element-wise."""
    exp_neg = np.exp(-z)
    return 1.0 / (1.0 + exp_neg)

def cost(h, y, theta, lamda):
    """Regularized cross-entropy cost for logistic regression.

    Parameters
    ----------
    h : (m,) predicted probabilities in (0, 1).
    y : (m,) vector of 0/1 labels.
    theta : (n,) parameter vector; theta[0] is the intercept.
    lamda : L2 regularization strength.

    Returns
    -------
    float : mean cross-entropy plus the L2 penalty.
    """
    m = len(y)
    # L2 regularization term of the cost function. The intercept theta[0]
    # is conventionally NOT penalized, hence theta[1:] (the original
    # summed over the full theta, which biases the intercept toward 0).
    r = lamda / (2 * m) * np.sum(theta[1:] ** 2)
    return -1 / m * np.sum(y * np.log(h) + (1 - y) * np.log(1 - h)) + r

def grad(x, y, lamda, iter0=5000, alpha=0.01):
    """Train regularized logistic regression by batch gradient descent.

    Parameters
    ----------
    x : (m, n) design matrix; first column is assumed to be the ones
        column for the intercept.
    y : (m,) vector of 0/1 labels.
    lamda : L2 regularization strength.
    iter0 : number of gradient-descent iterations (must be >= 1).
    alpha : learning rate.

    Returns
    -------
    h : (m,) predictions from the last iteration (computed with the
        parameters *before* the final update, as in the original code).
    theta : (n,) learned parameter vector.
    J : (iter0,) cost recorded at every iteration.
    """
    m, n = x.shape
    theta = np.zeros(n)
    J = np.zeros(iter0)
    for i in range(iter0):
        z = model(x, theta)
        h = sigmoid(z)
        J[i] = cost(h, y, theta, lamda)
        # L2 regularization term of the gradient. The intercept theta[0]
        # must not be penalized, so its component is zeroed out (the
        # original shrank the intercept too, which biases the fit).
        r = lamda / m * theta
        r[0] = 0.0
        dt = 1 / m * x.T.dot(h - y) + r
        theta -= alpha * dt
    return h, theta, J

def score(h, y):
    """Classification accuracy: fraction of thresholded predictions
    (h > 0.5) that match the 0/1 labels y.

    The original wrapped the boolean array in a list (`y == [h > 0.5]`),
    which only worked through accidental (1, m) broadcasting and breaks
    for non-1-D labels; comparing the arrays directly is equivalent for
    the 1-D case and shape-safe.
    """
    return np.mean((h > 0.5) == y)

if __name__ == '__main__':
    # Load the dataset: all columns but the last are features,
    # the last column is the 0/1 label.
    data = np.loadtxt('ex2data1.txt', delimiter=',')
    features, labels = data[:, :-1], data[:, -1]

    # Standardize each feature to zero mean and unit variance.
    mean = np.mean(features, axis=0)
    std = np.std(features, axis=0)
    features = (features - mean) / std

    # Prepend a column of ones for the intercept term.
    design = np.c_[np.ones(len(features)), features]

    # Train with three regularization strengths and overlay the
    # cost histories to compare convergence.
    for lam in (0, 3, 6):
        _, _, history = grad(design, labels, lam)
        plt.plot(history)
    plt.show()

