import numpy as np

def h(x, theta):
    """Logistic hypothesis: sigmoid of the linear score x . theta.

    The cost function below evaluates log(h) and log(1 - h), so h must
    return a probability in (0, 1); the raw dot product alone would make
    log(1 - h) undefined for any score >= 1.
    """
    return 1.0 / (1.0 + np.exp(-np.dot(x, theta)))

def cost_function(theta, X, y):
    """Mean cross-entropy cost for logistic regression.

    Parameters
    ----------
    theta : ndarray, shape (n,) -- model weights
    X     : ndarray, shape (m, n) -- design matrix, one sample per row
    y     : ndarray, shape (m,) -- binary labels in {0, 1}

    Returns
    -------
    float -- J(theta) = -(1/m) * sum(y*log(p) + (1-y)*log(1-p)), where
    p = sigmoid(X . theta).  Non-negative; lower is better.

    Notes: the original looped per sample, fed the *raw* linear score to
    log (NaN for scores outside (0, 1)), and returned the positive
    log-likelihood (the wrong sign for a cost) without averaging.
    """
    m = y.shape[0]
    # Sigmoid applied inline so the cost is correct even in isolation.
    p = 1.0 / (1.0 + np.exp(-np.dot(X, theta)))
    # Clip away exact 0/1 so the logs stay finite for saturated scores.
    eps = 1e-15
    p = np.clip(p, eps, 1.0 - eps)
    return -np.sum(y * np.log(p) + (1.0 - y) * np.log(1.0 - p)) / m


# --- data loading ---
X_train = np.loadtxt('X_train.txt', delimiter=',')
y_train = np.loadtxt('y_train.txt', delimiter=',')
X_test = np.loadtxt('X_test.txt', delimiter=',')
y_test = np.loadtxt('y_test.txt', delimiter=',')

n = X_train.shape[1]  # number of features
m = X_train.shape[0]  # number of training samples

# theta has n+1 entries because index 0 is the intercept; that requires a
# column of ones prepended to X.  (The original sized theta as (n+1, 1) but
# never added the bias column, so np.dot(X[i], theta) was a shape mismatch.)
Xb_train = np.hstack([np.ones((m, 1)), X_train])
Xb_test = np.hstack([np.ones((X_test.shape[0], 1)), X_test])

theta = np.zeros(n + 1)
iteration = 100
learning_rate = 0.01

# --- batch gradient descent on the logistic cross-entropy cost ---
# Gradient of the mean cost is X^T (sigmoid(X.theta) - y) / m, and we step
# *against* it.  The original stepped with the wrong sign (y - h with a
# minus update) and mutated one coordinate at a time across a full data
# pass while the rest of theta was stale.
for _ in range(iteration):
    p = 1.0 / (1.0 + np.exp(-np.dot(Xb_train, theta)))  # P(y=1 | x)
    grad = np.dot(Xb_train.T, p - y_train) / m
    theta -= learning_rate * grad

# --- prediction ---
# Predicted probabilities for the test set; threshold at 0.5 for labels.
prediction = 1.0 / (1.0 + np.exp(-np.dot(Xb_test, theta)))
