import numpy as np
import matplotlib.pyplot as plt


# model
def model(XX, theta):
    """Linear layer: matrix product of inputs XX and weights theta."""
    return np.matmul(XX, theta)


# sigmoid (active function)
def sigmoid(z):
    """Logistic activation: squashes any real z into the open interval (0, 1)."""
    denom = 1 + np.exp(-z)
    return 1 / denom


# cost function
def cost_func(h, y):
    """Mean binary cross-entropy between predictions h and labels y.

    Assumes 0 < h < 1 elementwise (log of 0 would yield -inf).
    """
    n_samples = len(h)
    log_likelihood = y * np.log(h) + (1 - y) * np.log(1 - h)
    return -1 / n_samples * np.sum(log_likelihood)


# FP
def FP(XX, theta1, theta2):
    """Forward propagation through the two-layer network.

    The linear step and logistic activation are written inline here;
    they are identical to model() followed by sigmoid().

    Returns (hidden activations, output activations).
    """
    hidden = 1 / (1 + np.exp(-XX.dot(theta1)))
    output = 1 / (1 + np.exp(-hidden.dot(theta2)))
    return hidden, output


# BP
def BP(XX, y, theta1, theta2, a2, a3, alpha):
    """One backpropagation + gradient-descent step.

    NOTE: theta1 and theta2 are updated in place (via -=) and also
    returned, so callers may use either the return values or the
    mutated arrays.
    """
    m = XX.shape[0]
    delta3 = a3 - y                                   # output-layer error
    delta2 = delta3.dot(theta2.T) * (a2 * (1 - a2))   # hidden-layer error (sigmoid')
    grad2 = 1 / m * a2.T.dot(delta3)
    grad1 = 1 / m * XX.T.dot(delta2)
    theta2 -= alpha * grad2
    theta1 -= alpha * grad1
    return theta1, theta2


# gradient descent
def grad(XX, y, alpha=0.1, iter0=2000, hidden=49):
    """Train the two-layer network with batch gradient descent.

    Parameters
    ----------
    XX : (m, n) design matrix (bias column already included by the caller).
    y : (m, 1) array of labels in {0, 1}.
    alpha : learning rate.
    iter0 : number of gradient-descent iterations (must be >= 1).
    hidden : number of hidden units (default 49, matching the original).

    Returns
    -------
    theta1, theta2 : trained weight matrices.
    j_his : per-iteration cost history, shape (iter0,).
    a3 : output activations from the final forward pass (before the
         last weight update).

    Raises
    ------
    ValueError : if iter0 < 1 (the loop would never run and the
        return values would be undefined).
    """
    if iter0 < 1:
        raise ValueError('iter0 must be a positive integer')
    m, n = XX.shape
    # Print roughly 20 progress lines; max(1, ...) fixes a
    # ZeroDivisionError that occurred when iter0 < 20.
    group = max(1, iter0 // 20)
    theta1 = np.random.randn(n, hidden)
    theta2 = np.random.randn(hidden, 1)
    j_his = np.zeros(iter0)

    for i in range(iter0):
        a2, a3 = FP(XX, theta1, theta2)
        j = cost_func(a3, y)
        j_his[i] = j
        if 0 == i % group:
            print(f'#{i + 1} cost func value = {j}')
        theta1, theta2 = BP(XX, y, theta1, theta2, a2, a3, alpha)
    # Report the final cost if the loop didn't just print it.
    if 0 != i % group:
        print(f'#{i + 1} cost func value = {j}')
    return theta1, theta2, j_his, a3


# accuracy
def score(h, y):
    """Classification accuracy of predictions h against labels y,
    thresholding probabilities at 0.5."""
    predicted = h > 0.5
    return np.mean(predicted == y)


# main
if __name__ == '__main__':
    # Load the data set: every column but the last is a feature, the
    # last column is the binary label.
    data = np.loadtxt('../logist.txt', delimiter=',')

    x = data[:, :-1]
    m = len(x)
    y = data[:, -1:]

    # Standardize features to zero mean / unit variance.
    mu = np.mean(x, axis=0)
    sigma = np.std(x, axis=0)
    x -= mu
    x /= sigma

    # Prepend a bias column of ones.
    XX = np.c_[np.ones(m), x]

    # Shuffle samples reproducibly before splitting.
    np.random.seed(1)
    a = np.random.permutation(m)
    x = x[a]
    y = y[a]
    XX = XX[a]

    # 70/30 train/test split.
    m_train = int(0.7 * m)
    XX_train, XX_test = np.split(XX, [m_train])
    y_train, y_test = np.split(y, [m_train])

    theta1, theta2, j_his, h_train = grad(XX_train, y_train)
    print(f'Training score = {score(h_train, y_train)}')

    plt.figure(figsize=[16, 7])
    spr = 1
    spc = 2
    spn = 1
    plt.subplot(spr, spc, spn)
    plt.plot(j_his, label='cost func')
    plt.legend()

    # Decision-boundary plot; assumes exactly two features in x.
    spn += 1
    plt.subplot(spr, spc, spn)
    grid_res = 200  # evaluation points per axis
    x1_min, x1_max = np.min(x[:, 0]), np.max(x[:, 0])
    x2_min, x2_max = np.min(x[:, 1]), np.max(x[:, 1])
    # Complex step in mgrid means "number of points", not step size.
    xx, yy = np.mgrid[x1_min:x1_max:grid_res * 1j, x2_min:x2_max:grid_res * 1j]
    # xx.size replaces the previously hard-coded 40000 so grid_res
    # can be changed in one place.
    XXYY = np.c_[np.ones(xx.size), xx.ravel(), yy.ravel()]
    _, h = FP(XXYY, theta1, theta2)
    h = h.reshape(xx.shape)
    plt.contourf(xx, yy, h > 0.5, zorder=0)
    y_ = y.ravel()
    plt.scatter(x[y_ == 1, 0], x[y_ == 1, 1], c='r', label='positive')
    plt.scatter(x[y_ == 0, 0], x[y_ == 0, 1], c='b', label='negative')
    plt.legend()

    plt.show()
