import numpy as np
import matplotlib.pyplot as plt
import math


def model(X, theta):
    """Linear layer: matrix-multiply the inputs by the weight matrix."""
    return X @ theta


def sigmoid(z):
    """Logistic activation: squash z elementwise into the open interval (0, 1)."""
    denominator = 1 + np.exp(-z)
    return 1 / denominator


def cost_func(h, y, eps=1e-12):
    """Mean binary cross-entropy loss.

    Parameters
    ----------
    h : ndarray, predicted probabilities in [0, 1] (shape (m, 1)).
    y : ndarray, true 0/1 labels, same shape as h.
    eps : float, clipping bound keeping log() arguments strictly positive.

    Returns
    -------
    float, the averaged cross-entropy.

    Fix: the original called np.log directly on h / (1 - h); once a sigmoid
    output saturates to exactly 0.0 or 1.0 that produced -inf or nan and
    poisoned the whole cost. Clipping h into [eps, 1 - eps] keeps the loss
    finite; for non-saturated inputs the value is unchanged.
    """
    m = len(h)
    h = np.clip(h, eps, 1 - eps)
    return -1 / m * np.sum(y * np.log(h) + (1 - y) * np.log(1 - h))


def FP(X, theta1, theta2):
    """Forward propagation through the single hidden layer.

    Returns the hidden-layer activation and the output-layer activation,
    in that order.
    """
    hidden = sigmoid(model(X, theta1))   # layer 1 -> layer 2
    output = sigmoid(model(hidden, theta2))  # layer 2 -> layer 3
    return hidden, output


def BP(X, y, theta1, theta2, a2, a3, alpha):
    """Backward propagation: apply one gradient-descent step to both layers.

    Note: the weight matrices are updated in place (augmented assignment)
    and also returned for convenience.
    """
    m = len(X)
    # error terms, output layer first, then pushed back through sigmoid'
    delta3 = a3 - y
    delta2 = delta3.dot(theta2.T) * (a2 * (1 - a2))
    # averaged gradients for each weight matrix
    grad_theta2 = 1 / m * a2.T.dot(delta3)
    grad_theta1 = 1 / m * X.T.dot(delta2)
    # in-place descent step
    theta2 -= alpha * grad_theta2
    theta1 -= alpha * grad_theta1
    return theta1, theta2


def grad(X, y, alpha=0.1, iter0=2000):
    """Train the 2-layer network (8 hidden units) with batch gradient descent.

    Parameters
    ----------
    X : ndarray of shape (m, n), training inputs (bias column included).
    y : ndarray of shape (m, 1), 0/1 training labels.
    alpha : float, learning rate.
    iter0 : int, number of gradient-descent iterations.

    Returns
    -------
    (theta1, theta2, j_his, a3): trained weight matrices, per-iteration
    cost history, and the final output activations on X.

    Fix: the original used ``group = iter0 // 20`` unguarded, so any
    iter0 < 20 made group 0 and ``i % group`` raised ZeroDivisionError.
    ``max(1, ...)`` keeps the ~20-line progress cadence for large iter0
    and prints every iteration for tiny ones.
    """
    m, n = X.shape
    group = max(1, iter0 // 20)  # print roughly 20 progress lines
    # random initialization breaks the symmetry between hidden units
    theta1 = np.random.randn(n, 8)
    theta2 = np.random.randn(8, 1)
    j_his = np.zeros(iter0)
    for i in range(iter0):
        a2, a3 = FP(X, theta1, theta2)
        j = cost_func(a3, y)  # cost BEFORE this iteration's update
        j_his[i] = j
        if i % group == 0:
            print(f'#{i + 1} cost function value = {j}')
        theta1, theta2 = BP(X, y, theta1, theta2, a2, a3, alpha)
    # make sure the last iteration's cost is always reported
    if i % group != 0:
        print(f'#{i + 1} cost function value = {j}')
    return theta1, theta2, j_his, a3


def score(h, y):
    """Classification accuracy of probabilities h against labels y at a 0.5 threshold."""
    predicted = h > 0.5
    return np.mean(predicted == y)


if '__main__' == __name__:
    # load
    # NOTE(review): assumes a comma-separated file whose last column is a
    # 0/1 class label and the rest are features — confirm against egg.txt.
    data = np.loadtxt(r'..\..\..\..\..\large_data\机器学习1-周考3-技能\egg.txt', delimiter=',')
    m = len(data)  # total number of samples

    # shuffle (fixed seed so the train/test split is reproducible)
    np.random.seed(1)
    np.random.shuffle(data)
    x = data[:, :-1]  # feature columns
    y = data[:, -1:]  # label column, kept 2-D with shape (m, 1)

    # scale: standardize each feature to zero mean / unit variance, in place
    mu = x.mean(axis=0)
    sigma = x.std(axis=0)
    x -= mu
    x /= sigma

    # splice: prepend a bias column of ones so theta1 includes an intercept
    X = np.c_[np.ones(m), x]

    # split: first 70% (post-shuffle) for training, rest for testing
    m_train = int(math.floor(0.7 * m))
    X_train, X_test = np.split(X, [m_train])
    y_train, y_test = np.split(y, [m_train])

    # train
    alpha = 0.1
    iter0 = 20000
    theta1, theta2, j_his, h_train = grad(X_train, y_train, alpha, iter0)
    print(f'Training score = {score(h_train, y_train)}')

    # plot cost function values in iterations
    plt.plot(j_his, label='cost function')
    plt.xlabel('Iterations')
    plt.legend()
    plt.grid()

    # test: forward-propagate the held-out set with the trained weights
    _, h_test = FP(X_test, theta1, theta2)
    print(f'Testing score = {score(h_test, y_test)}')

    # finally show all drawings
    plt.show()
