# 1. 利用python编写如下程序，
# 现有一个猕猴桃分类回归样本训练集（s1.txt文件）和测试集（s2.txt文件），其中x1、x2、x3为猕猴桃的检验参数（x1为含糖量，x2为含水量，x3为重量），Y为分类结果（1为好果，0为坏果）。
# 2. 请通过Python实现逻辑回归模型，并用此模型预测测试集数据，具体要求如下：
# 完成数据集的读取
# 实现Sigmoid函数,并画出该函数
# 实现逻辑回归的代价函数，实现正则化逻辑回归
# 实现梯度下降函数，要求输出迭代过程中的代价函数值
# 通过梯度下降计算回归模型，用所得模型对测试集的数据进行预测，并计算准确率
# 使用X2，X3两组特征画出逻辑回归0-1分布图

import numpy as np
import matplotlib.pyplot as plt


def model(XX, theta):
    """Linear hypothesis: product of the design matrix and the parameter vector."""
    return np.dot(XX, theta)


def sigmoid(z):
    """Logistic function 1 / (1 + e^-z), mapping any real z into (0, 1)."""
    denominator = 1 + np.exp(-z)
    return 1.0 / denominator


def cost_func(h, y, lam, theta):
    """Regularized logistic-regression cost (cross-entropy + L2 penalty).

    Parameters
    ----------
    h : ndarray
        Predicted probabilities sigmoid(X @ theta), shape (m,).
    y : ndarray
        Ground-truth labels in {0, 1}, shape (m,).
    lam : float
        Regularization strength; 0 disables the penalty.
    theta : ndarray
        Current parameter vector; theta[0] (the bias) is NOT regularized.

    Returns
    -------
    float
        The cost value.
    """
    m = len(y)
    theta_cp = theta.copy()
    theta_cp[0] = 0  # exclude the bias term from the L2 penalty
    reg = lam / 2 / m * np.sum(theta_cp ** 2)
    # Clip probabilities away from exactly 0/1 so np.log never yields
    # -inf (and 0 * -inf never yields NaN) when the model saturates.
    eps = 1e-15
    h = np.clip(h, eps, 1 - eps)
    return reg - 1 / m * np.sum(y * np.log(h) + (1 - y) * np.log(1 - h))


def grad(XX, y, alpha=0.001, iter0=15000, lam=0):
    """Batch gradient descent for regularized logistic regression.

    Prints the cost value roughly 20 times over the run (and always once
    more at the end if the last iteration was not already printed).

    Parameters
    ----------
    XX : ndarray
        Design matrix with a leading bias column of ones, shape (m, n).
    y : ndarray
        Labels in {0, 1}, shape (m,).
    alpha : float
        Learning rate.
    iter0 : int
        Number of iterations; assumed >= 1.
    lam : float
        L2 regularization strength (the bias is not regularized).

    Returns
    -------
    tuple
        (theta, j_his, h): the fitted parameters, the per-iteration cost
        history (length iter0), and the hypothesis values of the final
        iteration (computed before the last theta update).
    """
    m, n = XX.shape
    # max() guards against iter0 < 20, which would otherwise make
    # group == 0 and crash the 'i % group' progress check below.
    group = max(1, iter0 // 20)
    theta = np.zeros(n)
    j_his = np.zeros(iter0)
    for i in range(iter0):
        z = model(XX, theta)
        h = sigmoid(z)
        j = cost_func(h, y, lam, theta)
        j_his[i] = j
        if 0 == i % group:
            print(f'#{i + 1} cost func value = {j}')
        theta_cp = theta.copy()
        theta_cp[0] = 0  # do not regularize the bias term
        r = lam / m * theta_cp  # element-wise penalty gradient (a vector, not a sum)
        dt = r + 1 / m * XX.T.dot(h - y)  # gradient of the cost w.r.t. theta
        theta -= alpha * dt
    if 0 != i % group:
        # Ensure the final cost value is always reported.
        print(f'#{i + 1} cost func value = {j}')
    return theta, j_his, h


def score(h, y):
    """Accuracy: fraction of samples whose thresholded prediction (> 0.5) matches the label."""
    predicted = h > 0.5
    return np.mean(predicted == y)


if __name__ == '__main__':
    plt.figure(figsize=[13, 12])
    spr = 2  # subplot rows
    spc = 2  # subplot columns
    spn = 0  # current subplot number

    # Draw the sigmoid function.
    spn += 1
    plt.subplot(spr, spc, spn)
    plt_x = np.linspace(-10, 10, 1000 + 1)
    plt_y = sigmoid(plt_x)
    plt.plot(plt_x, plt_y, label='sigmoid')
    plt.grid()
    plt.legend()

    # Load the training and testing sets; the last column is the label Y.
    data_train = np.loadtxt('s1.txt', delimiter=',')
    data_test = np.loadtxt('s2.txt', delimiter=',')
    x_train = data_train[:, :-1]
    y_train = data_train[:, -1]
    m_train = len(y_train)
    x_test = data_test[:, :-1]
    y_test = data_test[:, -1]
    m_test = len(y_test)

    # Standardize the features. The test set MUST be scaled with the
    # training set's statistics: scaling it with its own mean/std would
    # put the two sets in different coordinate systems and make the
    # trained model (and its decision boundary) invalid on the test data.
    mu = x_train.mean(axis=0)
    sigma = x_train.std(axis=0)
    x_train = (x_train - mu) / sigma
    x_test = (x_test - mu) / sigma

    # Prepend the bias column of ones to form the design matrices.
    XX_train = np.c_[np.ones(m_train), x_train]
    XX_test = np.c_[np.ones(m_test), x_test]

    # Train with batch gradient descent.
    alpha = 0.001  # learning rate
    iter0 = 15000  # number of iterations
    lam = 3        # L2 regularization strength
    theta, j_his, h_train = grad(XX_train, y_train, alpha, iter0, lam)
    print(f'Theta = {theta}')
    print(f'Training score = {score(h_train, y_train)}')

    # Plot the cost-function values recorded during training.
    spn += 1
    plt.subplot(spr, spc, spn)
    plt.title('Cost func values in training')
    plt.plot(j_his, label='cost function value')
    plt.xlabel('iterations')
    plt.grid()
    plt.legend()

    # Predict the test set and report its accuracy.
    h_test = sigmoid(model(XX_test, theta))
    print(f'Testing score = {score(h_test, y_test)}')

    # Decision boundary in the X2-X3 plane: solves
    # theta0 + theta2*x2 + theta3*x3 = 0 for x3, i.e. the boundary is
    # drawn with the X1 term held at 0 (its standardized mean).
    def get_x3h_from_x2(x2, theta):
        return - (theta[0] + theta[2] * x2) / theta[3]

    # Shared axis limits so the train/test scatter plots are comparable.
    plt_x2_train = x_train[:, 1]
    plt_x3_train = x_train[:, 2]
    plt_x2_test = x_test[:, 1]
    plt_x3_test = x_test[:, 2]
    plt_xlim = np.array([np.min([plt_x2_train.min(), plt_x2_test.min()]), np.max([plt_x2_train.max(), plt_x2_test.max()])])
    plt_ylim = np.array([np.min([plt_x3_train.min(), plt_x3_test.min()]), np.max([plt_x3_train.max(), plt_x3_test.max()])])
    plt_x3h = get_x3h_from_x2(plt_xlim, theta)

    # Scatter X2-X3 of the training data with the decision boundary.
    spn += 1
    plt.subplot(spr, spc, spn)
    plt.title('X2-X3 in training data')
    plt.xlim(plt_xlim)
    plt.ylim(plt_ylim)
    pos_idx = y_train == 1
    neg_idx = np.invert(pos_idx)
    plt.scatter(plt_x2_train[pos_idx], plt_x3_train[pos_idx], s=1, c='y', label='good')
    plt.scatter(plt_x2_train[neg_idx], plt_x3_train[neg_idx], s=1, c='b', label='bad')
    plt.plot(plt_xlim, plt_x3h, 'r-', label='border')
    plt.xlabel('X2')
    plt.ylabel('X3')
    plt.grid()
    plt.legend()

    # Scatter X2-X3 of the testing data with the same boundary.
    spn += 1
    plt.subplot(spr, spc, spn)
    plt.title('X2-X3 in testing data')
    plt.xlim(plt_xlim)
    plt.ylim(plt_ylim)
    pos_idx = y_test == 1
    neg_idx = np.invert(pos_idx)
    plt.scatter(plt_x2_test[pos_idx], plt_x3_test[pos_idx], s=1, c='y', label='good')
    plt.scatter(plt_x2_test[neg_idx], plt_x3_test[neg_idx], s=1, c='b', label='bad')
    plt.plot(plt_xlim, plt_x3h, 'r-', label='border')
    plt.xlabel('X2')
    plt.ylabel('X3')
    plt.grid()
    plt.legend()

    # Finally show all drawings.
    plt.show()
