"""
Logistic Regression
灰度0-255
分类0与其他
入口均在最后
"""
# mnist_train:60000
# mnist_test:10000

# acc: 0.9912
# time: 473s

import pandas as pd
import numpy as np
import time
import random


def loadData(fileName):
    """Load an MNIST CSV file and binarize its labels.

    Each row holds a label (column 0) followed by 784 grey-scale pixel
    values.  A constant column of ones is appended so the bias term can
    live inside the weight vector.  Labels are collapsed to a binary
    task: digit 0 stays 0, every other digit becomes 1.

    Returns (features, labels) where features has shape (n, 785).
    """
    frame = pd.read_csv(fileName, header=None)
    frame[785] = 1  # bias column, appended as feature index 785

    raw = frame.values
    labels = raw[:, 0]
    features = raw[:, 1:]
    # We only run binary logistic regression: digit 0 vs. everything else.
    labels[labels > 0] = 1

    return features, labels


def sigmoid(x):
    """Numerically stable logistic function 1 / (1 + e^-x).

    The raw pixel features run 0-255, so x_train @ w can have very large
    magnitude; np.exp(-x) then overflows to inf and emits a
    RuntimeWarning.  Clipping the input to [-500, 500] prevents the
    overflow (e^500 is below the float64 max) while leaving the output
    unchanged within float64 precision.
    """
    return 1 / (1 + np.exp(-np.clip(x, -500, 500)))


def LR(x_train, y_train, epochs):
    """Fit logistic-regression weights by full-batch gradient descent.

    x_train : (n, 785) feature matrix (bias column included).
    y_train : (n,) binary labels in {0, 1}.
    epochs  : number of full passes over the training data.
    Returns the learned weight column vector w of shape (785, 1).
    """
    n_features = len(x_train[0])
    # Uniform [0, 1) random initialisation, one weight per feature.
    w = np.mat([random.uniform(0, 1)
                for _ in range(n_features)]).reshape(-1, 1)
    X = np.mat(x_train)   # (n, 785)
    Y = np.mat(y_train)   # (1, n)

    print('start training')
    lr = 0.001  # learning_rate

    for epoch in range(epochs):
        # Gradient of the cross-entropy loss
        #   loss = -(Y @ log(hx) + (1 - Y) @ log(1 - hx))
        # with respect to w is X^T (hx - Y^T).
        hx = sigmoid(X@w)          # predicted probabilities, (n, 1)
        print(f'in {epoch} epoch')
        w -= lr * X.T @ (hx - Y.T)
    return w


def predict(x, w):
    """Classify one sample: 1 if sigmoid(x @ w) >= 0.5, else 0.

    x : (1, 785) row vector, w : (785, 1) weight column.
    """
    probability = sigmoid(x@w)
    return 1 if probability >= 0.5 else 0


def test(x_test, y_test, w):
    """Evaluate learned weights on a test set, printing running accuracy.

    x_test : (n, 785) feature matrix, y_test : (n,) binary labels,
    w : (785, 1) weight column from LR().

    Returns the final accuracy (fraction of correct predictions), or
    0.0 for an empty test set.  (Previously the accuracy was only
    printed; the unused local `acc` has also been removed.)
    """
    acc_num = 0  # correct predictions seen so far
    total = len(x_test)
    for i in range(total):
        x = np.mat(x_test[i])
        y_pred = predict(x, w)
        if y_pred == y_test[i]:
            acc_num += 1
        print(f'find {i}th data cluster:y_pred={y_pred},y={y_test[i]}')
        print('now_acc=', acc_num / (i + 1))
    return acc_num / total if total else 0.0


if __name__ == "__main__":
    start = time.time()

    x_train, y_train = loadData('Mnist/mnist_train.csv')
    x_test, y_test = loadData('Mnist/mnist_test.csv')

    # train，得到w -> test
    w = LR(x_train, y_train, 200)
    test(x_test, y_test, w)

    # 获取结束时间
    end = time.time()

    print('run time:', end - start)
