"""
Adaboost: 基于 boost tree
入口均在最后
数据预处理和 NaiveBayes 一样灰度仅有0，1
"""
# mnist_train: 60000 samples
# mnist_test: 10000 samples

# Actually trains on the first 1000 samples
# Actually evaluates on the first 200 samples
# acc: 0.98
# time: 347s

import pandas as pd
import numpy as np
import time
from collections import Counter


def loadData(fileName):
    """Load a MNIST-style CSV and binarize it for boosting.

    Each row is ``label, pixel_0, ..., pixel_783``. Pixels are thresholded
    to {0, 1} at 128; labels become a binary problem: digit 0 -> -1,
    every other digit -> +1 (signed labels so sample weights can be applied).

    Args:
        fileName: path to the CSV file (no header row).

    Returns:
        (features, labels): two numpy int arrays.
    """
    raw = pd.read_csv(fileName, header=None).values
    pixels = raw[:, 1:]
    digits = raw[:, 0]

    # Binarize gray levels: >= 128 becomes 1, everything else 0.
    features = np.where(pixels >= 128, 1, 0)
    # Signed labels for AdaBoost: digit 0 is the negative class.
    labels = np.where(digits == 0, -1, 1)

    return features, labels


def calc_Gx_e(X, Y, div, rule, D, feature):
    """Evaluate one decision stump on a single feature column.

    Args:
        X: (m, n) feature matrix.
        Y: (m,) signed labels in {-1, +1}.
        div: split threshold.
        rule: 'LessIsOne' (values <= div predict +1) or the reverse.
        D: (m,) sample weights.
        feature: column index of the feature to split on.

    Returns:
        (Gx, e): the stump's per-sample predictions (numpy array of +/-1)
        and its weighted classification error (float).
    """
    col = X[:, feature]
    if rule == 'LessIsOne':
        below, above = 1, -1
    else:
        below, above = -1, 1
    # Vectorized stump: samples strictly above the threshold get `above`.
    Gx = np.where(col > div, above, below)
    # Weighted error = total weight of the misclassified samples.
    e = float(np.sum(D[Gx != Y]))
    return Gx, e


def create_single_BT(X, Y, D):
    """Exhaustively search for the best decision stump under weights D.

    Tries every feature, both polarity rules, and the three thresholds
    that matter for binarized {0, 1} pixels, keeping the stump with the
    lowest weighted error.

    Args:
        X: (m, n) binarized feature matrix.
        Y: (m,) signed labels in {-1, +1}.
        D: (m,) current AdaBoost sample weights.

    Returns:
        dict with keys 'div', 'rule', 'feature', 'e', 'Gx' and 'alpha'.
    """
    n_features = X.shape[1]
    best = {'e': 1}  # sentinel: worse than any real weighted error

    # Gray level itself is meaningless after binarization, so only the
    # splits between/outside {0, 1} need to be considered.
    for feat in range(n_features):
        for rule in ['LessIsOne', 'BiggerIsOne']:
            for div in [-0.5, 0.5, 1.5]:
                Gx, e = calc_Gx_e(X, Y, div, rule, D, feat)
                if e < best['e']:
                    best.update(div=div, rule=rule, feature=feat, e=e, Gx=Gx)

    # Stump weight (formula 8.2): grows as the weighted error shrinks.
    best['alpha'] = 1/2 * np.log((1-best['e'])/best['e'])
    return best


def create_BT(X, Y, tree_num=50):
    """Train an AdaBoost ensemble of decision stumps.

    Args:
        X: (m, n) binarized feature matrix.
        Y: (m,) signed labels in {-1, +1}.
        tree_num: maximum number of stumps to train (default 50).

    Returns:
        list of stump dicts (see create_single_BT); may be shorter than
        tree_num if the training error reaches zero early.
    """
    m, n = X.shape
    # Initial weights: uniform over samples (formula 8.1).
    D = np.array([1/m]*m)
    # Running additive model f(x). Must be a float ndarray: the original
    # `[0]*m` list bug made `Fx += ndarray` *extend* the list each epoch
    # instead of accumulating elementwise.
    Fx = np.zeros(m)

    boosting_tree = []  # list of stump dicts
    for epoch in range(tree_num):
        sBT = create_single_BT(X, Y, D)

        # Normalization factor Zm (formula 8.5).
        Zm = np.sum(D*np.exp(-1*sBT['alpha']*Y*sBT['Gx']))
        # Weight update for the next round (formula 8.4).
        D = D/Zm*np.exp(-1*sBT['alpha']*Y*sBT['Gx'])

        boosting_tree.append(sBT)

        # Additive model update (formula 8.6): alpha_m * G_m(x).
        # (Fixed: previously `alpha + Gx`, a wrong operator.)
        Fx += sBT['alpha']*sBT['Gx']
        # Final classifier (formula 8.7): sign of the weighted vote.
        Gx = np.sign(Fx)
        # Training error rate of the current ensemble.
        total_error_num = int(np.sum(Gx != Y))
        total_error_rate = total_error_num/m

        # Early stop once the ensemble fits the training set perfectly.
        if total_error_rate == 0:
            return boosting_tree

        print(
            f'in {epoch}th epoch, error={sBT["e"]}. total error is {total_error_rate}')
    return boosting_tree


def predict(x, tree):
    """Classify one sample with the boosted stump ensemble.

    Args:
        x: 1-D feature vector (binarized pixels).
        tree: list of stump dicts with keys 'div', 'rule', 'alpha', 'feature'.

    Returns:
        np.sign of the weighted vote: -1.0, 0.0 or 1.0.
    """
    fx = 0
    for stump in tree:
        # Mirror the training-time split in calc_Gx_e exactly: samples
        # strictly above `div` get the "above" label. (Fixed: this used
        # `x < div`, which disagrees with training for samples exactly at
        # the threshold — harmless for binary pixels, but inconsistent.)
        if stump['rule'] == 'LessIsOne':
            below, above = 1, -1
        else:  # 'BiggerIsOne'
            below, above = -1, 1
        vote = above if x[stump['feature']] > stump['div'] else below
        # f(x) = sum(alpha_m * G_m(x)); final class is sign(f(x)).
        fx += stump['alpha'] * vote

    return np.sign(fx)


def test(X, Y, tree):
    """Evaluate the boosted ensemble, printing running accuracy per sample.

    Args:
        X: (m, n) binarized test features.
        Y: (m,) signed test labels in {-1, +1}.
        tree: trained ensemble from create_BT.

    Returns:
        float: final accuracy on the set (0.0 for an empty set).
        (Previously the accuracy was only printed; returning it is
        backward compatible since existing callers ignore the result.
        Also removed the unused local `acc`.)
    """
    acc_num = 0
    for i in range(len(X)):
        print('testing ***', i)
        Gx = predict(X[i], tree)
        if Gx == Y[i]:
            acc_num += 1
        print(f'testing {i}th data :y_pred={Gx},y={Y[i]}')
        print('now_acc=', acc_num / (i + 1))
    return acc_num / len(X) if len(X) else 0.0


if __name__ == '__main__':
    # Time the full train + evaluate run.
    t0 = time.time()

    # Full MNIST: 60000 train / 10000 test rows; only a slice is used below.
    X_train, y_train = loadData('Mnist/mnist_train.csv')
    X_test, y_test = loadData('Mnist/mnist_test.csv')

    # Train 30 stumps on the first 1000 samples, evaluate on 200.
    boosting_tree = create_BT(X_train[0:1000], y_train[0:1000], 30)
    test(X_test[0:200], y_test[0:200], boosting_tree)

    t1 = time.time()

    print(t1-t0)
