from typing import Callable, Union
import numpy as np

# read data from csv file
# read data from csv file
def data_from_csv(file:str) -> np.ndarray:
    """Load a numeric dataset from a comma-separated text file.

    Args:
        file: Path to the CSV file; every field must parse as a number.

    Returns:
        A 2-D array with one row per line of the file.
    """
    # Use a single-character delimiter: numpy >= 1.23 rejects multi-character
    # delimiters such as ', ', and float parsing ignores stray whitespace
    # around each field anyway.
    return np.loadtxt(file, delimiter=',')

# weighted 0/1 loss
def loss_func(weights:np.ndarray, preds:np.ndarray, y:np.ndarray) -> float:
    """Return the total weight of the samples whose prediction disagrees with y."""
    misclassified = np.not_equal(preds, y).astype(np.float32)
    return (weights * misclassified).sum()

# base classifier: stump classifier
class StumpClassifier(object):
    """Decision stump: thresholds a single feature dimension.

    Serves as the weak learner for AdaBoost.  Training exhaustively picks
    the (dimension, threshold, criterion) triple that minimizes the
    weighted 0/1 loss on the training data.
    """

    def __init__(self, data: Union[np.ndarray, str],
                 weights: np.ndarray,
                 loss_func: Callable = loss_func) -> None:
        """
        Args:
            data: Either a 2-D array whose last column holds the labels,
                or a path to a CSV file with that layout.
            weights: Per-sample weights consumed by ``loss_func``.
            loss_func: Callable ``(weights, preds, y) -> weighted error``.
        """
        if isinstance(data, str):
            self.data = data_from_csv(data)
        else:
            self.data = data
        self.train_x, self.train_y = self.data[:, :-1], self.data[:, -1]
        # Candidate thresholds per feature dimension: midpoints between
        # consecutive sorted feature values of that dimension.
        self.dim_thresholds = np.array(
            [(dim_feat[1:] + dim_feat[:-1]) / 2
             for dim_feat in np.sort(self.train_x.transpose())]
        )
        self.loss_func = loss_func
        self.weights = weights
        self.dim: Union[int, None] = None           # chosen feature dimension
        self.threshold: Union[float, None] = None   # chosen split value
        self.criterion: Union[str, None] = None     # 'lt' or 'gt'

    def train(self) -> float:
        """Search all (dim, threshold, criterion) triples for the minimum
        weighted error; store the best triple on self and return its error."""
        min_error = float('inf')
        for dim, thresholds in enumerate(self.dim_thresholds):
            for threshold in thresholds:
                # 'lt': data less than threshold is positive
                # 'gt': data greater than threshold is positive
                for criterion in ['lt', 'gt']:
                    error = self.loss_func(self.weights,
                                           self(self.train_x, dim, threshold, criterion),
                                           self.train_y)
                    if error < min_error:
                        min_error = error
                        self.dim = dim
                        self.threshold = threshold
                        self.criterion = criterion

        return min_error

    # predict
    def __call__(self, x: np.ndarray, dim: Union[int, None] = None,
                 threshold: Union[float, None] = None,
                 criterion: Union[str, None] = None) -> np.ndarray:
        """Predict +1/-1 labels for the rows of ``x``.

        Any of ``dim``/``threshold``/``criterion`` left as None falls back
        to the values selected by :meth:`train`.

        Raises:
            ValueError: If the effective criterion is neither 'lt' nor 'gt'
                (the previous behavior silently returned None).
        """
        if dim is None:
            dim = self.dim
        if threshold is None:
            threshold = self.threshold
        if criterion is None:
            criterion = self.criterion

        if criterion == 'lt':
            return np.where(x[:, dim] < threshold, 1, -1)
        elif criterion == 'gt':
            return np.where(x[:, dim] > threshold, 1, -1)
        raise ValueError(f"criterion must be 'lt' or 'gt', got {criterion!r}")

# AdaBoost classifier
class MyAdaBoost(object):
    """AdaBoost ensemble of decision stumps trained on a CSV dataset."""

    # init an AdaBoost classifier based on the given data
    def __init__(self, data_file:str, number_basis:int=3) -> None:
        """
        Args:
            data_file: Path to a CSV file; the last column holds +1/-1 labels.
            number_basis: Maximum number of weak learners to fit.
        """
        self.data = data_from_csv(data_file)
        self.train_x, self.train_y = self.data[:, :-1], self.data[:, -1]
        # Start with uniform per-sample weights.
        self.weights = np.full(self.data.shape[0], 1 / self.data.shape[0])
        self.number_basis = number_basis    # number of base classifiers
        self.alpha = []     # coefficients of base classifiers
        self.h = []         # base classifiers

    # train an AdaBoost
    def train(self, base_classifier:str="stump"):
        """Fit up to ``number_basis`` stumps, re-weighting samples after each.

        Stops early when the best stump is no better than random guessing
        (weighted error >= 0.5).
        """
        # Guard against division by zero / inf alpha when a stump is perfect
        # (error_t == 0); the original formula produced NaN weights there.
        eps = 1e-10
        for t in range(self.number_basis):
            h_t = StumpClassifier(self.data, self.weights)
            error_t = h_t.train()
            if error_t >= 0.5:
                # Weak learner no better than chance: stop boosting.
                break
            alpha_t = 0.5 * np.log((1 - error_t) / max(error_t, eps))
            # Increase the weight of misclassified samples, then renormalize.
            self.weights = self.weights * np.exp(-alpha_t * self.train_y * h_t(self.train_x))
            self.weights = self.weights / self.weights.sum()
            self.h.append(h_t)
            self.alpha.append(alpha_t)

    # test data using a trained Adaboost
    def test(self, test_data:np.ndarray) -> float:
        """Return the accuracy of the ensemble on labeled data
        (last column of ``test_data`` holds the true labels)."""
        test_x, test_y = test_data[:, :-1], test_data[:, -1]
        preds = self(test_x)
        return float((preds == test_y).mean())

    # predict, call the AdaBoost classifier
    def __call__(self, x:np.ndarray) -> np.ndarray:
        """Sign of the alpha-weighted vote of all trained stumps (+1/-1)."""
        score = np.zeros(x.shape[0])
        for alpha_t, h_t in zip(self.alpha, self.h):
            score = score + alpha_t * h_t(x)
        return np.sign(score)

if __name__ == "__main__":
    ada_boost = MyAdaBoost('./data.csv')
    ada_boost.train()
    accuracy = ada_boost.test(ada_boost.data)
    print(f"Accuracy: {accuracy*100:.2f}%")