import pandas as pd
import numpy as np

class EnsembleLearner():
    '''
    Ensemble methods combining several base models: bagging and stacking.

    Main methods:
        1. weight_bagging: weighted average of all models' predictions.
        2. stacking: two-layer model fusion (out-of-fold predictions feed a
           second-level model).
        3. gcforest: not implemented yet.

    Parameters
    ----------
    train_x : DataFrame, shape(n_samples, n_features)
        The train data.
    train_y : array-like, shape(n_samples, )
        The train labels.
    test_x : DataFrame, shape(n_test, n_features)
        The test data.
    '''
    def __init__(self, train_x, train_y, test_x):
        self.train_x = train_x
        self.train_y = train_y
        self.test_x = test_x

    def weight_bagging(self, func_weight_map):
        '''
        Get the weighted-average prediction of several models on the test set.

        Parameters
        ----------
        func_weight_map : dict
            Mapping from predict func to weight. A predict func should be like
            func(train_x, train_y, test_x) and return the predictions for
            test_x, shape(n_test, ).

        Returns
        -------
        res : array-like, shape(n_test, )
            Weighted average of all models' predictions:
            sum(w_i * pred_i) / sum(w_i).
        '''
        # BUG FIX: the original used np.zeros_like(self.test_y), but no
        # `test_y` attribute exists -- accumulate into a zero vector sized
        # by the number of test rows instead.
        res = np.zeros(self.test_x.shape[0])
        for func, weight in func_weight_map.items():
            res = res + weight * np.asarray(func(self.train_x, self.train_y, self.test_x))
        # Normalize by the total weight so the result is a proper weighted
        # average even when the weights do not sum to 1.
        res = res / sum(func_weight_map.values())
        return res

    def stacking(self, funcs, final_func, n_folds=3, random_state=None):
        '''
        Stack all models to get a robust result.

        The process includes two steps:
            1. Train all base models and collect out-of-fold predictions for
               the train set (plus fold-averaged predictions for the test set).
            2. Train a second-level model on those predictions to get the
               final result.

        Parameters
        ----------
        funcs : list of functions
            Each function is a model like func(train_x, train_y, eval_x, test_x)
            returning (predict_eval, predict_test).
        final_func : function
            The model used in the second layer, called as
            final_func(next_train, train_y, next_test).
        n_folds : int
            The number of cv folds.
        random_state : int or None
            The random_state to keep the result reproducible.

        Returns
        -------
        res : array-like, shape(n_test, )
            The final predicting result.
        '''
        # Original code referenced StratifiedKFold without importing it
        # anywhere in the file; import locally to keep the method runnable.
        from sklearn.model_selection import StratifiedKFold

        train_num = self.train_x.shape[0]
        test_num = self.test_x.shape[0]
        model_num = len(funcs)
        # Second-layer inputs: one column per base model.
        next_train = np.zeros((train_num, model_num))
        next_test = np.zeros((test_num, model_num))

        skf = StratifiedKFold(n_splits=n_folds, shuffle=True, random_state=random_state)
        # BUG FIX: split on train_x/train_y (the original referenced
        # non-existent self.features/self.labels).
        for train_index, eval_index in skf.split(self.train_x.values, self.train_y):
            x_train, x_eval = self.train_x.iloc[train_index], self.train_x.iloc[eval_index]
            # np.asarray guards against label containers (e.g. pandas Series)
            # whose [] indexing is label-based rather than positional.
            y_train = np.asarray(self.train_y)[train_index]
            for idx, func in enumerate(funcs):
                predict_eval, predict_test = func(x_train, y_train, x_eval, self.test_x)
                # BUG FIX: write out-of-fold predictions at eval_index
                # (the original used an undefined `test_index`).
                next_train[eval_index, idx] = predict_eval
                next_test[:, idx] += predict_test
        # BUG FIX: each column of next_test accumulates one prediction per
        # fold, so average over folds -- not over the number of models.
        next_test /= n_folds
        res = final_func(next_train, self.train_y, next_test)
        return res

    def gcforest(self):
        # Not implemented yet (placeholder, as stated in the class docstring).
        pass

