#include "JointBoosting.h"
#include "../Math/MathTools.h"
#include "../Tools/ZLogManager.h"

#pragma warning(disable:4244)

GLOBAL_NAMESPACE_BEGIN

NAMESPACE_MACHINE_LEARNING_BEGIN

// Constructs an untrained booster with default training parameters:
// 10 boosting rounds and 10 threshold-search steps per dimension.
JointBoosting::JointBoosting()
{
    params_.roundM  = 10;   // number of boosting rounds M
    params_.stepNum = 10;   // threshold steps used by the stump search
}

// Releases every weak learner owned by this booster.
JointBoosting::~JointBoosting()
{
    destroy();
}

void JointBoosting::destroy()
{
    std::vector<WeakLearner*>::iterator itDS = weakLearners_.begin();
    while(itDS!=weakLearners_.end())
    {
        if (*itDS)
            delete *itDS;
        itDS++;
    }
    weakLearners_.clear();
}

// Sets the training hyper-parameters.
// roundM  - number of boosting rounds (weak learners to fit).
// stepNum - number of threshold-search steps per feature dimension.
void JointBoosting::setParams(int roundM, int stepNum)
{
    params_.stepNum = stepNum;
    params_.roundM  = roundM;
}

// Deletes every learner in 'learners' and leaves the vector empty.
// The caller transfers ownership of all contained pointers.
void JointBoosting::releaseWeakLearners(std::vector<WeakLearner*>& learners)
{
    for (size_t i = 0; i < learners.size(); ++i)
    {
        // delete on a null pointer is a no-op, so no guard is required
        delete learners[i];
    }
    learners.clear();
}

// Returns the weak learner at position idx, or NULL when idx is out of range.
// Bug fix: the bound check was `idx > getLearnerSize()`, which let
// idx == size() through and indexed one past the end of weakLearners_.
WeakLearner* JointBoosting::getWeakLearnerAt(int idx)
{
    if (idx < 0 || idx >= getLearnerSize())
        return NULL;

    return weakLearners_[idx];
}

// Const overload: returns the weak learner at position idx, or NULL when
// idx is out of range.
// Bug fix: the bound check was `idx > getLearnerSize()`, which let
// idx == size() through and indexed one past the end of weakLearners_.
WeakLearner* JointBoosting::getWeakLearnerAt(int idx) const
{
    if (idx < 0 || idx >= getLearnerSize())
        return NULL;

    return weakLearners_[idx];
}

// Returns the number of weak learners currently in the ensemble.
// The cast makes the size_t -> int narrowing explicit instead of relying
// on the file-wide warning suppression.
int JointBoosting::getLearnerSize()
{
    return static_cast<int>(weakLearners_.size());
}

// Const overload: returns the number of weak learners in the ensemble.
int JointBoosting::getLearnerSize() const
{
    return static_cast<int>(weakLearners_.size());
}

// Intentionally empty: no per-dataset initialization is performed here;
// train() computes everything it needs itself.
// NOTE(review): 'data' is unused -- confirm whether an initialization step
// was planned or whether this hook can be removed from the interface.
void JointBoosting::init(const Eigen::MatrixXd& data)
{

}

// Trains the boosted ensemble on trainData (one sample per row) with integer
// class labels. Labels must lie in [0, C) and every class in [0, C) must
// occur at least once (each round fits one decision stump shared by a subset
// of classes and keeps the subset with the lowest weighted error).
// Returns false when the input is empty/inconsistent or when some round
// fails to produce a weak learner.
bool JointBoosting::train(const Eigen::MatrixXd& trainData, const Eigen::VectorXi& labels)
{
    // NOTE: Please make sure the input labels is in [0, C), and for each c in [0, C), at least one i s.t. labels[i]=c
    if (trainData.rows() == 0 || trainData.cols() == 0)
        return false;
    if (labels.size() != trainData.rows())
        return false;

    int nSampleSize = trainData.rows();
    int nSampleDim  = trainData.cols();

    // Count the distinct class ids present in the labels.
    std::vector<int> sortLabel;
    sortLabel.reserve(nSampleSize);
    for (int i = 0; i < nSampleSize; i++)  // int index: avoids signed/unsigned comparison with nSampleSize
    {
        sortLabel.push_back(labels(i));
    }
    std::sort(sortLabel.begin(), sortLabel.end());
    std::vector<int>::iterator uniqueIter = std::unique(sortLabel.begin(), sortLabel.end());
    int diffClass = uniqueIter - sortLabel.begin();

    // There are 2^diffClass subsets of classes that could share a stump.
    // (1 << diffClass) replaces the original (2 << diffClass - 1): same value
    // for diffClass >= 1, but it does not rely on operator precedence and is
    // well-defined for diffClass == 0.
    int nClass = (1 << diffClass);
    std::vector< std::set<int> > subClasses(nClass);
    for (int i = 0; i < nClass; i++)
    {
        // Decode subset index i: bit b is set  <=>  class b is in the subset.
        std::set<int> classes;
        int count = 0;
        int oneClass = i;
        while (oneClass)
        {
            if (oneClass % 2 != 0)
                classes.insert(count);
            oneClass = (oneClass >> 1);
            count++;
        }
        subClasses[i] = classes;
    }

    // Cache per-dimension value ranges used by the stump threshold search.
    params_.dimNum      = nSampleDim;
    params_.classNum    = diffClass;
    params_.trainDataDimMaxs.resize(nSampleDim);
    params_.trainDataDimMins.resize(nSampleDim);
    for (int i = 0; i < nSampleDim; i++)
    {
        params_.trainDataDimMins[i] = trainData.col(i).minCoeff();
        params_.trainDataDimMaxs[i] = trainData.col(i).maxCoeff();
    }

    // 1. Initialize per-sample/per-class weights and accumulated responses.
    //    Release any learners from a previous train() call first, so that
    //    retraining neither leaks them nor mixes two ensembles.
    releaseWeakLearners(weakLearners_);
    weights_.resize(nSampleSize, params_.classNum);
    weights_.setOnes();
    Hs_.resize(nSampleSize, params_.classNum);
    Hs_.setZero();

    // 2. Repeat for M rounds; each round keeps the best shared stump.
    int M = params_.roundM;
    for (int m = 0; m < M; m++)
    {
        prepareParamForWeakLearner();
        double minError = DBL_MAX;
        int bestC = -1;
        DecisionStump* bestWeakLearner = NULL;
        // Scratch stump lives on the stack: no leak if a fit throws, and no
        // manual new/delete pair to keep balanced.
        DecisionStump weakLearner;
        for (int c = 1; c < nClass-1; c++)    // do not use empty set (subClasses[0]) and full set (subClasses[nClass-1])
        {
            // 2.1 fit shared stump
            weakLearner.reset();
            weakLearner.setMaxMinCoeffs(params_.trainDataDimMaxs, params_.trainDataDimMins, params_.stepNum);
            weakLearner.setSubClasses(subClasses[c], params_.classNum, c);
            //weakLearner.setWeightPrecomputeData(t0c_, totalWeights_);

            weakLearner.train(trainData, labels, weights_);

            // 2.2 evaluate error; keep a copy of the best stump so far
            double err = weakLearner.evalError(trainData, labels, weights_);
            if ( err < minError )
            {
                minError = err;
                bestC = c;
                if (bestWeakLearner)
                    delete bestWeakLearner;
                bestWeakLearner = new DecisionStump(weakLearner);
            }
        }

        if (bestC<0)
        {
            DebugLog << "Can not find a weak learner.";
            return false;
        }

        // save the best weak learner (ensemble takes ownership)
        weakLearners_.push_back(bestWeakLearner);
        DebugLog << bestWeakLearner;

        // update the weights so the next round focuses on current mistakes
        updateWeights(trainData, *bestWeakLearner, labels, weights_);
        DebugLog << weights_.row(0);
        DebugLog << weights_.row(1);
    }

    return true;
}

// Multiplicative boosting weight update after accepting a new weak learner:
// weights(i, c) *= exp(-h(x_i, c) * z_i), where h is the learner's response
// for sample i under class c and z_i is the +/-1 response the learner
// assigns to sample i's true label.
// Fixes: the local 'label' was computed but never used (labels(i) was re-read),
// and the unused local 'nDataDim' is removed. getLabelResponse(label) is
// hoisted out of the class loop since its argument does not depend on c.
void JointBoosting::updateWeights(const Eigen::MatrixXd& trainData, const WeakLearner& weakLearner, 
                                  const Eigen::VectorXi& labels, Eigen::MatrixXd& weights)
{
    int nDataSize   = trainData.rows();
    int nClassSize  = weights.cols();
    for (int i = 0; i < nDataSize; i++)
    {
        int label = labels(i);
        int resp = weakLearner.getLabelResponse(label);  // loop-invariant in c
        for (int c = 0; c < nClassSize; c++)
        {
            double w = weakLearner.evaluate(trainData, i, c);
            weights(i, c) *= exp((-1.0)*w*resp);
        }
    }
}

// Caches the per-class weight totals (t0c_) and the overall weight mass
// (totalWeights_) of the current weights_ matrix, so that weak-learner
// fitting can reuse them without rescanning the weights.
// Summation order matches the column-by-column accumulation intentionally,
// so floating-point results are bit-identical.
void JointBoosting::prepareParamForWeakLearner()
{
    int nClass  = params_.classNum;
    int nSample = weights_.rows();
    t0c_.resize(nClass, 0);
    double grandTotal = 0;
    for (int c = 0; c < nClass; c++)
    {
        double classTotal = 0;
        for (int r = 0; r < nSample; r++)
        {
            classTotal += weights_(r, c);
        }
        t0c_[c] = classTotal;
        grandTotal += classTotal;
    }
    totalWeights_ = grandTotal;
}

// Predicts the class of a single sample: the class whose accumulated
// weak-learner response is largest. Returns -1 when no class wins
// (classNum == 0, i.e. an untrained model).
// Fixes: the 'evals' vector was filled but never read (dead allocation and
// stores on every call), and the inner loop mixed a signed index with the
// unsigned vector size; the learner count is also hoisted out of the loops.
int JointBoosting::predict(const Eigen::RowVectorXd& oneData)
{
    int classSize = params_.classNum;
    size_t learnerSize = weakLearners_.size();
    double maxEval = -DBL_MAX;
    int  bestClass = -1;
    for (int c = 0; c < classSize; c++)
    {
        double eval = 0;
        for (size_t i = 0; i < learnerSize; i++)
        {
            eval += weakLearners_[i]->evaluate(oneData, c);
        }
        if (eval > maxEval)
        {
            maxEval = eval;
            bestClass = c;
        }
    }
    //DebugLog << "Best class: " << bestClass << " with energy: " << maxEval;
    return bestClass;
}

// Fills evalMat(i, c) with the summed weak-learner response of query sample
// i (row i of queryData) for class c. evalMat is resized to
// (queryData.rows() x classNum).
// Fixes the signed/unsigned comparison in the innermost loop and hoists the
// learner count out of the O(rows x classes) loop nest.
void JointBoosting::evaluate(const Eigen::MatrixXd& queryData, Eigen::MatrixXd& evalMat)
{
    int nDataSize = queryData.rows();
    int classSize = params_.classNum;
    size_t learnerSize = weakLearners_.size();
    evalMat.resize(nDataSize, classSize);
    for (int iData = 0; iData < nDataSize; iData++)
    {
        for (int c = 0; c < classSize; c++)
        {
            double eval = 0;
            for (size_t i = 0; i < learnerSize; i++)
            {
                eval += weakLearners_[i]->evaluate(queryData.row(iData), c);
            }
            evalMat(iData, c) = eval;
        }
    }
}

// Stub: always reports failure.
// NOTE(review): presumably intended to persist or finalize the trained
// model; confirm whether callers rely on the constant-false return.
bool JointBoosting::commit()
{
    return false;
}


NAMESPACE_MACHINE_LEARNING_END

GLOBAL_NAMESPACE_END