#include "ML_JointBoosting.h"
#include "ML_JointBoostingParams.h"
#include "ML_WeakLearner.h"
#include "ML_DecisionStump.h"
#include "ML_DecisionStumpPreprocess.h"
#include "ML_SubclassFolder.h"
#include "../Common/ErrorCode.h"
#include "../Tools/ZLogManager.h"
#include <algorithm>
#include <cfloat>
#include <cmath>
#include <cstddef>
#include <list>
#include <vector>

GLOBAL_NAMESPACE_BEGIN
NAMESPACE_MACHINE_LEARNING_BEGIN

// Private implementation of the JointBoosting classifier (pimpl for
// MLJointBoosting). Owns the trained weak learners (deleted in destroy())
// and the per-sample/per-class boosting weights.
class MLJointBoostingImpl
{
public:
    MLJointBoostingImpl();
    ~MLJointBoostingImpl();

    void init(const Eigen::MatrixXd& data, const Eigen::MatrixXi& labels);

    // Runs params_.roundM boosting rounds; returns an ErrorCode value.
    int     train   (const Eigen::MatrixXd& trainData, const Eigen::MatrixXi& labels);
    // Strong-classifier score: sum of weak learner responses for one sample/class.
    double  evaluate(const Eigen::MatrixXd& trainData, int dataIdx, int classIdx) const;
    double  evaluate(const Eigen::RowVectorXd& oneData, int classIdx) const;
    // Score matrix for all samples (rows) and all classes (columns).
    Eigen::MatrixXd evaluate(const Eigen::MatrixXd& oneData) const;

    // TODO: stub — always returns 0 (see definition below).
    double evalError(const Eigen::MatrixXd& trainData, const Eigen::MatrixXi& labels);

    void setParams(int roundM, int stepNum);
    double getEvalThreshold() const;

    int getWeakLearnerCount() const;
    MLWeakLearner* getWeakLearnerOf(int idx);
    MLJointBoostingParams* getParams();
    // Takes ownership of weakLearner (ignored if NULL).
    void addWeakLearner(MLWeakLearner* weakLearner);

private:
    void destroy();
    // Multiplies weights by exp(-h * response) after a round (boosting update).
    void updateWeights(MLWeakLearner* weakLearner, const Eigen::MatrixXd& trainData, const Eigen::MatrixXi& labels, Eigen::MatrixXd& weights);

private:
    Eigen::MatrixXd weights_;                   // nDataSize x nClassSize boosting weights
    std::vector<MLWeakLearner*> weakLearners_;  // owned; released in destroy()
    MLJointBoostingParams params_;

    Eigen::MatrixXd Hs_;                        // accumulated scores (allocated in train, currently unused afterwards)
    double evalThreshold_;                      // only ever set to 0 in the constructor
};

/// Sets the default training configuration: 30 boosting rounds, 10 threshold
/// steps, no user-defined subclasses or threshold, zero evaluation threshold.
MLJointBoostingImpl::MLJointBoostingImpl()
    : evalThreshold_(0)
{
    params_.bUseUserDefinedSubClasses = false;
    params_.bUseUserDefinedThreshold  = false;
    params_.roundM  = 30;
    params_.stepNum = 10;
}

// Releases every weak learner owned by this object.
MLJointBoostingImpl::~MLJointBoostingImpl()
{
    destroy();
}

/// Deletes all owned weak learners and empties the container.
/// Fixes: signed/unsigned comparison in the loop (int vs size()), and drops
/// the redundant NULL check — `delete` on a null pointer is a no-op.
void MLJointBoostingImpl::destroy()
{
    for (std::size_t i = 0; i < weakLearners_.size(); ++i)
        delete weakLearners_[i];
    weakLearners_.clear();
}

/// Overrides the two main training knobs.
/// @param roundM  number of boosting rounds
/// @param stepNum number of threshold search steps per stump
void MLJointBoostingImpl::setParams(int roundM, int stepNum)
{
    params_.stepNum = stepNum;
    params_.roundM  = roundM;
}

// Returns the evaluation threshold (only ever set to 0 in the constructor).
double MLJointBoostingImpl::getEvalThreshold() const
{
    return evalThreshold_;
}

/// Trains the joint boosting classifier.
/// Runs params_.roundM rounds; in each round it greedily grows a shared
/// subclass set one class at a time, trains a decision stump per candidate
/// set, keeps the stump with the lowest weighted error, updates the boosting
/// weights, and stores the stump.
/// @param trainData one sample per row, one feature dimension per column
/// @param labels    one sample per row, one column per class
/// @return ErrorCode::ERROR_NO_Error on success, an error code otherwise
int MLJointBoostingImpl::train(const Eigen::MatrixXd& trainData, const Eigen::MatrixXi& labels)
{
    int ret = ErrorCode::ERROR_NO_Error;
    int nDataSize = trainData.rows();
    int nDataDim  = trainData.cols();
    int nClassSize = labels.cols();
    if (nDataSize==0 || nDataDim==0 || nClassSize ==0)
        return ErrorCode::ERROR_Invalid_Parameter;

    params_.classSize = nClassSize;
    // initialize weights as Ones(nDataSize, nClassSize)
    weights_.resize(nDataSize, nClassSize);
    weights_.setOnes();
    Hs_.resize(nDataSize, nClassSize);
    Hs_.setZero();

    MLDecisionStumpPreprocess preProcess;
    preProcess.setMaxClassNum(nClassSize);

    // repeat for M rounds
    for (int m = 0; m < params_.roundM; m++)
    {
        preProcess.preProcess(trainData, labels, weights_);
        double minError = DBL_MAX;
        MLDecisionStump* bestWeakLearner = NULL;
        std::list<int> remainingClass;
        std::list<int> addedClass;
        for (int c = 0; c < nClassSize; c++)
        {
            remainingClass.push_back(c);
        }
        // Scratch stump, reused for every candidate subclass set in this round.
        MLDecisionStump* stump = new MLDecisionStump;
        while(remainingClass.size()>1)  // do not use background label at current time
        {
            // One candidate per remaining class: the classes added so far plus
            // that one class.
            MLSubClassFolder candidateSubClasses;
            for (std::list<int>::iterator itC = remainingClass.begin(); itC!=remainingClass.end(); itC++)
            {
                std::vector<int> oneSubClass;
                oneSubClass.insert(oneSubClass.end(), addedClass.begin(), addedClass.end());
                oneSubClass.push_back(*itC);
                candidateSubClasses.addSubClass(oneSubClass, *itC);
            }

            stump->reset();
            stump->setSubClassFolder(&candidateSubClasses);
            stump->setPreprocessData(&preProcess);
            ret = stump->train(trainData, labels, weights_);
            if (ret!=ErrorCode::ERROR_NO_Error)
            {
                DebugLog << "Training failed.";
                // FIX: the scratch stump and any best-so-far copy leaked on
                // this early-return path. delete on NULL is a no-op.
                delete stump;
                delete bestWeakLearner;
                return ret;
            }
            double error = stump->evalError(trainData, labels, weights_);
            if (error < minError)
            {
                // keep a private copy of the best stump found so far
                minError = error;
                if (bestWeakLearner)
                    delete bestWeakLearner;
                bestWeakLearner = new MLDecisionStump(*stump);
            }
            // update remainingClass and addedClass
            int theAddedClass = candidateSubClasses.getTheNewAddedClass(stump->getTheBestClassIdx());
            remainingClass.erase(std::find(remainingClass.begin(), remainingClass.end(), theAddedClass));
            addedClass.push_back(theAddedClass);
        }
        // FIX: the scratch stump was allocated every round but never freed.
        delete stump;

        // FIX: guard against nClassSize <= 1 — the while loop never runs then,
        // bestWeakLearner stays NULL, and updateWeights would dereference it.
        if (bestWeakLearner)
        {
            updateWeights(bestWeakLearner, trainData, labels, weights_);
            DebugLog << bestWeakLearner;

            // save the best weak learner (ownership transfers to weakLearners_)
            addWeakLearner(bestWeakLearner);
        }
    }

    return ret;
}

/// Strong-classifier score for one (sample, class) pair: the sum of all
/// weak learner responses.
double MLJointBoostingImpl::evaluate(const Eigen::MatrixXd& data, int dataIdx, int classIdx) const
{
    double sum = 0.0;
    for (std::size_t k = 0; k < weakLearners_.size(); ++k)
        sum += weakLearners_[k]->evaluate(data, dataIdx, classIdx);
    return sum;
}

/// Strong-classifier score for a single sample vector and one class.
double MLJointBoostingImpl::evaluate(const Eigen::RowVectorXd& oneData, int classIdx) const
{
    double sum = 0.0;
    for (std::size_t k = 0; k < weakLearners_.size(); ++k)
        sum += weakLearners_[k]->evaluate(oneData, classIdx);
    return sum;
}

/// Scores every sample against every class.
/// @return an (nDataSize x classSize) matrix of strong-classifier scores.
Eigen::MatrixXd MLJointBoostingImpl::evaluate(const Eigen::MatrixXd& data) const
{
    const int rows = data.rows();
    const int cols = params_.classSize;
    Eigen::MatrixXd scores(rows, cols);
    for (int r = 0; r < rows; ++r)
        for (int c = 0; c < cols; ++c)
            scores(r, c) = evaluate(data, r, c);
    return scores;
}

// TODO: not implemented — always reports zero error regardless of input.
double MLJointBoostingImpl::evalError(const Eigen::MatrixXd& trainData, const Eigen::MatrixXi& labels)
{
    return 0;
}

/// Multiplicative boosting weight update: weights(i,c) *= exp(-h(i,c) * z(i,c)),
/// where h is the weak learner's score and z is its label response for class c
/// (presumably +/-1 so that misclassified samples gain weight — confirm
/// against MLWeakLearner::getLabelResponse).
/// Fix: removed the unused local nDataDim; qualified exp as std::exp.
void MLJointBoostingImpl::updateWeights(MLWeakLearner* weakLearner, const Eigen::MatrixXd& data, const Eigen::MatrixXi& labels, Eigen::MatrixXd& weights)
{
    int nDataSize = data.rows();
    int nClassSize = labels.cols();
    for (int i = 0; i < nDataSize; i++)
    {
        for (int c = 0; c < nClassSize; c++)
        {
            double w = weakLearner->evaluate(data, i, c);
            int response = weakLearner->getLabelResponse(labels.row(i), c);
            weights(i, c) *= std::exp(-w * response);
        }
    }
}

/// Number of weak learners trained (or added) so far.
int MLJointBoostingImpl::getWeakLearnerCount() const
{
    return static_cast<int>(weakLearners_.size());
}

// Returns the weak learner at idx. No bounds check — the caller must keep
// idx within [0, getWeakLearnerCount()). Ownership stays with this object.
MLWeakLearner* MLJointBoostingImpl::getWeakLearnerOf(int idx)
{
    return weakLearners_[idx];
}

// Exposes the mutable parameter block; the pointer stays valid for the
// lifetime of this object.
MLJointBoostingParams* MLJointBoostingImpl::getParams()
{
    return &params_;
}

/// Appends a weak learner, taking ownership of it. NULL is silently ignored.
void MLJointBoostingImpl::addWeakLearner(MLWeakLearner* weakLearner)
{
    if (weakLearner == NULL)
        return;
    weakLearners_.push_back(weakLearner);
}

///////////////////////////////////////////
//

/// Creates the boosting facade together with its private implementation.
MLJointBoosting::MLJointBoosting()
    : impl_(new MLJointBoostingImpl)
{
}

// NOTE(review): impl_'s declared type lives in the header, which is not
// visible here. If impl_ is a smart pointer, this out-of-line destructor is
// the standard incomplete-type pimpl idiom and is correct; if impl_ is a raw
// pointer, the MLJointBoostingImpl allocated in the constructor leaks — confirm.
MLJointBoosting::~MLJointBoosting()
{
}

// Intentionally empty: this interface hook is unused by the joint boosting
// learner (training goes through train()).
void MLJointBoosting::init(const Eigen::MatrixXd& data)
{
    // NOT used
}

// Intentionally empty: this interface hook is unused by the joint boosting
// learner (training goes through train()).
void MLJointBoosting::init(const Eigen::MatrixXd& data, const Eigen::MatrixXi& labels)
{
    // NOT used
}

// Not implemented for this learner; always reports failure.
bool MLJointBoosting::commit()
{
    return false;
}

/// Forwards the training configuration to the implementation.
void MLJointBoosting::setParams(int roundM, int stepNum)
{
    impl_->setParams(roundM, stepNum);
}

/// Forwards training to the implementation; returns its ErrorCode result.
int MLJointBoosting::train(const Eigen::MatrixXd& trainData, const Eigen::MatrixXi& labels)
{
    return impl_->train(trainData, labels);
}

/// Forwards the per-(sample, class) evaluation to the implementation.
double MLJointBoosting::evaluate(const Eigen::MatrixXd& trainData, int dataIdx, int classIdx) const
{
    return impl_->evaluate(trainData, dataIdx, classIdx);
}

/// Forwards the single-sample, single-class evaluation to the implementation.
double MLJointBoosting::evaluate(const Eigen::RowVectorXd& oneData, int classIdx) const
{
    return impl_->evaluate(oneData, classIdx);
}

// NOTE(review): MLJointBoostingImpl declares no evaluate(RowVectorXd)
// overload returning a matrix, so this call resolves to
// evaluate(const Eigen::MatrixXd&) through an implicit RowVectorXd ->
// MatrixXd conversion, and the 1 x classSize result converts back to
// RowVectorXd on return. That costs two temporary copies — presumably
// intended, but verify overload resolution against the impl class.
Eigen::RowVectorXd MLJointBoosting::evaluate(const Eigen::RowVectorXd& oneData) const
{
    return impl_->evaluate(oneData);
}

/// Forwards error evaluation to the implementation (currently a stub there).
double MLJointBoosting::evalError(const Eigen::MatrixXd& trainData, const Eigen::MatrixXi& labels)
{
    return impl_->evalError(trainData, labels);
}

/// Predicts a one-hot class assignment per sample: each row of the returned
/// matrix has a 1 in the column of the highest score, 0 elsewhere.
/// @param data    one sample per row
/// @param evalmat [out] the raw (nDataSize x classSize) score matrix
/// @return an integer matrix of the same shape as evalmat
/// Fix: when evalmat has zero columns the inner loop never runs and maxC
/// stayed -1, so ret(iData, -1) indexed out of bounds; now guarded.
Eigen::MatrixXi MLJointBoosting::predict(const Eigen::MatrixXd& data, Eigen::MatrixXd& evalmat) const
{
    evalmat = impl_->evaluate(data);
    Eigen::MatrixXi ret(data.rows(), evalmat.cols());
    ret.setZero();
    for (int iData = 0; iData < data.rows(); iData++)
    {
        // Arg-max over classes for this sample.
        double maxValue = -DBL_MAX;
        int maxC = -1;
        for (int i = 0; i < evalmat.cols(); i++)
        {
            double value = evalmat(iData, i);
            if (value > maxValue)
            {
                maxValue = value;
                maxC = i;
            }
        }
        if (maxC >= 0)
            ret(iData, maxC) = 1;
    }
    return ret;
}

/// Number of weak learners held by the implementation.
int MLJointBoosting::getWeakLearnerCount() const
{
    return impl_->getWeakLearnerCount();
}

/// Returns the idx-th weak learner; ownership stays with the implementation.
MLWeakLearner* MLJointBoosting::getWeakLearnerOf(int idx) const
{
    return impl_->getWeakLearnerOf(idx);
}

/// Exposes the implementation's mutable parameter block.
MLJointBoostingParams* MLJointBoosting::getParams() const
{
    return impl_->getParams();
}

/// Hands a weak learner over to the implementation (which takes ownership).
void MLJointBoosting::addWeakLearner(MLWeakLearner* weakLearner)
{
    impl_->addWeakLearner(weakLearner);
}

NAMESPACE_MACHINE_LEARNING_END
GLOBAL_NAMESPACE_END
