#include "DecisionStump.h"
#include "../Tools/ZLogManager.h"
#include "../Math/MathTools.h"
#include "../Common/ErrorCode.h"

GLOBAL_NAMESPACE_BEGIN

NAMESPACE_MACHINE_LEARNING_BEGIN

// Default-construct an untrained stump with every member in a known state.
// The previous empty body left the pointer members (maxCoeffs_, minCoeffs_,
// t0c_) and the scalar members indeterminate until reset()/setters ran.
DecisionStump::DecisionStump()
{
    reset();
    // reset() covers the stump parameters and the search state; make the
    // weight-precompute cache well-defined too.
    t0c_ = NULL;
    totalWeight_ = 0;
}

// Nothing to release: maxCoeffs_/minCoeffs_/t0c_ are non-owning observers of
// caller-owned vectors (see setMaxMinCoeffs / setWeightPrecomputeData).
DecisionStump::~DecisionStump()
{}

// Copy constructor.
// The previous version copied param_ field-by-field but skipped tmpParam_,
// t0c_ and totalWeight_, leaving them indeterminate in the copy; all members
// are copied here.
// NOTE(review): only the copy constructor is user-defined in this file; if no
// copy-assignment operator exists in the header, the compiler-generated one
// performs a full memberwise copy — confirm that is intended (Rule of Three).
DecisionStump::DecisionStump(const DecisionStump& rhs)
{
    param_    = rhs.param_;     // copies subClass, dimF, bGreater, threshold, a, b, k
    tmpParam_ = rhs.tmpParam_;  // previously left uninitialized by this ctor

    maxCoeffs_ = rhs.maxCoeffs_;
    minCoeffs_ = rhs.minCoeffs_;
    stepNum_   = rhs.stepNum_;
    maxLabel_  = rhs.maxLabel_;
    classIdx_  = rhs.classIdx_;

    // Weight-precompute cache (previously not copied).
    t0c_         = rhs.t0c_;
    totalWeight_ = rhs.totalWeight_;
}

void DecisionStump::reset()
{
    param_.subClass.clear();
    param_.dimF       = -1;
    param_.bGreater   = false;
    param_.threshold  = 0;
    param_.a = 0;
    param_.b = 0;
    param_.k = 0;

    maxCoeffs_ = NULL;
    minCoeffs_ = NULL;
    stepNum_   = 0;
    maxLabel_ = -1;
    classIdx_ = -1;

}

// Define the set of class labels this stump treats as its positive subclass.
// @param classes      labels belonging to the stump (the subclass set S(n))
// @param maxLabel     upper bound for class indices; loops elsewhere iterate
//                     c in [0, maxLabel)
// @param thisClassIdx stored in classIdx_; not read anywhere in this file —
//                     presumably used by callers, verify against them
void DecisionStump::setSubClasses(const std::set<int>& classes, int maxLabel, int thisClassIdx)
{
    param_.subClass = classes;
    maxLabel_ = maxLabel;
    classIdx_ = thisClassIdx;
}

// Register the per-dimension feature ranges used by the threshold grid search.
// Only pointers are stored: the caller must keep maxCoeffs/minCoeffs alive for
// as long as this stump may call train().
// @param stepNum number of threshold steps sampled per dimension
void DecisionStump::setMaxMinCoeffs(const std::vector<double>& maxCoeffs, const std::vector<double>& minCoeffs, int stepNum)
{
    maxCoeffs_ = &maxCoeffs;
    minCoeffs_ = &minCoeffs;
    stepNum_ = stepNum;
}

// Cache precomputed weight sums for training. Only a pointer to t0c is kept,
// so the caller owns its lifetime.
// NOTE(review): t0c_ and totalWeight_ are stored but never read in this file —
// confirm they are consumed elsewhere (or by a pending optimization).
void DecisionStump::setWeightPrecomputeData(const std::vector<double>& t0c, double totalWeight)
{
    t0c_ = &t0c;
    totalWeight_ = totalWeight;
}

// True when 'label' belongs to the stump's positive subclass set.
// @param bUseTmpParam query the temporary (method-2 search) parameters
//                     instead of the learned ones
bool DecisionStump::isLabelInStump(int label, bool bUseTmpParam) const
{
    const std::set<int>& subClass = bUseTmpParam ? tmpParam_.subClass
                                                 : param_.subClass;
    return subClass.count(label) > 0;
}

// Binary response of the stump's subclass set: +1 for labels it contains,
// -1 for all others.
int DecisionStump::getLabelResponse(int label) const
{
    return isLabelInStump(label) ? 1 : -1;
}

int DecisionStump::getLabelResponse(int label, bool useTmpParam)
{
    if (useTmpParam)
        return tmpParam_.subClass.find(label)!=tmpParam_.subClass.end();
    else
        return param_.subClass.find(label)!=param_.subClass.end();
}

// Train the stump: locate the best (dimension, threshold, polarity) with the
// method-2 grid search, then fit the regression outputs a/b/k.
// @return 0 on success; -1 when setMaxMinCoeffs() was never called; otherwise
//         the error code propagated from buildStump()/trainStump()
int DecisionStump::train(const Eigen::MatrixXd& trainData, const Eigen::VectorXi& labels, const Eigen::MatrixXd& weights)
{
    // Feature ranges must have been registered up front.
    if (maxCoeffs_ == NULL || minCoeffs_ == NULL)
        return -1;

    int ret = buildStump(trainData, labels, weights, *maxCoeffs_, *minCoeffs_, stepNum_, true);
    if (ret == 0)
        ret = trainStump(trainData, labels, weights);
    return ret;
}

// Exhaustive grid search for the best decision stump (original method):
// for each threshold step j, feature dimension d and polarity
// (greater/lesser) the weighted error over the subclass set is accumulated,
// and the arg-min candidate is stored into param_.dimF/bGreater/threshold.
// @return 0 on success, 1 when no data was given, 2 on label/data size mismatch
int DecisionStump::buildStump(const Eigen::MatrixXd& trainData, const Eigen::VectorXi& labels, const Eigen::MatrixXd& weights,
        const std::vector<double>& maxCoeffs, const std::vector<double>& minCoeffs, int stepNum)
{
    int nDataSize = trainData.rows();
    int nDataDim  = trainData.cols();
    if (nDataSize==0 || nDataDim==0)
    {
        DebugLog << "No data input.";
        return 1;
    }
    if (labels.size()!=nDataSize)
    {    
        DebugLog << "label and data dimensions are not the same";
        return 2;
    }

    double  minError = DBL_MAX;
    int     bestDim = -1;
    bool    bestGreater = false;
    double  bestThreshold = 0;

    // Flatten the subclass set once so the hot loop indexes by position.
    std::vector<int> classes;
    classes.insert(classes.begin(), param_.subClass.begin(), param_.subClass.end());
    for (int j = 0; j < stepNum; j++)   // theta
    {
        for (int d = 0; d < nDataDim; d++)  // dimension
        {
            double stepSize = (maxCoeffs[d] - minCoeffs[d]) / stepNum;
            double rangeMin = minCoeffs[d];
            bool   greater = true;

            for (int kk = 0; kk < 2; kk++, greater = false) // for greater or lesser (in weak learner)
            {
                double threshold = rangeMin + stepSize*j;
                // s1c/s0c: weight mass of positive/negative samples landing on
                // their "correct" side; t1c/t0c: total positive/negative mass.
                double s0c=0, s1c=0, t0c=0, t1c=0;
                for (int x = 0; x < nDataSize; x++)
                {
                    for (int cc = 0; cc < classes.size(); cc++)
                    {
                        int c = classes[cc];
                        //if (labels[x]==c)
                        // NOTE(review): this test is independent of c — the
                        // class loop only varies the weight column summed.
                        if ( getLabelResponse(labels(x))==1 )
                        {
                            t1c += weights(x, c);
                            if (greater && trainData(x, d) > threshold)
                                s1c += weights(x, c);
                            else if (!greater && trainData(x, d) < threshold)
                                s1c += weights(x, c);
                        }
                        else
                        {
                            t0c += weights(x, c);
                            if (greater && trainData(x, d) <= threshold)
                                s0c += weights(x, c);
                            else if (!greater && trainData(x, d) >= threshold)
                                s0c += weights(x, c);
                        }
                    }
                }

                // Candidate score; smaller is better. Presumably the weak-
                // learner error bound from the boosting formulation — the
                // exact derivation is not visible in this file.
                double error = MathTools::max(s1c+t0c-s0c, s0c+t1c-s1c);
                if (error < minError)
                {
                    minError = error;
                    bestDim = d;
                    bestGreater = greater;
                    bestThreshold = threshold;
                }            
            }
        }
    }

    // At least one candidate was examined whenever stepNum > 0, so a best
    // dimension must have been recorded.
    assert(bestDim >= 0);
    param_.dimF = bestDim;
    param_.bGreater = bestGreater;
    param_.threshold = bestThreshold;

    return 0;
}

// Fit the regression outputs of an already-positioned stump (param_.dimF,
// bGreater, threshold must be set):
//   a = weighted mean response on the side selected by the threshold test,
//   b = weighted mean response on the opposite side
//       (both over classes inside the subclass set),
//   k = weighted mean response over classes outside the subclass set.
// Zero denominators yield 0 for the corresponding output. Always returns 0.
int DecisionStump::trainStump(const Eigen::MatrixXd& trainData, const Eigen::VectorXi& labels, const Eigen::MatrixXd& weights)
{
    int nDataSize = trainData.rows();
    int nDataDim  = trainData.cols();   // NOTE(review): unused
    int nClassSize = weights.cols();    // NOTE(review): unused
    double numeratorP = 0, denominatorP = 0;    // c in S(n)
    double numeratorN = 0, denominatorN = 0;    // c in S(n)
    double numeratorQ = 0, denominatorQ = 0;    // c not in S(n)
    for ( int c = 0; c < maxLabel_; c++ )
    {
        if ( isLabelInStump(c) )
        {
            for (int i = 0; i < nDataSize; i++)
            {
                double value = trainData(i, param_.dimF);
                int predict = getLabelResponse(labels(i));
                // Threshold test: which side of the stump the sample falls on.
                if ( (param_.bGreater && value > param_.threshold) 
                    || (!param_.bGreater && value < param_.threshold) )
                {
                    numeratorP   += weights(i, c) * predict;
                    denominatorP += weights(i, c);
                }
                else
                {
                    numeratorN   += weights(i, c) * predict;
                    denominatorN += weights(i, c);
                }

            }
        }
        else
        {
            // Classes outside the subclass set share one constant output k.
            for (int i=0; i < nDataSize; i++)
            {
                int predict = getLabelResponse(labels(i));
                numeratorQ   += weights(i, c) * predict;
                denominatorQ += weights(i, c);
            }
        }
    }

    // Guard against empty sides / empty class partitions.
    param_.a = MathTools::isZero(denominatorP) ? 0 : numeratorP/denominatorP;
    param_.b = MathTools::isZero(denominatorN) ? 0 : numeratorN/denominatorN;
    param_.k = MathTools::isZero(denominatorQ) ? 0 : numeratorQ/denominatorQ;
    return 0;
}

// Grid search for the best stump, method 2: instead of the closed-form error
// of the other overload, each candidate (j, d, polarity) is fully fitted into
// tmpParam_ via trainStump(..., true) and scored with evalError(..., true);
// the arg-min candidate is stored into param_.
// @param method2 selector flag distinguishing this overload (value unused)
// @return 0 on success, 1 when no data was given, 2 on label/data size mismatch
int DecisionStump::buildStump(const Eigen::MatrixXd& trainData, const Eigen::VectorXi& labels, const Eigen::MatrixXd& weights,
        const std::vector<double>& maxCoeffs, const std::vector<double>& minCoeffs, int stepNum, bool method2)
{
    int nDataSize = trainData.rows();
    int nDataDim  = trainData.cols();
    if (nDataSize==0 || nDataDim==0)
    {
        DebugLog << "No data input.";
        return 1;
    }
    if (labels.size()!=nDataSize)
    {    
        DebugLog << "label and data dimensions are not the same";
        return 2;
    }

    double  minError = DBL_MAX;
    int     bestDim = -1;
    bool    bestGreater = false;
    double  bestThreshold = 0;

    //std::vector<int> classes;
    //classes.insert(classes.begin(), param_.subClass.begin(), param_.subClass.end());
    // tmpParam_ serves as scratch state for the candidate evaluation below.
    tmpParam_.subClass = param_.subClass;
    for (int j = 0; j < stepNum; j++)   // theta
    {
        for (int d = 0; d < nDataDim; d++)  // dimension
        {
            double stepSize = (maxCoeffs[d] - minCoeffs[d]) / stepNum;
            double rangeMin = minCoeffs[d];
            bool   greater = true;

            for (int kk = 0; kk < 2; kk++, greater = false) // for greater or lesser (in weak learner)
            {
                double threshold = rangeMin + stepSize*j;
                tmpParam_.bGreater = greater;
                tmpParam_.threshold = threshold;
                tmpParam_.dimF = d;
                // NOTE(review): trainStump's return value is ignored here
                // (it currently always returns 0).
                trainStump(trainData, labels, weights, true);

                double error = evalError(trainData, labels, weights, true);//MathTools::max(s1c+t0c-s0c, s0c+t1c-s1c);
                if (error < minError)
                {
                    minError = error;
                    bestDim = d;
                    bestGreater = greater;
                    bestThreshold = threshold;
                }            
            }
        }
    }

    // At least one candidate is examined whenever stepNum > 0.
    assert(bestDim >= 0);
    param_.dimF = bestDim;
    param_.bGreater = bestGreater;
    param_.threshold = bestThreshold;

    return 0;
}

// Method-2 variant of trainStump: identical fitting of (a, b, k), but reads
// the candidate position from tmpParam_ and writes the outputs back into
// tmpParam_, leaving the learned param_ untouched during the search.
// @param method2 selector flag distinguishing this overload (value unused)
int DecisionStump::trainStump(const Eigen::MatrixXd& trainData, const Eigen::VectorXi& labels, const Eigen::MatrixXd& weights, bool method2)
{
    int nDataSize = trainData.rows();
    int nDataDim  = trainData.cols();   // NOTE(review): unused
    int nClassSize = weights.cols();    // NOTE(review): unused
    double numeratorP = 0, denominatorP = 0;    // c in S(n)
    double numeratorN = 0, denominatorN = 0;    // c in S(n)
    double numeratorQ = 0, denominatorQ = 0;    // c not in S(n)
    for ( int c = 0; c < maxLabel_; c++ )
    {
        if ( isLabelInStump(c, true) )
        {
            for (int i = 0; i < nDataSize; i++)
            {
                double value = trainData(i, tmpParam_.dimF);
                // NOTE(review): this calls the one-argument (param_-based)
                // overload; it matches the tmp set only because the caller
                // copies param_.subClass into tmpParam_.subClass — confirm.
                int predict = getLabelResponse(labels(i));
                if ( (tmpParam_.bGreater && value > tmpParam_.threshold) 
                    || (!tmpParam_.bGreater && value < tmpParam_.threshold) )
                {
                    numeratorP   += weights(i, c) * predict;
                    denominatorP += weights(i, c);
                }
                else
                {
                    numeratorN   += weights(i, c) * predict;
                    denominatorN += weights(i, c);
                }

            }
        }
        else
        {
            // Classes outside the subclass set share one constant output k.
            for (int i=0; i < nDataSize; i++)
            {
                int predict = getLabelResponse(labels(i));
                numeratorQ   += weights(i, c) * predict;
                denominatorQ += weights(i, c);
            }
        }
    }

    // Guard against empty sides / empty class partitions.
    tmpParam_.a = MathTools::isZero(denominatorP) ? 0 : numeratorP/denominatorP;
    tmpParam_.b = MathTools::isZero(denominatorN) ? 0 : numeratorN/denominatorN;
    tmpParam_.k = MathTools::isZero(denominatorQ) ? 0 : numeratorQ/denominatorQ;
    return 0;
}

// Weighted squared error of the trained stump over all samples and classes:
//   err = sum_i sum_c weights(i,c) * (y_i - h(i,c))^2
// where y_i is the +/-1 label response and h(i,c) the stump output.
double DecisionStump::evalError(const Eigen::MatrixXd& data, const Eigen::VectorXi& labels, const Eigen::MatrixXd& weights) const
{
    double err = 0;
    int nDataSize = data.rows();
    for (int i = 0; i < nDataSize; i++)
    {
        int label = labels(i);
        // The response depends only on the sample's label, not on the class
        // index, so hoist it out of the inner loop (was recomputed per class).
        int response = getLabelResponse(label);
        for (int c = 0; c < maxLabel_; c++)
        {
            double hm = evaluate(data, i, c);
            double diff = response - hm;
            err += weights(i, c) * diff * diff;
        }
    }
    return err;
}

// Weighted squared error, method-2 variant: when method2 is true the
// temporary search parameters (tmpParam_) drive both the label response and
// the stump evaluation.
//   err = sum_i sum_c weights(i,c) * (y_i - h(i,c))^2
double DecisionStump::evalError(const Eigen::MatrixXd& data, const Eigen::VectorXi& labels, const Eigen::MatrixXd& weights, bool method2)
{
    double err = 0;
    int nDataSize = data.rows();
    for (int i = 0; i < nDataSize; i++)
    {
        int label = labels(i);
        // The response depends only on the sample's label and the method2
        // flag, so hoist it out of the class loop (was recomputed per class).
        int response = getLabelResponse(label, method2);
        for (int c = 0; c < maxLabel_; c++)
        {
            double hm = evaluate(data, i, c, method2);
            double diff = response - hm;
            err += weights(i, c) * diff * diff;
        }
    }
    return err;
}

// Stump output h(x, c) for sample 'dataIdx' and class 'classIdx':
//   k              when the class is outside the subclass set,
//   a              when the feature passes the threshold test,
//   b              otherwise.
double DecisionStump::evaluate(const Eigen::MatrixXd& data, int dataIdx, int classIdx) const
{
    if (!isLabelInStump(classIdx))
        return param_.k;

    double value = data(dataIdx, param_.dimF);
    bool passes = param_.bGreater ? (value > param_.threshold)
                                  : (value < param_.threshold);
    return passes ? param_.a : param_.b;
}

// Stump output for sample 'dataIdx' and class 'classIdx', method-2 variant:
// when method2 is true the temporary search parameters (tmpParam_) are used;
// otherwise this forwards to the const overload on param_.
// BUG FIX: the 'greater' branch previously returned param_.b instead of
// tmpParam_.b (copy-paste from the const overload), mixing the learned and
// the candidate parameters during the method-2 search.
double DecisionStump::evaluate(const Eigen::MatrixXd& data, int dataIdx, int classIdx, bool method2)
{
    if (!method2)
        return evaluate(data, dataIdx, classIdx);

    if (!isLabelInStump(classIdx, method2))
        return tmpParam_.k;

    double value = data(dataIdx, tmpParam_.dimF);
    if (tmpParam_.bGreater)
        return value > tmpParam_.threshold ? tmpParam_.a : tmpParam_.b;
    else
        return value < tmpParam_.threshold ? tmpParam_.a : tmpParam_.b;
}

// Unimplemented stub: always returns -1 regardless of the input sample.
// NOTE(review): callers needing the stump's real output must use evaluate();
// confirm whether a label prediction is meant to be implemented here.
int DecisionStump::predict(const Eigen::RowVectorXd& oneData) const
{
    return -1;
}

// Stump output for a single sample row and class index:
//   k when the class is outside the subclass set, otherwise a or b depending
//   on which side of the threshold the selected feature falls.
double DecisionStump::evaluate(const Eigen::RowVectorXd& oneData, int classIdx) const
{
    if (!isLabelInStump(classIdx))
        return param_.k;

    double value = oneData(param_.dimF);
    bool passes = param_.bGreater ? (value > param_.threshold)
                                  : (value < param_.threshold);
    return passes ? param_.a : param_.b;
}

// Write a human-readable dump of the learned stump parameters to 'out'.
void DecisionStump::output(std::ostream& out)
{
    out << " subclasses: ";
    std::set<int>::const_iterator it = param_.subClass.begin();
    for (; it != param_.subClass.end(); ++it)
        out << *it << " ";
    out << "\n"
        << " featureDim: " << param_.dimF << "\n"
        << " threshold:  " << param_.threshold << "\n"
        << " greater:    " << param_.bGreater << "\n"
        << " parameters: " << param_.a << " " << param_.b << " " << param_.k << "\n";
}

// Read-only access to the learned parameters; the returned pointer stays
// valid for the lifetime of this stump (it addresses a member).
const DecisionStump::Parameters* DecisionStump::getParameters() const
{
    return &param_;
}

NAMESPACE_MACHINE_LEARNING_END

GLOBAL_NAMESPACE_END
