#include "core/main.h"

void DecisionTreeNode::Train(const DecisionTreeConfig &config, const LearnerDataset &examples, const vector<UINT> &activeExampleIndices, UINT depthRemaining)
{
    bool makeLeafNode = (depthRemaining == 0 || activeExampleIndices.size() <= config.leafNodeCountCutoff);
    if(!makeLeafNode)
    {
        if(ChooseVariableAndThreshold(config, examples, activeExampleIndices))
        {
            for(UINT childIndex = 0; childIndex < childCount; childIndex++)
            {
                TrainChild(config, examples, activeExampleIndices, childIndex, depthRemaining);
            }
        }
        else
        {
            makeLeafNode = true;
        }
    }
    if(makeLeafNode)
    {
        InitLeafProbabilities(config, examples, activeExampleIndices);
    }
}

bool DecisionTreeNode::ChooseVariableAndThreshold(const DecisionTreeConfig &config, const LearnerDataset &examples, const vector<UINT> &activeExampleIndices)
{
    int bestDecisionIndex = -1;
    int bestDecisionValue = 0;
    double bestInformationGain = 0.0;

    const UINT variableCount = examples.examples[0].input.length;
    const UINT activeCount = activeExampleIndices.size();

    for(UINT variableIndex = 0; variableIndex < variableCount; variableIndex++)
    {
        set<int> valuesToTest;
        for(UINT activeIndex = 0; activeIndex < activeCount && valuesToTest.size() < config.maxTestsPerDimension; activeIndex++)
        {
            int candidateValue = examples.examples[activeExampleIndices[activeIndex]].input.data[variableIndex];
            valuesToTest.insert(candidateValue);
        }

        if(valuesToTest.size() > 1)
        {
            for(auto it = valuesToTest.begin(); it != valuesToTest.end(); it++)
            {
                int partitionTotalNodes[childCount];
                int partitionClass1WeightedSum[childCount];
                for(UINT childIndex = 0; childIndex < childCount; childIndex++)
                {
                    partitionTotalNodes[childIndex] = 0;
                    partitionClass1WeightedSum[childIndex] = 0;
                }

                _decisionVariableIndex = variableIndex;
                _decisionVariableValue = *it;

                int totalClass1WeightedSum = 0, totalCount = 0;
                for(UINT activeIndex = 0; activeIndex < activeCount; activeIndex++)
                {
                    const LearnerExample &curExample = examples.examples[activeExampleIndices[activeIndex]];
                    UINT childNodeIndex = ComputeChildIndex(curExample.input);

                    partitionTotalNodes[childNodeIndex]++;
                    totalCount++;

                    partitionClass1WeightedSum[childNodeIndex] += curExample.output;
                    totalClass1WeightedSum += curExample.output;
                }

                double totalEntropy = 0.0;
                if(totalCount > 0.0)
                {
                    totalEntropy = Entropy(double(totalClass1WeightedSum) / double(totalCount));
                }

                double leftEntropy = 0.0, rightEntropy = 0.0;
                if(partitionTotalNodes[0] > 0)
                {
                    leftEntropy = Entropy(double(partitionClass1WeightedSum[0]) / double(partitionTotalNodes[0]));
                }
                if(partitionTotalNodes[1] > 0)
                {
                    rightEntropy = Entropy(double(partitionClass1WeightedSum[1]) / double(partitionTotalNodes[1]));
                }
                double informationGain = totalCount * totalEntropy +
                    -partitionTotalNodes[0] * leftEntropy +
                    -partitionTotalNodes[1] * rightEntropy;

                if(informationGain > bestInformationGain)
                {
                    bestInformationGain = informationGain;
                    bestDecisionValue = _decisionVariableValue;
                    bestDecisionIndex = variableIndex;
                }
            }
        }
    }

    _decisionVariableIndex = bestDecisionIndex;
    _decisionVariableValue = bestDecisionValue;

    return (bestDecisionIndex != -1);
}

void DecisionTreeNode::InitLeafProbabilities(const DecisionTreeConfig &config, const LearnerDataset &examples, const vector<UINT> &activeExampleIndices)
{
    _class0Count = 0;
    _totalCount = activeExampleIndices.size();
    for (UINT exampleIndex = 0; exampleIndex < _totalCount; exampleIndex++)
    {
        const LearnerExample &curExample = examples.examples[activeExampleIndices[exampleIndex]];
        _class0Count += 1 - curExample.output;
    }
}

void DecisionTreeNode::TrainChild(const DecisionTreeConfig &config, const LearnerDataset &examples, const vector<UINT> &activeExampleIndices, UINT childIndex, UINT DepthRemaining)
{
    UINT childDatasetSize = 0;
    const UINT exampleCount = activeExampleIndices.size();
    for (UINT exampleIndex = 0; exampleIndex < exampleCount; exampleIndex++)
    {
        const LearnerExample &curExample = examples.examples[activeExampleIndices[exampleIndex]];
        if(ComputeChildIndex(curExample.input) == childIndex)
        {
            childDatasetSize++;
        }
    }

    vector<UINT> childActiveExampleIndices(childDatasetSize);

    UINT childExampleIndex = 0;
    for (UINT exampleIndex = 0; exampleIndex < exampleCount; exampleIndex++)
    {
        const LearnerExample &curExample = examples.examples[activeExampleIndices[exampleIndex]];
        if(ComputeChildIndex(curExample.input) == childIndex)
        {
            childActiveExampleIndices[childExampleIndex++] = activeExampleIndices[exampleIndex];
        }
    }

    _children[childIndex] = new DecisionTreeNode;
    _children[childIndex]->Train(config, examples, childActiveExampleIndices, DepthRemaining - 1);
}

void DecisionTree::Train(const LearnerDataset &dataset)
{
    // Seed the root with every example index; the recursion partitions the
    // indices as it descends.
    const UINT exampleCount = dataset.examples.size();
    vector<UINT> allExampleIndices;
    allExampleIndices.reserve(exampleCount);
    for(UINT exampleIndex = 0; exampleIndex < exampleCount; exampleIndex++)
    {
        allExampleIndices.push_back(exampleIndex);
    }

    _root.Train(_config, dataset, allExampleIndices, _config.maxTreeDepth);
}

OutputDataStream& operator << (OutputDataStream &stream, const DecisionTreeNode &node)
{
    // Pre-order serialization. A leading UINT tag distinguishes node kinds:
    // 1 = leaf (class counts follow), 0 = interior (split + children follow).
    if(!node.Leaf())
    {
        stream << UINT(0);
        stream << node._decisionVariableIndex << node._decisionVariableValue;
        for(UINT child = 0; child < DecisionTreeNode::childCount; child++)
        {
            stream << *node._children[child];
        }
    }
    else
    {
        stream << UINT(1);
        stream << node._class0Count << node._totalCount;
    }
    return stream;
}

InputDataStream& operator >> (InputDataStream &stream, DecisionTreeNode &node)
{
    // Mirror of operator<<: read the kind tag, then either the leaf counts or
    // the split parameters followed by recursively rebuilt child subtrees.
    UINT leafTag;
    stream >> leafTag;
    if(leafTag != 1)
    {
        stream >> node._decisionVariableIndex >> node._decisionVariableValue;
        for(UINT child = 0; child < DecisionTreeNode::childCount; child++)
        {
            node._children[child] = new DecisionTreeNode;
            stream >> *node._children[child];
        }
    }
    else
    {
        stream >> node._class0Count >> node._totalCount;
    }
    return stream;
}

// Serializes a whole tree: the raw config struct first, then the node
// hierarchy in pre-order (see operator<< for DecisionTreeNode).
OutputDataStream& operator << (OutputDataStream &stream, const DecisionTree &tree)
{
    stream.WriteData(tree._config);
    stream << tree._root;
    return stream;
}

// Deserializes a whole tree; must read in the exact order operator<< wrote:
// config struct first, then the node hierarchy.
InputDataStream& operator >> (InputDataStream &stream, DecisionTree &tree)
{
    stream.ReadData(tree._config);
    stream >> tree._root;
    return stream;
}
