﻿    using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Parallel = System.Threading.Tasks.Parallel;

namespace DecisionTree
{
    /// <summary>
    ///   C4.5 Learning algorithm for <see cref="DecisionTree">Decision Trees</see>.
    /// </summary>
    /// 
    /// <remarks>
    /// <para>
    ///   References:
    ///   <list type="bullet">
    ///     <item><description>
    ///       Quinlan, J. R. C4.5: Programs for Machine Learning. Morgan
    ///       Kaufmann Publishers, 1993.</description></item>
    ///     <item><description>
    ///       Quinlan, J. R. Improved use of continuous attributes in c4.5. Journal
    ///       of Artificial Intelligence Research, 4:77-90, 1996.</description></item>
    ///     <item><description>
    ///       Mitchell, T. M. Machine Learning. McGraw-Hill, 1997. pp. 55-58. </description></item>
    ///     <item><description><a href="http://en.wikipedia.org/wiki/ID3_algorithm">
    ///       Wikipedia, the free encyclopedia. ID3 algorithm. Available on 
    ///       http://en.wikipedia.org/wiki/ID3_algorithm </a></description></item>
    ///   </list>
    /// </para>   
    /// </remarks>
    ///
    /// <see cref="ID3Learning"/>
    ///
    [Serializable]
    public class C45Learning
    {
        // Candidate-sampling sizes used by CreateCandidates: the first
        // SingleAttributeCandidateCount candidates test one attribute, the
        // rest (up to TotalCandidateCount) test a conjunction of two.
        private const int SingleAttributeCandidateCount = 10;
        private const int TotalCandidateCount = 110;

        /// <summary>
        ///   Indices of the attributes whose single-attribute gain ratio was
        ///   positive on the root partition. Candidate splits at every node
        ///   are sampled from this list only.
        /// </summary>
        public List<int> NonZeroAttributes;

        /// <summary>The training input vectors (one byte-encoded attribute vector per sample).</summary>
        public byte[][] inputs;

        /// <summary>The training output labels, parallel to <see cref="inputs"/>.</summary>
        public byte[] outputs;

        private DecisionTree tree;
        private Range[] inputRanges;
        private byte outputClasses;
        private Random rnd = new Random();

        // FIFO work list of nodes that still have to be split (or made leaves).
        List<DecisionNode> JobList = new List<DecisionNode>();

        /// <summary>
        ///   Creates a new C4.5 learning algorithm.
        /// </summary>
        /// 
        /// <param name="tree">The decision tree to be generated.</param>
        /// 
        public C45Learning(DecisionTree tree)
        {
            this.tree = tree;

            this.inputRanges = new Range[tree.InputCount];
            this.outputClasses = tree.OutputClasses;

            for (int i = 0; i < inputRanges.Length; i++)
                inputRanges[i] = tree.Attributes[i].Range;
        }

        /// <summary>
        ///   Runs the learning algorithm, creating a decision
        ///   tree modeling the given inputs and outputs.
        /// </summary>
        /// 
        /// <param name="inputs">The inputs.</param>
        /// <param name="outputs">The corresponding outputs.</param>
        /// <param name="NodeInputOutputIDs">
        ///   Optional subset of sample indices to train on; when null,
        ///   every sample is used.</param>
        /// 
        /// <returns>The number of misclassified training samples.</returns>
        /// 
        public int Run(byte[][] inputs, byte[] outputs, int[] NodeInputOutputIDs = null)
        {
            this.inputs = inputs;
            this.outputs = outputs;

            // 1. Create a root node for the tree.
            tree.Root = new DecisionNode(tree);
            tree.Root.Level = 0;

            NonZeroAttributes = new List<int>();

            // When no explicit subset is given, train on all samples.
            tree.Root.NodeInputOutputIDs = NodeInputOutputIDs;
            if (NodeInputOutputIDs == null)
            {
                tree.Root.NodeInputOutputIDs = new int[outputs.Length];
                for (int i = 0; i < tree.Root.NodeInputOutputIDs.Length; i++)
                    tree.Root.NodeInputOutputIDs[i] = i;
            }

            // 2. Entropy of the root partition; the baseline against which
            //    each attribute's information gain is measured.
            double entropy = Tools.Entropy(this.outputs, tree.Root.NodeInputOutputIDs, outputClasses);

            double[] scores = new double[Parameters.InputLength];
            int[][][] partitions = new int[Parameters.InputLength][][];

            // Pre-filter: keep only attributes whose single-attribute split
            // (testing value == 1) has a positive gain ratio on the root
            // partition. split() samples its candidates from this list.
            for (int i = 0; i < scores.Length; i++)
            {
                scores[i] = computeGainRatio(this.inputs, this.outputs, tree.Root.NodeInputOutputIDs,
                    new Dictionary<int, byte>() { { i, 1 } }, entropy, out partitions[i]);
                if (scores[i] > 0)
                    NonZeroAttributes.Add(i);
            }

            JobList.Add(tree.Root);

            DoWork();

            return ComputeError(inputs, outputs, NodeInputOutputIDs);
        }

        /// <summary>
        ///   Drains the work list, splitting each pending node. split() may
        ///   append children while this loop runs; they are picked up in
        ///   FIFO order, exactly as with the original head-removal loop.
        /// </summary>
        private void DoWork()
        {
            // Walk with a cursor instead of RemoveAt(0): removing the head
            // of a List<T> shifts every remaining element, which made the
            // original loop quadratic in the number of nodes.
            for (int i = 0; i < JobList.Count; i++)
                split(JobList[i]);

            JobList.Clear();
        }

        /// <summary>
        ///   Computes the prediction error for the tree
        ///   over a given set of input and outputs.
        /// </summary>
        /// 
        /// <param name="inputs">The input points.</param>
        /// <param name="outputs">The corresponding output labels.</param>
        /// <param name="idxs">Optional subset of indices to evaluate;
        ///   when null, every sample is evaluated.</param>
        /// 
        /// <returns>The number of misclassified samples (a count, not a percentage).</returns>
        /// 
        public int ComputeError(byte[][] inputs, byte[] outputs, int[] idxs = null)
        {
            int miss = 0;

            if (idxs == null)
            {
                for (int i = 0; i < inputs.Length; i++)
                    if (tree.Compute(inputs[i]) != outputs[i])
                        miss++;
            }
            else
            {
                for (int i = 0; i < idxs.Length; i++)
                    if (tree.Compute(inputs[idxs[i]]) != outputs[idxs[i]])
                        miss++;
            }

            return miss;
        }

        /// <summary>
        ///   Splits a node: turns it into a leaf when it is pure, nearly
        ///   pure, or too few informative attributes remain; otherwise
        ///   selects the sampled candidate attribute set with the highest
        ///   gain ratio and creates two children.
        /// </summary>
        private void split(DecisionNode root)
        {
            double entropy = Tools.Entropy(this.outputs, root.NodeInputOutputIDs, outputClasses);

            // Pure node: all remaining samples share one class.
            if (entropy == 0)
            {
                if (root.NodeInputOutputIDs.Length == 0)
                {
                    // Empty partition: fall back to the parent's majority class.
                    root.Output = Tools.MostCommon(this.outputs, root.Parent.NodeInputOutputIDs, outputClasses);
                }
                else
                {
                    root.Output = this.outputs[root.NodeInputOutputIDs[0]];
                }
                return;
            }

            // Nearly pure node: stop early, label with the majority class.
            if (entropy < Parameters.EntropyTermination)
            {
                root.Output = Tools.MostCommon(this.outputs, root.NodeInputOutputIDs, outputClasses);
                return;
            }

            // Too few informative attributes left to build candidate splits:
            // make this node a leaf with the majority class.
            int predictors = NonZeroAttributes.Count;
            if (predictors < Parameters.UsedFeatureCountAtEveryNode)
            {
                root.Output = Tools.MostCommon(this.outputs, root.NodeInputOutputIDs, outputClasses);
                return;
            }

            // Score every sampled candidate. The arrays are sized from the
            // candidate list itself; the original sized them with
            // Parameters.UsedFeatureCountAtEveryNode, which throws
            // IndexOutOfRangeException whenever that parameter differs from
            // the 110 candidates CreateCandidates always returns.
            Dictionary<int, byte>[] attributeCandidates = CreateCandidates();

            double[] scores = new double[attributeCandidates.Length];
            int[][][] partitions = new int[attributeCandidates.Length][][];

            for (int i = 0; i < attributeCandidates.Length; i++)
            {
                scores[i] = computeGainRatio(this.inputs, this.outputs,
                    root.NodeInputOutputIDs, attributeCandidates[i], entropy, out partitions[i]);
            }

            // Select the candidate with the maximum gain ratio.
            int maxGainIndex = scores.MaxIndex();
            var maxGainPartition = partitions[maxGainIndex];
            var maxGainAttribute = attributeCandidates[maxGainIndex];

            // Matching samples land in maxGainPartition[1] (see computeInfo);
            // presumably RightAttributeValues holds the attribute/value pairs
            // that route a sample to the right child — confirm in DecisionNode.
            root.RightAttributeValues = maxGainAttribute;

            DecisionNode[] children = new DecisionNode[2];

            for (int i = 0; i < children.Length; i++)
            {
                children[i] = new DecisionNode(tree)
                {
                    Parent = root,
                    Level = root.Level + 1,
                    NodeInputOutputIDs = maxGainPartition[i],
                };

                // Children are queued for their own split pass.
                JobList.Add(children[i]);
            }

            root.Branches.AddRange(children);
        }

        /// <summary>
        ///   Randomly samples candidate splits from the informative
        ///   attributes: the first 10 test a single attribute for value 1,
        ///   the remaining 100 test a conjunction of two attribute/value
        ///   pairs with random 0/1 target values.
        /// </summary>
        private Dictionary<int, byte>[] CreateCandidates()
        {
            Dictionary<int, byte>[] rVal = new Dictionary<int, byte>[TotalCandidateCount];

            for (int i = 0; i < SingleAttributeCandidateCount; i++)
            {
                rVal[i] = new Dictionary<int, byte>() { { NonZeroAttributes[rnd.Next(NonZeroAttributes.Count)], 1 } };
            }

            for (int i = SingleAttributeCandidateCount; i < TotalCandidateCount; i++)
            {
                // Use indexer assignment, not a two-entry collection
                // initializer: the initializer calls Add() and throws
                // ArgumentException whenever the same attribute is drawn
                // twice. With the indexer the second draw overwrites the
                // first, leaving a valid single-attribute candidate.
                var candidate = new Dictionary<int, byte>();
                candidate[NonZeroAttributes[rnd.Next(NonZeroAttributes.Count)]] = (byte)rnd.Next(2);
                candidate[NonZeroAttributes[rnd.Next(NonZeroAttributes.Count)]] = (byte)rnd.Next(2);
                rVal[i] = candidate;
            }

            return rVal;
        }

        /// <summary>
        ///   Computes the C4.5 gain ratio (information gain normalized by
        ///   split information) of testing the given attribute/value pairs
        ///   on the samples in <paramref name="idxs"/>.
        /// </summary>
        private double computeGainRatio(byte[][] inputs, byte[] outputs, int[] idxs, Dictionary<int, byte> attribute, double entropy, out int[][] partitions)
        {
            double infoGain = computeInfoGain(inputs, outputs, idxs, attribute, entropy, out partitions);
            double splitInfo = Tools.SplitInformation(idxs.Length, partitions);

            // Guard against division by zero when the split puts every
            // sample on the same side.
            return splitInfo == 0 ? 0 : infoGain / splitInfo;
        }

        /// <summary>
        ///   Information gain: the parent entropy minus the weighted entropy
        ///   of the two partitions induced by the attribute test.
        /// </summary>
        private double computeInfoGain(byte[][] inputs, byte[] outputs, int[] idxs, Dictionary<int, byte> attribute, double entropy, out int[][] partitions)
        {
            return entropy - computeInfo(inputs, outputs, idxs, attribute, out partitions);
        }

        /// <summary>
        ///   Partitions the samples in <paramref name="idxs"/> by whether
        ///   they match every attribute/value pair of the test — matches go
        ///   to partitions[1], the rest to partitions[0] — and returns the
        ///   size-weighted average entropy of the two partitions.
        /// </summary>
        private double computeInfo(byte[][] inputs, byte[] outputs, int[] idxs, Dictionary<int, byte> attribute, out int[][] partitions)
        {
            partitions = new int[2][];

            List<int> nonMatching = new List<int>();
            List<int> matching = new List<int>();

            for (int i = 0; i < idxs.Length; i++)
            {
                // Enumerate the dictionary directly; the original used
                // Keys.ElementAt(j)/Values.ElementAt(j), which re-enumerates
                // the dictionary on every access (O(k) per element).
                bool matches = true;
                foreach (KeyValuePair<int, byte> pair in attribute)
                {
                    if (inputs[idxs[i]][pair.Key] != pair.Value)
                    {
                        matches = false;
                        break;
                    }
                }

                if (matches)
                    matching.Add(idxs[i]);
                else
                    nonMatching.Add(idxs[i]);
            }

            partitions[0] = nonMatching.ToArray();
            partitions[1] = matching.ToArray();

            // Weighted average of the partition entropies.
            double info = 0;

            for (int i = 0; i < partitions.Length; i++)
            {
                double e = Tools.Entropy(this.outputs, partitions[i], outputClasses);
                info += ((double)partitions[i].Length / idxs.Length) * e;
            }

            return info;
        }
    }
}
