﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.Diagnostics;
using System.Collections;

namespace Learning.Classifiers
{
    class ID3 : IClassifier
    {
        //int m_minLeafObs; // stop condition - minimum
        Model_Tree m_model; // the best model
        TextReader m_dataFile; // data file given with obs
        int m_logBase=0;
        Attribute[] m_Attribute_List; // list of attributes
        int[,] m_data; // the obs

        /// <summary>
        /// Reads the CSV-style data from m_dataFile into m_data and m_Attribute_List.
        /// Expected layout: line 1 = attribute names, line 2 = domain size of each
        /// attribute, line 3 = a title line, then one observation per line.
        /// Also sets m_logBase to the largest attribute domain (used as the
        /// logarithm base for entropy).
        /// </summary>
        /// <exception cref="Exception">thrown when no data stream was supplied</exception>
        private void read_file_to_Class()
        {
            // Bug fix: the null check used to run AFTER the stream was already
            // dereferenced by ReadToEnd(), making it dead code. Check first.
            if (m_dataFile == null)
                throw new Exception("No file was given");

            // Buffer the whole file so it can be scanned twice:
            // once to count lines, once to parse them.
            string string_tmp = m_dataFile.ReadToEnd();

            // First pass: count lines so the observation array can be sized exactly.
            m_dataFile = new StringReader(string_tmp);
            int dataLines = 0;
            while (m_dataFile.ReadLine() != null)
                dataLines++;
            dataLines -= 3; // the first three lines are headers, not observations
            m_dataFile.Close();

            // Second pass: parse the two header lines.
            m_dataFile = new StringReader(string_tmp);
            string line1 = m_dataFile.ReadLine();   // attribute names
            string[] values1 = line1.Split(',');
            string line2 = m_dataFile.ReadLine();   // per-attribute domain sizes
            string[] values2 = line2.Split(',');

            int[,] data = new int[dataLines, values1.Length];
            Attribute[] Attribute_List = new Attribute[values2.Length];

            int max_domain_val = -1;
            for (int i = 0; i < values1.Length; i++)
            {
                int domainSize = int.Parse(values2[i]); // parse once, reuse below
                Attribute_List[i] = new Attribute(values1[i], domainSize, i); // adding all attributes by order
                if (domainSize > max_domain_val) max_domain_val = domainSize;
            }
            // The widest domain becomes the logarithm base for entropy calculations.
            m_logBase = max_domain_val;

            m_dataFile.ReadLine(); // skip the title line preceding the observations

            for (int i = 0; i < dataLines; i++) // for each observation line
            {
                string[] values = m_dataFile.ReadLine().Split(',');
                for (int j = 0; j < values.Length; j++) // for each column
                {
                    data[i, j] = int.Parse(values[j]);
                }
            }
            m_Attribute_List = Attribute_List;
            m_data = data;
        }


        /// <summary>
        /// Constructor kept for interface compatibility.
        /// NOTE(review): the minLeafObs argument is currently ignored — getBestModel
        /// sweeps the minimum-leaf-size parameter internally instead. Confirm this
        /// is intentional before relying on the parameter.
        /// </summary>
        /// <param name="minLeafObs">intended minimum observations per leaf (unused)</param>
        public ID3(int minLeafObs) //constructor should also receive the data (in any form you wish)
        {

            //your code here
        }

        /// <summary>
        /// Default constructor. All state is populated later by getBestModel,
        /// which receives the data stream and trains the model.
        /// </summary>
        public ID3() //constructor should also receive the data (in any form you wish)
        {

            
        }

        /// <summary>
        /// Trains a sequence of tree models with a decreasing minimum-leaf-size
        /// parameter, scores each by 3-fold cross validation, and keeps the best
        /// one. Stops when the parameter reaches 1 or when the projected runtime
        /// would exceed a 5-minute budget.
        /// </summary>
        /// <param name="dataFile">comma-separated data: names line, domain-sizes
        /// line, a title line, then one observation per line</param>
        /// <returns>this object, now holding the best model found</returns>
        public IClassifier getBestModel(TextReader dataFile)
        {
            m_dataFile = dataFile;
            read_file_to_Class(); // fills m_data and m_Attribute_List from the stream

            const int split_data_to = 3; // number of cross-validation folds

            //int average_model_time = 0;
            int minLeafNum = this.m_data.GetLength(0); // start with the loosest stop condition (all obs)
            bool goOn = true;
            List<ExpermintalModel> ModelList = new List<ExpermintalModel>();
            int Global_start_Time = Environment.TickCount; // wall-clock budget reference, milliseconds
            while (goOn == true)
            {
                ExpermintalModel currModel = new ExpermintalModel();
                int experment_start_Time = Environment.TickCount;
                ModelList.Add(currModel);
                currModel.m_model = (createModel(0, 0, minLeafNum)); // the main model, trained on ALL the data
                for (int exclude_part = 0; exclude_part < split_data_to; exclude_part++)
                {   
                    Model_Tree curr_model = createModel(split_data_to, exclude_part, minLeafNum); // one fold model per excluded part
                    currModel.m_ModelList.Add(curr_model);
                }
                calcExpermentModelScore(currModel);
                freeModelMem(currModel); // fold models are no longer needed once scored

                currModel.m_calcualteTime = (Environment.TickCount - experment_start_Time)/1000; // seconds spent on this experiment
                { // shrink minLeafNum on a coarse-to-fine schedule: big steps while large, small steps near the interesting range
                    if (minLeafNum > 1000)
                        minLeafNum /= 2;
                    else
                        if (minLeafNum > 500)
                            minLeafNum -= 200;
                        else
                            if (minLeafNum > 250)
                                minLeafNum -= 50;
                            else
                                if (minLeafNum > 150)
                                    minLeafNum -= 50;
                                else
                                    if (minLeafNum > 100)
                                        minLeafNum -= 15;
                                    else

                                        if (minLeafNum > 50)
                                            minLeafNum -= 10;
                                        else
                                            minLeafNum -= 3;
                }
                if (minLeafNum <= 1)
                    goOn = false;
                double minleafratio = 1;
                if (minLeafNum != currModel.m_model.getMinLeafNum())
                {
                    // ratio between this experiment's parameter and the next one —
                    // used below to extrapolate the next experiment's runtime
                    minleafratio = (((double)currModel.m_model.getMinLeafNum() / (double)minLeafNum));
                }
                //Debug.Assert(minleafratio >= 1);
                // stop early if elapsed time plus the projected next-experiment time
                // would exceed the 5-minute budget
                if (((Environment.TickCount - Global_start_Time) / 1000) + minleafratio*currModel.m_calcualteTime > 5 * 60)
                {
                    goOn = false;
                }
            }
            set_best_model(ModelList); // keep the best-scoring experiment's main model in this object
            return this;
        }

        /// <summary>
        /// Drops the experiment's per-fold model list so those trees can be
        /// garbage collected — their scores have already been folded into the
        /// experiment's score, and only the main model is kept.
        /// </summary>
        /// <param name="currModel">the experiment whose fold models are released</param>
        private void freeModelMem(ExpermintalModel currModel)
        {
            currModel.m_ModelList = null;
        }




        /// <summary>
        /// Builds one complete decision tree for the given data slice.
        /// </summary>
        /// <param name="split_data_to">number of data parts; 0 means use all the data</param>
        /// <param name="exclude_part">which part to leave out of training</param>
        /// <param name="minLeafNum">minimum observations before a node becomes a leaf</param>
        /// <returns>the fully grown model</returns>
        private Model_Tree createModel(int split_data_to, int exclude_part, int minLeafNum)
        {
            // Wire up an empty tree: the model knows its data slice; the root knows its model.
            Model_Tree model = new Model_Tree(null, split_data_to, exclude_part, minLeafNum);
            Tree_Node root = new Tree_Node();
            root.setModel(model);
            model.Set_Root(root);

            calc_stats(model, root);  // gather statistics for the root's observation set
            set_node_up(root);        // mark the root as a leaf or pick its split attribute
            expand_node(model, root); // recursively grow the subtree under that attribute
            return model;
        }


        /// <summary>
        /// Recursively grows one child per value of the node's chosen attribute.
        /// Does nothing for leaf nodes.
        /// </summary>
        /// <param name="model">the tree being grown</param>
        /// <param name="node">the internal node to expand</param>
        private void expand_node(Model_Tree model, Tree_Node node)
        {
            if (node == null) throw new Exception("canot expand null node");
            if (node.get_leaf_Val() != -1) return; // leaves are terminal — nothing to grow

            int childCount = node.get_child_number();
            for (int branchValue = 0; branchValue < childCount; branchValue++)
            {
                Tree_Node child = new Tree_Node();
                child.Set_father(node);

                // Clone the parent's constraint list, then bind the most recently
                // chosen attribute (added with value -1) to this branch's value.
                ArrayList childConstraints = new ArrayList();
                foreach (object constraint in node.m_Att_in_use)
                {
                    childConstraints.Add(constraint);
                }
                int last = childConstraints.Count - 1;
                Chosen_Attribute pending = (Chosen_Attribute)childConstraints[last];
                childConstraints[last] = new Chosen_Attribute(pending.Get_att(), branchValue);

                child.m_Att_in_use = childConstraints;
                child.setModel(model);
                node.set_Child_at(branchValue, child);

                update_ptr_list(model, child); // narrow the father's rows to this branch
                calc_stats(model, child);      // statistics over the narrowed rows
                set_node_up(child);            // decide: leaf, or split further
                expand_node(model, child);     // recurse into the new child
            }
        }

        /// <summary>
        /// Fills the child's observation-index list with every index from its
        /// father's list whose data row satisfies all of the child's
        /// attribute/value constraints.
        /// </summary>
        /// <param name="model">unused; kept for signature compatibility with callers</param>
        /// <param name="child">the node whose m_data_ptr_lst is populated</param>
        private void update_ptr_list(Model_Tree model, Tree_Node child)
        {
            foreach (int x in child.Get_father().m_data_ptr_lst)
            {
                bool constrains = true;
                foreach (Chosen_Attribute C_att in child.m_Att_in_use)
                {
                    if (m_data[x, C_att.Get_att().get_index()] != C_att.get_value())
                    {
                        constrains = false;
                        break; // one failed constraint rules the row out; no need to test the rest
                    }
                }
                if (constrains == true)
                    child.m_data_ptr_lst.Add(x);
            }
        }

        /// <summary>
        /// Sets the node up: decides whether it is a leaf (all observations share
        /// one class, no free attributes remain, too few observations, or no
        /// attribute clears the gain cutoff) or an internal node, in which case
        /// the best-gain attribute is recorded and child slots are allocated.
        /// </summary>
        /// <param name="node">a node to set up</param>
        private void set_node_up(Tree_Node node)
        {
            // first trying to see if its a leaf
            if (node==null) throw new Exception("canot set up null node");
            List<Attribute_Stats> stats=node.m_stats;
            int non_Zero_index=-1;    // the only class seen (valid when num_non_Zero == 1)
            int num_non_Zero = 0;     // how many distinct class values actually occur here
            int max_Value=-1;         // size of the majority class
            int max_Value_index = -1; // the majority class value
            int num_of_obs = 0;       // total observations reaching this node
            Attribute_Stats classification = null;

            foreach(Attribute_Stats at_st in stats)
            {
                if (at_st.get_att().AttributeName.Equals("classification")) // only the class attribute's stats matter here
                {
                    // NOTE(review): per-class counts are read from the (i,i) diagonal
                    // of the stats matrix — same convention as get_major_from_father.
                    for (int i = 0; i < at_st.get_att().Num_of_vals; i++)
                    {
                        classification = at_st; // saving pointer to the classification for entropy later
                        num_of_obs += at_st.get_stats_at(i, i); // total observations (needed for entropy later)
                        if (at_st.get_stats_at(i,i) != 0) // do all obs share the same classification?
                        {
                            num_non_Zero++;
                            non_Zero_index = i;
                        }
                        if (at_st.get_stats_at(i,i) > max_Value) // track the majority class in case this becomes a leaf
                        {
                            max_Value = at_st.get_stats_at(i,i);
                            max_Value_index = i;
                        }
                    }
                    break;
                }
            }
            //Debug.Assert(num_non_Zero > 0 || num_of_obs==0);
            if (num_non_Zero == 1) // pure node: every observation has the same class -> leaf
            {
                //Debug.Assert(non_Zero_index != -1);
                node.set_leaf_Val(non_Zero_index);
                return;
            }

            // if no more attributes are left, or too few observations remain, return the majority class
            if (this.m_Attribute_List.Count() - 1 == node.m_Att_in_use.Count || num_of_obs < node.getModel().getMinLeafNum())
            {
                //Debug.Assert(max_Value_index != -1 || num_of_obs==0);
                //Debug.Assert(max_Value != -1 || num_of_obs == 0);
                if (num_of_obs == 0)// empty node: fall back to the father's majority
                {
                    node.set_leaf_Val(get_major_from_father(node));
                }
                else
                {
                    node.set_leaf_Val(max_Value_index);
                }
                
                return;
            }
            // by now, i know i must choose an attribute.


            double entpropy_S = calc_entr(classification, num_of_obs); // entropy of the class distribution at this node
            Dictionary<Attribute, double> gain; // for each free attribute, its information gain
            gain = calc_gain(entpropy_S,num_of_obs, stats);
            Attribute best_att;
            best_att = get_best_att(gain); // best attribute, or null when no gain clears the cutoff
            if (best_att == null)// no attribute is worth splitting on -> majority leaf
            {
                node.set_leaf_Val(max_Value_index);
            }
            else
            {
                node.m_Att_in_use.Add(new Chosen_Attribute(best_att, -1)); // adding the attribute as used, with -1. it must be changed for each child
                node.set_Child_Num(); // this func creates the array in the node
            }

            

        }

        /// <summary>
        /// Returns the majority class value found in the node's classification
        /// statistics (the (i,i) diagonal of the stats matrix).
        /// NOTE(review): despite the name, this reads the NODE's own stats, not
        /// the father's; when the node has zero observations every count is 0
        /// and class 0 wins — confirm that is the intended fallback.
        /// </summary>
        /// <param name="node">the node whose stats are scanned</param>
        /// <returns>the class value with the highest count</returns>
        private int get_major_from_father(Tree_Node node)
        {
            int bestCount = -1;
            int bestClass = -1;
            foreach (Attribute_Stats at_st in node.m_stats)
            {
                if (!at_st.get_att().AttributeName.Equals("classification"))
                    continue; // only the class attribute's stats matter

                for (int c = 0; c < at_st.get_att().Num_of_vals; c++)
                {
                    int count = at_st.get_stats_at(c, c);
                    if (count > bestCount)
                    {
                        bestCount = count;
                        bestClass = c;
                    }
                }
                break; // first matching entry is the only one used
            }
            return bestClass;
        }

        /// <summary>
        /// Returns the attribute with the highest information gain, or null when
        /// even the best gain falls below the 0.03 cutoff (meaning the caller
        /// should make the node a leaf instead of splitting).
        /// </summary>
        /// <param name="gain">gain of every attribute that can still be used</param>
        /// <returns>the maximal-gain attribute, or null</returns>
        private Attribute get_best_att(Dictionary<Attribute, double> gain)
        {
            Attribute best = null;
            double bestGain = double.MinValue;
            foreach (KeyValuePair<Attribute, double> candidate in gain)
            {
                if (candidate.Value > bestGain)
                {
                    bestGain = candidate.Value;
                    best = candidate.Key;
                }
            }

            // Below the cutoff a split is not worth it — signal "make this a leaf".
            return bestGain < 0.03 ? null : best;
        }

        /// <summary>
        /// Computes the ID3 information gain of every free (not yet used)
        /// attribute from the node's statistics:
        /// gain(A) = entropy(S) - sum over values v of A of (|S_v|/|S|) * entropy(S_v).
        /// Logarithms use base m_logBase (the widest attribute domain).
        /// </summary>
        /// <param name="entpropy_S">entropy of the class distribution at this node</param>
        /// <param name="num_of_obs">number of observations at this node (|S|)</param>
        /// <param name="stats">a list with statistics regarding all free attributes</param>
        /// <returns>dictonary made of pairs - attribute and its gain</returns>
        private Dictionary<Attribute, double> calc_gain(double entpropy_S, int num_of_obs,List<Attribute_Stats> stats)
        {
            Dictionary<Attribute, double> ans = new Dictionary<Attribute,double>();
            //Debug.Assert(num_of_obs != 0);

            foreach (Attribute_Stats stat in stats) // for each attribute
            {
                if (stat.get_att().AttributeName.Equals("classification")) // but not for the classification
                {
                    // NOTE(review): break (not continue) assumes "classification" is
                    // the last entry in stats — verify the list is built in that order.
                    break;
                }
                double gain = 0; // gain of this attribute
                gain += entpropy_S;
                
                // for each value of this attribute, subtract the weighted entropy of its subset
                for (int num_of_att_val = 0; num_of_att_val < stat.get_att().Num_of_vals; num_of_att_val++) // for each value of this attribute
                {
                    
                    int num_obs_in_leaf = 0; // |S_v|: observations where the attribute takes this value
                    for (int num_of_ans_val = 0; num_of_ans_val < get_num_of_vals_in_ans_domain(); num_of_ans_val++) // for each value in ans domain
                    {
                        num_obs_in_leaf += stat.get_stats_at(num_of_att_val, num_of_ans_val);
                    }
                    double ratio = ((double)num_obs_in_leaf) / ((double)num_of_obs); // |S_v| / |S|
                    
                    double entro = 0; // entropy(S_v)

                    for (int num_of_ans_val = 0; num_of_ans_val < get_num_of_vals_in_ans_domain(); num_of_ans_val++) // for each value in ans domain
                    {
                        double curr_stats = (double)stat.get_stats_at(num_of_att_val, num_of_ans_val);
                        double px;
                        double logpx;
                        if (num_obs_in_leaf == 0 || curr_stats==0)
                        {
                            // by convention 0 * log(0) contributes 0 entropy
                            px = 0;
                            logpx = 0;
                        }
                        else
                        {
                            px = curr_stats / (double)num_obs_in_leaf;
                            logpx = Math.Log(px, m_logBase);
                        }

                        entro += (-1) * px * logpx;

                    }
                    
                    gain -= ratio * entro;
                    num_obs_in_leaf = 0;
                    if (gain < 0 && gain > -0.00001) // clamp tiny negatives caused by floating-point rounding
                        gain = 0;
                    //Debug.Assert(gain >= 0); 
                }

                ans.Add(stat.get_att(), gain);
            }
            return ans;
        }

        /// <summary>
        /// Domain size of the classification attribute — always the last entry
        /// of the attribute list (the last data column).
        /// </summary>
        int get_num_of_vals_in_ans_domain()
        {
            int classificationIndex = m_Attribute_List.Length - 1;
            return m_Attribute_List[classificationIndex].Num_of_vals;
        }

        /// <summary>
        /// Entropy of the classification distribution:
        /// -sum p_i * log(p_i), with logarithms in base m_logBase.
        /// Classes with zero observations contribute nothing (0 * log 0 := 0).
        /// </summary>
        /// <param name="classification">statistics about the classification attribute</param>
        /// <param name="num_of_obs">number of elements in group S (assumed nonzero)</param>
        /// <returns>the entropy</returns>
        private double calc_entr(Attribute_Stats classification, int num_of_obs)
        {
            double entropy = 0;
            int domainSize = classification.get_att().Num_of_vals;

            for (int v = 0; v < domainSize; v++)
            {
                // per-class counts live on the (v,v) diagonal of the stats matrix
                double p = (double)classification.get_stats_at(v, v) / (double)num_of_obs;
                if (p != 0)
                {
                    // skipping p == 0 is equivalent to the 0 * log(0) = 0 convention
                    entropy -= p * Math.Log(p, m_logBase);
                }
            }

            return entropy;
        }

        /// <summary>
        /// Calculates, for every attribute not yet used on the path to this node,
        /// the per-(attribute value, class value) observation counts, honouring
        /// the values already fixed by the used attributes and the model's
        /// train/evaluate data split. Results are stored in node.m_stats.
        /// </summary>
        /// <param name="model">the input model - for the info regarding the data slice</param>
        /// <param name="node">the node to calc the stats (and stor in)</param>
        void calc_stats(Model_Tree model,Tree_Node node) 
        {
            if (node == null) throw new Exception("cannot calculate null node");
            if (node.get_leaf_Val() != -1) throw new Exception("this is a leaf node");
            if (node.m_stats != null) throw new Exception("stats already calculated");
            List<Attribute_Stats> stats = new List<Attribute_Stats>();
            ArrayList Att_in_use=node.m_Att_in_use;
            int classification_Domain = get_num_of_vals_in_ans_domain();

            // Collect the attributes that still need stats — ones already used on
            // the path to this node are excluded.
            foreach (Attribute att in this.m_Attribute_List)
            {
                bool toadd = true;
                foreach (Chosen_Attribute ca in Att_in_use)
                {
                    if (ca.Get_att().AttributeName.Equals(att.AttributeName)) toadd = false;
                }
                if (toadd==true)
                    stats.Add(new Attribute_Stats(att, classification_Domain));
            }
            //Debug.Assert(stats.Count > 0);

            int lines = this.m_data.GetLength(0);
            int rows = this.m_data.GetLength(1);          // number of columns (attributes)
            int split_factor = model.get_Split_data();    // 0 means "use all the data"
            int exclude_part = model.get_Exclude_Part();  // the fold held out for evaluation
            int classificationPos = this.m_data.GetLength(1)-1; // class label lives in the last column
            bool constarins;
            int classification;

            // Per-column constraint: the required value, or -1 when unconstrained.
            int[] chosen_attributes_array = new int[rows];
            for (int i = 0; i < rows; i++)
                chosen_attributes_array[i] = -1;
            foreach (Chosen_Attribute ca in Att_in_use) // iterating through constrains
            {
                chosen_attributes_array[ca.Get_att().get_index()] = ca.get_value();
            }
            // Highest constrained column index — upper bound for the inner loop below.
            int lastAtt_in_use = -1;
            for (int i = rows-1; i > -1; i--)
            {
                if (chosen_attributes_array[i] != -1)
                {
                    lastAtt_in_use = i;
                    break;
                }      
            }
            if (node.Get_father()==null) // only the root scans the whole data set
            {
                for (int i = 0; i < lines; i++) // counting number of instances for each value
                {
                    
                    if (split_factor != 0 && i % split_factor == exclude_part) // skip the held-out fold
                        continue;
                    // NOTE(review): the row index is added before the constraint check;
                    // for the root Att_in_use is empty, so every kept row passes anyway.
                    node.m_data_ptr_lst.Add(i);
                    constarins = true;
                    for (int j = 0; constarins == true && j <= lastAtt_in_use; j++) // iterating through constrains
                    {
                        if (chosen_attributes_array[j] != -1)
                        {
                            if (m_data[i, j] != chosen_attributes_array[j])
                            {
                                constarins = false;
                                break;
                            }
                        }

                    }
                    if (constarins == true) // this row satisfies all constraints — count it
                    {
                        classification = m_data[i, classificationPos];
                        foreach (Attribute_Stats at_sta in stats)
                        {
                            at_sta.m_domain_Stats[m_data[i, at_sta.get_att().get_index()], classification]++;
                        }
                    }

                }
            }
            else // non-root node: its row list was already filtered by update_ptr_list
            {
                foreach (int x in node.m_data_ptr_lst)
                {
                    //Debug.Assert(split_factor ==0 || x % split_factor != exclude_part); // it was already excluded
                        classification = m_data[x, classificationPos];
                        foreach (Attribute_Stats at_sta in stats)
                        {
                            at_sta.m_domain_Stats[m_data[x, at_sta.get_att().get_index()], classification]++;
                        }
                }
            }
            node.m_stats = stats;
        }

        /// <summary>
        /// Scans the finished experiments, keeps the one with the highest
        /// cross-validation score, and installs its full-data model as this
        /// classifier's model.
        /// </summary>
        /// <param name="ModelList">all experiments run by getBestModel (non-empty)</param>
        private void set_best_model(List<ExpermintalModel> ModelList)
        {
            ExpermintalModel best = null;
            double bestScore = double.MinValue;

            foreach (ExpermintalModel candidate in ModelList)
            {
                if (candidate.getScore() > bestScore)
                {
                    bestScore = candidate.getScore();
                    best = candidate;
                }
            }

            this.m_model = best.m_model;
        }

        /// <summary>
        /// Averages the held-out accuracy of every fold model in the experiment
        /// and stores the result as the experiment's score.
        /// </summary>
        /// <param name="expModel">the experiment whose fold models are evaluated</param>
        private void calcExpermentModelScore(ExpermintalModel expModel)
        {
            double scoreSum = 0;
            double foldCount = 0;

            foreach (Model_Tree fold in expModel.m_ModelList)
            {
                scoreSum += evaluate_model(fold); // accuracy on the rows this fold did not train on
                foldCount++;
            }

            expModel.setScore(scoreSum / foldCount);
        }

        /// <summary>
        /// Measures a fold model's accuracy on the rows it did NOT train on
        /// (the excluded part of the data split).
        /// </summary>
        /// <param name="curr_model">a fold model; must have a nonzero split factor</param>
        /// <returns>hit ratio in [0,1]; 0 when no test rows exist</returns>
        /// <exception cref="Exception">thrown for the main (all-data) model,
        /// which has no held-out rows to evaluate on</exception>
        private double evaluate_model(Model_Tree curr_model)
        {
            if (curr_model.get_Split_data() == 0) throw new Exception("dont evaluate the main model");
            int num_of_hits = 0;
            int num_of_miss = 0;

            int lines = this.m_data.GetLength(0);
            int split_factor = curr_model.get_Split_data();
            int exclude_part = curr_model.get_Exclude_Part();

            for (int i = 0; i < lines; i++)
            {
                // Only rows in the excluded part were unseen during training.
                if (i % split_factor != exclude_part)
                    continue;

                if (test_model_with_line(curr_model, i))
                    num_of_hits++;
                else
                    num_of_miss++;
            }

            int total = num_of_hits + num_of_miss;
            // Guard the empty-test-set case to avoid dividing by zero.
            return total == 0 ? 0 : (double)num_of_hits / total;
        }

        /// <summary>
        /// Classifies data row i with the given model and reports whether the
        /// prediction matches the row's true class (last column).
        /// </summary>
        /// <param name="curr_model">the model to test</param>
        /// <param name="i">row index into m_data</param>
        /// <returns>true when the model classifies the row correctly</returns>
        private bool test_model_with_line(Model_Tree curr_model, int i)
        {
            // Descend from the root, branching on the row's value of the
            // attribute chosen at each internal node, until a leaf is reached.
            Tree_Node node = curr_model.Get_Root();
            while (node.get_leaf_Val() == -1)
            {
                Chosen_Attribute chosen = (Chosen_Attribute)node.m_Att_in_use[node.m_Att_in_use.Count - 1];
                node = node.get_Child_at(m_data[i, chosen.Get_att().get_index()]);
            }

            // The last column of the row holds the true classification.
            return m_data[i, m_data.GetLength(1) - 1] == node.get_leaf_Val();
        }


        /// <summary>
        /// Classifies one observation with the best model found by getBestModel.
        /// </summary>
        /// <param name="values">attribute values, indexed by attribute position</param>
        /// <returns>the predicted class value</returns>
        public int classify(ushort[] values)
        {
            // Walk the tree from the root, following the branch matching the
            // observation's value for each node's chosen attribute.
            Tree_Node node = this.m_model.Get_Root();
            while (node.get_leaf_Val() == -1)
            {
                Chosen_Attribute chosen = (Chosen_Attribute)node.m_Att_in_use[node.m_Att_in_use.Count - 1];
                node = node.get_Child_at(values[chosen.Get_att().get_index()]);
            }

            return node.get_leaf_Val();
        }
    }
}
