﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.IO;
using MathNet.Numerics.Random;
using MachineLearning;
using PorterStemmerAlgorithm;
using CPAIS.Updater;

namespace CPAIS.Algorithm
{
     /// <summary>
    /// Hierarchical LDA sampler over a complete binary topic tree of fixed depth,
    /// instead of a tree grown by the nested CRP as in <see cref="HLDAGibbs"/>.
    /// Only root-to-leaf paths are candidate document paths.
    /// </summary>
    public class HLDAFixedTree : HLDAGibbs
    {
        /// <summary>Root of the fixed tree; shadows the base-class root with the fixed-tree topic type.</summary>
        public new HLDATopicFixedTree root { get; set; }

        /// <summary>
        /// Builds a complete binary tree of depth <c>Global.maxLevel</c>, then assigns
        /// every document a sampled root-to-leaf path, incrementing the customer count
        /// of each topic on the path.
        /// </summary>
        public override void InitPath()
        {
            // Create a binary tree of fixed depth.
            int depth = Global.maxLevel;
            root = new HLDATopicFixedTree();
            root.Split(depth);
            foreach (HLDADoc doc in documents)
            {
                List<HLDATopic> path = new List<HLDATopic>();
                HLDATopic next = root;
                path.Add(next);
                next.customers++;
                for (int l = 1; l < Global.maxLevel; l++)
                {
                    next = next.SampleChild();
                    path.Add(next);
                    next.customers++;
                }
                doc.path = path;
            }
            Console.WriteLine("Tree size\t{0}", root.CountTree());
        }

        /// <summary>
        /// Writes the topic tree (size, per-topic statistics and top words) to
        /// <paramref name="file"/>, creating or overwriting it.
        /// </summary>
        /// <param name="file">Destination path.</param>
        public void WriteTopicToFile(string file)
        {
            // using guarantees the writer is flushed and closed even if a write throws.
            using (StreamWriter sw = new StreamWriter(file))
            {
                sw.WriteLine("Tree size\t{0}", root.CountTree());
                sw.WriteLine("Level\tChildren\tCustomers\tProbability\tWords");
                root.WriteTopWords("", vocabularyIndex, sw);
            }
        }

        /// <summary>
        /// Gibbs step for a document's path. Unlike the base implementation, candidate
        /// paths are restricted to leaves because the tree shape is fixed.
        /// </summary>
        protected override void SamplePath(HLDADoc doc)
        {
            // Manual unassignment instead of doc.UnassignPath().
            // NOTE(review): this decrements customer counts and word topics but does not
            // visibly touch doc.f / doc.topicCount here -- presumably word.UnassignTopic()
            // handles that; confirm against HLDAWord before changing.
            foreach (HLDATopic t in doc.path)
            {
                t.customers--;
            }
            foreach (HLDAWord word in doc.words)
            {
                word.UnassignTopic();
            }

            // Score paths, then sample among leaf paths only.
            root.CalculatePathToLeaves(doc.f);
            List<HLDATopic> all = new List<HLDATopic>();
            root.GetLeaves(all);
            HLDATopic path = ChoosePath(all);
            doc.AssignPath(path);
        }
    }
     
     /// <summary>
    /// Hierarchical LDA (hLDA) trained with collapsed Gibbs sampling over a
    /// nested-CRP topic tree. Typical use: AddDocument (or ImportLogModel) for
    /// every document, then StartProcessing to run the sampler.
    /// </summary>
    public class HLDAGibbs
    {
        // NOTE(review): numTopic is never read or assigned in this file -- confirm callers still need it.
        public int numTopic { get; set; }

        /// <summary>Root of the topic tree; null until InitPath runs.</summary>
        public HLDATopic root { get; set; }

        /// <summary>All documents of the corpus.</summary>
        public List<HLDADoc> documents { get; set; }

        /// <summary>Index-to-word lookup (inverse of <see cref="vocabulary"/>).</summary>
        public List<string> vocabularyIndex { get; set; }

        /// <summary>Word-to-index lookup; indices are dense and assigned in first-seen order.</summary>
        public Dictionary<string, int> vocabulary { get; set; }

        public HLDAGibbs()
        {
            root = null;
            documents = new List<HLDADoc>();
            vocabulary = new Dictionary<string, int>();
            vocabularyIndex = new List<string>();
        }

        /// <summary>
        /// Returns the dense index of <paramref name="word"/>, registering the word
        /// in the vocabulary first if it has not been seen before.
        /// </summary>
        public int AddWord(string word)
        {
            int index;
            // Single lookup via TryGetValue instead of ContainsKey followed by the indexer.
            if (!vocabulary.TryGetValue(word, out index))
            {
                index = vocabulary.Count;
                vocabulary[word] = index;
                vocabularyIndex.Add(word);
            }
            return index;
        }

        /// <summary>
        /// Adds <paramref name="doc"/> to the corpus, interning each of its words
        /// into the shared vocabulary and storing the resulting index on the word.
        /// </summary>
        public void AddDocument(HLDADoc doc)
        {
            foreach (HLDAWord w in doc.words)
            {
                w.index = AddWord(w.text);
            }
            documents.Add(doc);
        }

        /// <summary>
        /// Creates the root topic and assigns every document a sampled path of length
        /// <c>Global.maxLevel</c> through the tree, incrementing the customer count of
        /// every topic on the path.
        /// </summary>
        public virtual void InitPath()
        {
            root = new HLDATopic();
            foreach (HLDADoc doc in documents)
            {
                List<HLDATopic> path = new List<HLDATopic>();
                HLDATopic next = root;
                path.Add(next);
                next.customers++;
                for (int l = 1; l < Global.maxLevel; l++)
                {
                    next = next.SampleChild();
                    path.Add(next);
                    next.customers++;
                }
                doc.path = path;
            }
            Console.WriteLine("Tree size\t{0}", root.CountTree());
        }

        /// <summary>
        /// Randomly assigns each word of each document to a level of the document's
        /// path, and initializes the per-document level/word counts (doc.f), topic
        /// counts and log-space stick-breaking weights.
        /// </summary>
        public void InitWordTopics()
        {
            foreach (HLDADoc doc in documents)
            {
                // CLR arrays are zero-initialized on allocation, so no explicit clearing loop is needed.
                doc.f = new int[Global.maxLevel, Global.vocabSize];
                List<HLDATopic> path = doc.path;
                foreach (HLDAWord word in doc.words)
                {
                    int p = Global.random.Next(0, path.Count);
                    doc.f[p, word.index]++;
                    word.AssignTopic(path[p]);
                    doc.topicCount[p]++;
                }
                for (int k = 0; k < Global.maxLevel; k++)
                {
                    // Stick-breaking weights kept in log space.
                    // NOTE(review): denominator uses words.Count - 1 -- presumably the
                    // "all other words" count of the collapsed sampler; confirm against the model.
                    double tmp = (Global.mpi + doc.topicCount[k]) / (Global.pi + doc.words.Count - 1);
                    doc._stickLength[k] = Math.Log(tmp);
                    doc._stickRemaining[k] = Math.Log(1 - tmp);
                }
            }
        }

        /// <summary>
        /// Samples one topic path from <paramref name="all"/> with probability
        /// proportional to exp(nCRP prior + likelihood weight), in log space.
        /// </summary>
        protected HLDATopic ChoosePath(List<HLDATopic> all)
        {
            double[] logP = new double[all.Count];
            double r = Global.random.NextDouble();
            for (int i = 0; i < logP.Length; i++)
            {
                logP[i] = all[i].ncrp + all[i].weights;
            }
            int selected = Sampling.SampleLog(logP, r);
            return all[selected];
        }

        /// <summary>
        /// Gibbs step for a document's path: unassigns the current path, scores every
        /// candidate path (ending at leaves or at internal nodes), samples one and
        /// assigns it.
        /// </summary>
        protected virtual void SamplePath(HLDADoc doc)
        {
            doc.UnassignPath();

            // Score paths ending at leaves as well as paths ending at internal nodes.
            root.CalculatePathToLeaves(doc.f);
            root.CalculatePathToInternalNodes(doc.f);

            List<HLDATopic> all = new List<HLDATopic>();
            root.GetAllTopics(all);
            HLDATopic path = ChoosePath(all);
            doc.AssignPath(path);
        }

        /// <summary>Gibbs step for the level assignment of every word in <paramref name="doc"/>.</summary>
        protected void SampleLevel(HLDADoc doc)
        {
            foreach (HLDAWord w in doc.words)
            {
                w.SampleLevel(doc);
            }
        }

        /// <summary>One full Gibbs sweep: resample the path, then the word levels, of every document.</summary>
        protected void GibbsSampling()
        {
            for (int i = 0; i < documents.Count; i++)
            {
                SamplePath(documents[i]);
                SampleLevel(documents[i]);
            }
        }

        /// <summary>
        /// Runs the sampler: freezes the vocabulary size into the globals, initializes
        /// paths and word topics, then performs <paramref name="iterations"/> Gibbs sweeps.
        /// </summary>
        /// <param name="iterations">Number of full Gibbs sweeps over the corpus.</param>
        /// <param name="updater">Optional progress sink; may be null.</param>
        public void StartProcessing(int iterations, ProgressUpdater updater)
        {
            Global.vocabSize = vocabulary.Count;
            Global.Veta = Global.vocabSize * Global.eta;
            InitPath();
            InitWordTopics();
            if (updater != null)
                updater.UpdateMessage("Gibbs sampling...");
            for (int i = 0; i < iterations; i++)
            {
                if (updater != null)
                {
                    updater.UpdateProgress((double)(i + 1) / iterations);
                }

                Console.WriteLine("Iteration\t{0}\t{1}", i, iterations);
                GibbsSampling();
            }
            if (updater != null)
                updater.UpdateMessage("Calculating result..."); // fixed typo ("Calcaulating")
        }

        /// <summary>
        /// Renders the tree size, a column header and the vocabulary as text.
        /// NOTE(review): despite the header row, only vocabulary words are emitted --
        /// per-topic rows appear to be missing; confirm whether this is intentional.
        /// </summary>
        public string WriteTopicToString()
        {
            StringBuilder sb = new StringBuilder();
            sb.Append("Tree size:  ");
            sb.Append(root.CountTree());
            sb.AppendLine();

            sb.Append("Level\tChildren\tCustomers\tProbability\tWords");
            sb.AppendLine();
            foreach (string sz in vocabularyIndex)
            {
                sb.Append(sz);
                sb.AppendLine();
            }
            return sb.ToString();
        }

        /// <summary>
        /// Probability of <paramref name="szWord"/> under <paramref name="hTopic"/>.
        /// Throws KeyNotFoundException if the word is not in the vocabulary.
        /// </summary>
        public double GetWordProb(HLDATopic hTopic, string szWord)
        {
            int nIndex = vocabulary[szWord];
            return hTopic.GetWordProb(nIndex);
        }

        /// <summary>
        /// Converts each trace of <paramref name="logModel"/> into a document whose
        /// "words" are "EventType : Day" tokens, and adds it to the corpus.
        /// </summary>
        public void ImportLogModel(Model.LogModel logModel)
        {
            foreach (Model.Trace trace in logModel.Traces)
            {
                HLDADoc doc = new HLDADoc(trace.ID, Global.maxLevel);
                foreach (Model.ClinicalEvent e in trace.Actions)
                {
                    doc.AddWord(e.TypeName + " : " + e.Day.ToString());
                }
                AddDocument(doc);
            }
        }

        /// <summary>Linear lookup of a document by its title; returns null when not found.</summary>
        public HLDADoc GetDocument(string szID)
        {
            foreach (var doc in documents)
            {
                if (doc.title.Equals(szID))
                    return doc;
            }
            return null;
        }
    }
}
