﻿using Common;
using Linguist.Acoustic;
using Linguist.Dictionary;
using Linguist.Language.NGram;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using Util;

namespace Linguist.LexTree
{
    /// <summary>
    /// Represents the vocabulary as a lex tree with nodes in the tree representing either words (WordNode) or units
    /// (HMMNode). HMMNodes may be shared.
    /// </summary>
    public class HMMTree
    {
        public HMMPool hmmPool;
        public InitialWordNode initialNode;
        public IDictionary dictionary;

        private ILanguageModel lm;
        private Boolean addFillerWords;
        // When filler words are excluded, the silence word is still added so the
        // tree can represent inter-word silence.
        private Boolean addSilenceWord = true;
        public List<IUnit> entryPoints = new List<IUnit>();
        public List<IUnit> exitPoints = new List<IUnit>();
        private List<IWord> allWords;
        private EntryPointTable entryPointTable;
        private Boolean debug;
        private float languageWeight;
        // Cache from an EndNode key to the HMM nodes expanded for it;
        // see getHMMNodes.
        private Dictionary<Object, HMMNode[]> endNodeMap;
        public WordNode sentenceEndWordNode;


        /// <summary>
        /// Creates the HMMTree and immediately compiles the vocabulary into it.
        /// </summary>
        /// <param name="pool">the pool of HMMs and units</param>
        /// <param name="dictionary">the dictionary containing the pronunciations</param>
        /// <param name="lm">the source of the set of words to add to the lex tree</param>
        /// <param name="addFillerWords">if <code>true</code>, filler words are added to the tree;
        /// otherwise only the silence word is added</param>
        /// <param name="languageWeight">the language weight applied to unigram log probabilities</param>
        public HMMTree(HMMPool pool, IDictionary dictionary, ILanguageModel lm,
                Boolean addFillerWords, float languageWeight)
        {
            this.hmmPool = pool;
            this.dictionary = dictionary;
            this.lm = lm;
            this.endNodeMap = new Dictionary<Object, HMMNode[]>();
            this.addFillerWords = addFillerWords;
            this.languageWeight = languageWeight;

            TimerPool.getTimer(this, "Create HMM Tree").start();
            compile();
            TimerPool.getTimer(this, "Create HMM Tree").stop();
        }


        /// <summary>
        /// Given a base unit and a left context, returns the set of entry points into the lex tree.
        /// </summary>
        /// <param name="lc">the left context</param>
        /// <param name="_base">the center unit</param>
        /// <returns>the set of entry points</returns>
        public Node[] getEntryPoint(IUnit lc, IUnit _base)
        {
            EntryPoint ep = entryPointTable.getEntryPoint(_base);
            return ep.getEntryPointsFromLeftContext(lc).getSuccessors();
        }


        /// <summary>
        /// Gets the set of HMM nodes associated with the given end node, expanding the
        /// end node against every possible right context (the tree's entry points) and
        /// caching the result.
        /// </summary>
        /// <param name="endNode">the end node</param>
        /// <returns>an array of associated HMM nodes</returns>
        public HMMNode[] getHMMNodes(EndNode endNode)
        {
            HMMNode[] results;
            // BUGFIX: the Dictionary indexer throws KeyNotFoundException on a
            // missing key (unlike Java's Map.get, which returns null), so a
            // cache miss must be detected with TryGetValue.
            if (!endNodeMap.TryGetValue(endNode.getKey(), out results))
            {
                Dictionary<IHMM, HMMNode> resultMap = new Dictionary<IHMM, HMMNode>();
                IUnit baseUnit = endNode.getBaseUnit();
                IUnit lc = endNode.getLeftContext();
                foreach (IUnit rc in entryPoints)
                {
                    IHMM hmm = hmmPool.getHMM(baseUnit, lc, rc, HMMPosition.END);
                    HMMNode hmmNode;
                    // BUGFIX: same indexer-vs-Map.get issue as above; create the
                    // node only on first sight of this HMM.
                    if (!resultMap.TryGetValue(hmm, out hmmNode))
                    {
                        hmmNode = new HMMNode(hmm, LogMath.LOG_ONE);
                        resultMap.Add(hmm, hmmNode);
                    }
                    hmmNode.addRC(rc);
                    foreach (Node node in endNode.getSuccessors())
                    {
                        WordNode wordNode = (WordNode)node;
                        hmmNode.addSuccessor(wordNode);
                    }
                }

                // cache the expansion so subsequent lookups are O(1)
                results = resultMap.Values.ToArray();
                endNodeMap.Add(endNode.getKey(), results);
            }

            return results;
        }


        /// <summary>
        /// Returns the word node associated with the sentence end word.
        /// The node is set while words are added, so the tree must be compiled first.
        /// </summary>
        /// <returns>the sentence end word node</returns>
        public WordNode getSentenceEndWordNode()
        {
            Trace.Assert(sentenceEndWordNode != null);
            return sentenceEndWordNode;
        }


        /// <summary>
        /// Compiles the vocabulary into an HMM Tree: collects entry/exit units,
        /// builds the entry point table, adds all words, then freezes the tree.
        /// </summary>
        private void compile()
        {
            collectEntryAndExitUnits();
            entryPointTable = new EntryPointTable(entryPoints, this);
            addWords();
            entryPointTable.createEntryPointMaps();
            freeze();
        }


        /// <summary>Dumps the tree to the trace output (debugging aid).</summary>
        void dumpTree()
        {
            Trace.WriteLine("Dumping Tree ...");
            Dictionary<Node, Node> dupNode = new Dictionary<Node, Node>();
            dumpTree(0, getInitialNode(), dupNode);
            Trace.WriteLine("... done Dumping Tree");
        }


        /// <summary>
        /// Recursively dumps the subtree rooted at <paramref name="node"/>,
        /// visiting each node at most once.
        /// </summary>
        /// <param name="level">the level of the dump (indentation depth)</param>
        /// <param name="node">the root of the tree to dump</param>
        /// <param name="dupNode">map of visited nodes</param>
        private void dumpTree(int level, Node node, Dictionary<Node, Node> dupNode)
        {
            // BUGFIX: the indexer would throw on an unvisited node; use
            // ContainsKey to test for a prior visit.
            if (!dupNode.ContainsKey(node))
            {
                dupNode.Add(node, node);
                Trace.WriteLine(Utilities.pad(level) + node);
                if (!(node is WordNode))
                {
                    foreach (Node nextNode in node.getSuccessors())
                    {
                        dumpTree(level + 1, nextNode, dupNode);
                    }
                }
            }
        }


        /// <summary>
        /// Collects all of the entry and exit points for the vocabulary:
        /// the first and last unit of every pronunciation of every word.
        /// </summary>
        private void collectEntryAndExitUnits()
        {
            foreach (IWord word in getAllWords())
            {
                foreach (IPronunciation p in word.getPronunciations())
                {
                    IUnit first = p.getUnits().First();
                    IUnit last = p.getUnits().Last();
                    // Entry/exit points behave as sets: many pronunciations share
                    // the same first/last unit, and duplicates would inflate the
                    // entry point table and the right-context loop in getHMMNodes.
                    if (!entryPoints.Contains(first))
                    {
                        entryPoints.Add(first);
                    }
                    if (!exitPoints.Contains(last))
                    {
                        exitPoints.Add(last);
                    }
                }
            }

            if (debug)
            {
                Trace.WriteLine("Entry Points: " + entryPoints.Count);
                Trace.WriteLine("Exit Points: " + exitPoints.Count);
            }
        }


        /// <summary>
        /// Called after the lex tree is built. Frees all temporary structures.
        /// After this is called, no more words can be added to the lex tree.
        /// </summary>
        private void freeze()
        {
            entryPointTable.freeze();
            dictionary = null;
            lm = null;
            exitPoints = null;
            allWords = null;
        }


        /// <summary>
        /// Adds the entire vocabulary (see getAllWords) to the lex tree.
        /// </summary>
        private void addWords()
        {
            foreach (IWord word in getAllWords())
            {
                addWord(word);
            }
        }


        /// <summary>
        /// Adds a single word (all of its pronunciations) to the lex tree.
        /// </summary>
        /// <param name="word">the word to add</param>
        private void addWord(IWord word)
        {
            float prob = getWordUnigramProbability(word);
            foreach (IPronunciation pronunciation in word.getPronunciations())
            {
                addPronunciation(pronunciation, prob);
            }
        }


        /// <summary>
        /// Adds the given pronunciation to the lex tree: the first unit selects the
        /// entry point, internal units grow a chain of HMM nodes, and the last unit
        /// becomes an EndNode followed by the word node itself.
        /// </summary>
        /// <param name="pronunciation">the pronunciation</param>
        /// <param name="probability">the unigram (log) probability</param>
        private void addPronunciation(IPronunciation pronunciation, float probability)
        {
            IUnit baseUnit;
            IUnit lc;
            IUnit rc;
            Node curNode;
            WordNode wordNode;

            IUnit[] units = pronunciation.getUnits();
            baseUnit = units[0];
            EntryPoint ep = entryPointTable.getEntryPoint(baseUnit);

            ep.TotalProbability = probability;

            if (units.Length > 1)
            {
                curNode = ep.BaseNode;
                lc = baseUnit;
                for (int i = 1; i < units.Length - 1; i++)
                {
                    baseUnit = units[i];
                    rc = units[i + 1];
                    IHMM hmm = hmmPool.getHMM(baseUnit, lc, rc, HMMPosition.INTERNAL);
                    if (hmm == null)
                    {
                        if (debug)
                            Trace.TraceError("Missing HMM for unit " + baseUnit.getName() + " with lc=" + lc.getName() + " rc=" + rc.getName());
                    }
                    else
                    {
                        curNode = curNode.addSuccessor(hmm, probability);
                    }
                    lc = baseUnit;          // next lc is this baseUnit
                }

                // now add the last unit as an end unit
                baseUnit = units[units.Length - 1];
                EndNode endNode = new EndNode(baseUnit, lc, probability);
                curNode = curNode.addSuccessor(endNode, probability);
                wordNode = curNode.addSuccessor(pronunciation, probability);
                if (wordNode.getWord().isSentenceEndWord())
                {
                    sentenceEndWordNode = wordNode;
                }
            }
            else
            {
                // single-unit words are handled entirely by the entry point
                ep.addSingleUnitWord(pronunciation);
            }
        }


        /// <summary>
        /// Gets the unigram (log) probability for the given word, scaled by the
        /// language weight. Filler words get LOG_ONE.
        /// </summary>
        /// <param name="word">the word</param>
        /// <returns>the unigram probability for the word</returns>
        public float getWordUnigramProbability(IWord word)
        {
            float prob = LogMath.LOG_ONE;
            if (!word.isFiller())
            {
                IWord[] wordArray = new IWord[1];
                wordArray[0] = word;
                prob = lm.getProbability((new WordSequence(wordArray)));
                // scale the log probability by the language weight
                prob *= languageWeight;
            }
            return prob;
        }


        /// <summary>
        /// Returns the entire set of words: the language model vocabulary that is
        /// present in the dictionary, plus fillers (or at least the silence word).
        /// Computed lazily and cached until freeze() clears it.
        /// </summary>
        /// <returns>the set of all words (as Word objects)</returns>
        private List<IWord> getAllWords()
        {
            if (allWords == null)
            {
                allWords = new List<IWord>();
                foreach (String spelling in lm.getVocabulary())
                {
                    IWord word = dictionary.getWord(spelling);
                    if (word != null)
                    {
                        allWords.Add(word);
                    }
                }

                if (addFillerWords)
                {
                    allWords.AddRange(dictionary.getFillerWords());
                }
                else if (addSilenceWord)
                {
                    allWords.Add(dictionary.getSilenceWord());
                }
            }
            return allWords;
        }


        /// <summary>
        /// Returns the initial node for this lex tree.
        /// </summary>
        /// <returns>the initial lex node</returns>
        public InitialWordNode getInitialNode()
        {
            return initialNode;
        }
    }
}
