using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using DGen;
using NLog;

namespace Tab2Sql
{


    /// <summary>
    /// Analyses a list of words to determine (for example) how many duplicates
    /// exist within the list, which elements are the most common, and so on.
    /// Results are exposed through <see cref="PrettifyStats"/> / <see cref="EndString"/>
    /// and the public count fields.
    /// </summary>
    internal class WordStats
    {
        private static readonly Logger logger = LogManager.GetCurrentClassLogger();

        // Maps each lower-cased word to its occurrence count (string -> int).
        private readonly Hashtable stemmedWordsHash = new Hashtable();

        // Set of distinct words seen (key == value); kept for parity with the
        // stemmed table. NOTE(review): currently write-only — confirm whether any
        // external consumer relies on it before removing.
        private readonly Hashtable wordsHash = new Hashtable();

        /// <summary>
        /// Length of the analysed input: characters for the single-string
        /// constructor, line count for the custom-boundary constructor.
        /// </summary>
        public int charCount;

        /// <summary>
        /// The formatted statistics block; convenience wrapper over <see cref="PrettifyStats"/>.
        /// </summary>
        public string EndString
        {
            get { return PrettifyStats(); }
        }

        // Regex pattern used by Document to split words; set once in a constructor.
        private readonly string wordBoundary;
        // True when a caller-supplied boundary pattern should be used.
        private readonly bool useSpecialBoundary;

        /// <summary>
        /// Total number of words found (including stop words).
        /// </summary>
        public int wordCount;

        /// <summary>
        /// Accumulated elapsed milliseconds sampled during analysis.
        /// </summary>
        public long GlobalTimer;

        /// <summary>
        /// Calculation count reported by the underlying <c>Document</c>.
        /// </summary>
        public int globalCalculationCount;

        /// <summary>
        /// The input split into lines; populated by the string constructors.
        /// </summary>
        public ArrayList myArray;

        /// <summary>
        /// Default constructor. No statistics are generated; call
        /// <see cref="WordAnalysis"/> after populating <see cref="myArray"/>.
        /// </summary>
        public WordStats()
        {
            useSpecialBoundary = false;
        }

        /// <summary>
        /// Builds statistics for <paramref name="inString"/> (split on '\n' and
        /// sorted) using a caller-supplied word-boundary regex pattern.
        /// </summary>
        /// <param name="inString">Input text; one entry per line.</param>
        /// <param name="customBoundary">Regex pattern used as the word boundary.</param>
        public WordStats(string inString, string customBoundary)
        {
            myArray = new ArrayList(inString.Split('\n') as ICollection);
            myArray.Sort();
            GlobalTimer = 0;
            wordBoundary = customBoundary;
            useSpecialBoundary = true;
            generateStats();
        }

        /// <summary>
        /// Builds statistics for <paramref name="inString"/> (split on '\n')
        /// using the default word-boundary pattern.
        /// </summary>
        /// <param name="inString">Input text; one entry per line.</param>
        public WordStats(string inString)
        {
            myArray = new ArrayList(inString.Split('\n') as ICollection);
            wordBoundary = @"[^'\b]";
            generateStats(inString);
        }

        /// <summary>
        /// Returns the statistics as a text block: character count, word count,
        /// then each word (padded to 50 chars) with its occurrence count,
        /// sorted most-frequent first.
        /// </summary>
        /// <returns>A human-readable, multi-line summary.</returns>
        public string PrettifyStats()
        {
            var result = new StringBuilder(stemmedWordsHash.Count * 50);
            result.AppendLine(charCount + " chars");
            result.AppendLine(wordCount + " words");

            // Load the word/frequency pairs into a DataTable so the built-in
            // Select() can sort them by frequency, descending.
            var dt = new DataTable();
            dt.Columns.Add("Word");
            dt.Columns.Add("Occurs", typeof(int));
            dt.BeginLoadData();
            foreach (DictionaryEntry word in stemmedWordsHash)
            {
                dt.Rows.Add(word.Key.ToString(), word.Value);
            }
            dt.EndLoadData();
            dt.AcceptChanges();

            foreach (DataRow dr in dt.Select("1=1", "Occurs DESC"))
            {
                result.Append(dr[0].ToString().PadRight(50));
                result.AppendLine(dr[1].ToString());
            }

            return result.ToString();
        }

        /// <summary>
        /// Returns the entries of <paramref name="ht"/> as a sorted collection.
        /// </summary>
        /// <param name="ht">Table whose entries are sorted. Entries must be
        /// mutually comparable for the sort to succeed.</param>
        /// <returns>The sorted entries.</returns>
        public ICollection SortedHashTable(Hashtable ht)
        {
            var sorter = new ArrayList();
            sorter.AddRange(ht);
            // Sort the whole list. The previous Sort(1, ht.Count, null) started
            // at index 1 with a count equal to the list size, which overran the
            // bounds and threw ArgumentException for every non-empty table.
            sorter.Sort();
            return sorter;
        }

        /// <summary>
        /// Records the input length and runs <see cref="WordAnalysis"/>,
        /// logging (but not propagating) any failure.
        /// </summary>
        /// <param name="input">The raw input text whose length is recorded.</param>
        private void generateStats(string input)
        {
            try
            {
                charCount = input.Length;
                WordAnalysis();
            }
            catch (Exception ex)
            {
                // Log the full exception so the stack trace is preserved.
                logger.Error(ex, "generateStats failed");
            }
        }

        /// <summary>
        /// Records the line count and runs <see cref="WordAnalysis"/>,
        /// logging (but not propagating) any failure.
        /// </summary>
        private void generateStats()
        {
            try
            {
                // Count is read directly; the old ToArray().Length copied the
                // whole list just to measure it.
                charCount = myArray.Count;
                WordAnalysis();
            }
            catch (Exception ex)
            {
                // Log the full exception so the stack trace is preserved.
                logger.Error(ex, "generateStats failed");
            }
        }

        /// <summary>
        /// Main process: counts the occurrences of each word in
        /// <see cref="myArray"/>, skipping common English stop words, and
        /// publishes totals into the public count fields.
        /// </summary>
        public void WordAnalysis()
        {
            if (myArray == null)
            {
                // Guard: Document needs at least one entry to analyse.
                myArray = new ArrayList { "EmptyInputPlaceholder" };
                logger.Error("WordAnalysis has been called and myArray is null.");
            }

            var doc = new Document(myArray, useSpecialBoundary);

            // HashSet gives O(1) membership tests; the previous array Contains
            // scanned ~160 entries for every word.
            var stopWords = new HashSet<string>(getBadWords());
            int totalWords = doc.Words.Count;

            for (int i = 0; i < totalWords; ++i)
            {
                // Normalise each word to lower case before counting.
                string key = doc.Words[i].ToLower();

                if (!stopWords.Contains(key))
                {
                    if (!stemmedWordsHash.Contains(key))
                    {
                        // First sighting of this word.
                        stemmedWordsHash.Add(key, 1);
                        wordsHash.Add(key, key);
                    }
                    else
                    {
                        // Increment the word's frequency.
                        stemmedWordsHash[key] = (int)stemmedWordsHash[key] + 1;
                    }
                }

                // Emit a progress/timing trace every 100 words.
                if (i > 0 && (i % 100 == 0))
                {
                    doc.stopClock.Stop();
                    GlobalTimer += doc.stopClock.ElapsedMilliseconds;
                    Trace.TraceInformation("{0} Stemmed Words Complete - {1} ms. {2} total",
                        new object[] { i, doc.stopClock.ElapsedMilliseconds, GlobalTimer });
                    doc.stopClock.Reset();
                    doc.stopClock.Start();
                }
            }

            wordCount = doc.Words.Count;
            globalCalculationCount = doc.GlobalCounter;
        }

        /// <summary>
        /// Returns the stop-word list: roughly the top 100 most common English
        /// words of four characters or less.
        /// Originally from: http://ucrel.lancs.ac.uk/bncfreq/lists/2_3_writtenspoken.txt
        /// but subject to considerable fiddling to produce the list below.
        /// </summary>
        /// <returns>The stop words, all lower case, with no duplicates.</returns>
        public static string[] getBadWords()
        {
            // Duplicate "as" and "that" entries from the original list removed.
            string[] stopWords = {
                "a", "all", "also", "an", "and", "any", "are", "as", "at",
                "back", "be", "been", "both", "but", "by", "came", "can", "case", "come",
                "day", "did", "do", "does", "down", "each", "end", "even", "few", "find",
                "for", "from", "get", "give", "go", "good", "got", "had", "has", "have",
                "he", "her", "here", "high", "him", "his", "how", "i", "if", "in",
                "into", "is", "it", "its", "just", "know", "last", "life", "like", "made",
                "make", "man", "many", "may", "me", "men", "more", "most", "mr", "much",
                "must", "my", "new", "next", "no", "not", "now", "of", "off", "old",
                "on", "one", "only", "or", "our",
                "out", "over", "own", "part", "put",
                "said", "same", "say", "see", "seen",
                "she", "so", "some", "such", "take",
                "than", "that", "the", "them",
                "then", "they", "this", "time", "to",
                "too", "took", "two", "up", "us",
                "used", "very", "want", "was", "way",
                "we", "well", "went", "were", "what",
                "when", "who", "why", "will", "with",
                "work", "year", "you", "your"
            };

            return stopWords;
        }
    }
}