﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;

namespace myAC.PreProcessing.DocLevel
{
    public class TextHelperWenjin
    {
        // Comma-separated English stop-word list from which the removal regex
        // patterns below are built. Mutable on purpose (callers may swap the list);
        // call Reset() afterwards so the cached patterns are rebuilt.
        // NOTE(review): the leading "x" and trailing "-" entries look intentional
        // (see also the commented-out additions after "//") — confirm before removing.
        public static string EnglishStopwords =
            "x,i,me,my,myself,we,our,ours,ourselves,you,your,yours,yourself,yourselves,he,him,his,himself,she,her,hers,herself,it,its,itself,they,them,their,theirs,themselves,what,which,who,whom,this,that,these,those,am,is,are,was,were,be,been,being,have,has,had,having,do,does,did,doing,would,should,could,ought,i'm,you're,he's,she's,it's,we're,they're,i've,you've,we've,they've,i'd,you'd,he'd,she'd,we'd,they'd,i'll,you'll,he'll,she'll,we'll,they'll,isn't,aren't,wasn't,weren't,hasn't,haven't,hadn't,doesn't,don't,didn't,won't,wouldn't,shan't,shouldn't,can't,cannot,couldn't,mustn't,let's,that's,who's,what's,here's,there's,when's,where's,why's,how's,a,an,the,and,but,if,or,because,as,until,while,of,at,by,for,with,about,against,between,into,through,during,before,after,above,below,to,from,up,down,in,out,on,off,over,under,again,further,then,once,here,there,when,where,why,how,all,any,both,each,few,more,most,other,some,such,no,nor,not,only,own,same,so,than,too,very,-";//,office,microsoft,MS,hi,Hello,exchange,-,can,using

        // Comma-separated "surfaceForm:CanonicalToken" pairs; FormatTerm rewrites
        // each surface form (e.g. "c#") to its canonical token (e.g. "CSharp").
        // Also mutable; call Reset() after changing it.
        public static string EnglishTerms =
            "c#:CSharp";//",vb.net:VBDotNet,Entity Framework:EntityFramework,Visual Studio:VisualStudio,windows:windows,asp.net:AspDotNet";//,.net:dotnet,xml:XML,api:API,web services:Web Serivce,web service:Web Serivce,emails:email


        // NOTE(review): not referenced anywhere in this file — presumably consumed
        // by other code in the project; confirm before deleting.
        public static string Terms = "C#,csharp";

        // Vocabulary state maintained by GetId (guarded by asyncLock):
        public readonly Dictionary<string, int> key_Id = new Dictionary<string, int>();   // n-gram key -> id (ids assigned densely from 0)
        public readonly Dictionary<int, string> Id_Key = new Dictionary<int, string>();   // inverse map: id -> n-gram key
        public readonly Dictionary<int, int> Id_Length = new Dictionary<int, int>();      // id -> number of words in the n-gram
        // NOTE(review): wordInDocs is never read or written in the visible code —
        // presumably document-frequency counts maintained elsewhere; confirm.
        public readonly Dictionary<int, int> wordInDocs = new Dictionary<int, int>();
        // One regex alternation per stop word, matching the word surrounded by
        // whitespace, at the start, or at the end of the text. Longest patterns
        // first so longer words are removed before shorter prefixes of them.
        private static string[] stopwords = EnglishStopwords.ToLower().Split(',').Distinct().Select(s => "\\s" + s + "\\s|^" + s + "\\s|\\s" + s + "$").OrderByDescending(p => p.Length).ToArray();
        // (pattern, replacementToken) per term: the surface form bounded by
        // non-alphanumerics (or start/end of text), longest pattern first.
        private static Tuple<string, string>[] terms = EnglishTerms.ToLower().Split(',').Distinct().Select(s => new Tuple<string, string>("[^a-zA-Z0-9]" + s.Split(':')[0] + "[^a-zA-Z0-9]|^" + s.Split(':')[0] + "[^a-zA-Z0-9]|[^a-zA-Z0-9]" + s.Split(':')[0] + "$", s.Split(':')[1])).OrderByDescending(p => p.Item1.Length).ToArray();

        // Prepended to every key in GetId, letting one instance keep several
        // distinct vocabularies (e.g. per-field) apart.
        public string WordPrefix { get; set; }
        /// <summary>
        /// Rebuilds the cached stop-word and term regex patterns. Must be called
        /// after EnglishStopwords or EnglishTerms has been modified, because both
        /// pattern arrays are computed once at type initialisation.
        /// </summary>
        public static void Reset()
        {
            stopwords = EnglishStopwords.ToLower()
                .Split(',')
                .Distinct()
                .Select(word => "\\s" + word + "\\s|^" + word + "\\s|\\s" + word + "$")
                .OrderByDescending(pattern => pattern.Length)
                .ToArray();

            terms = EnglishTerms.ToLower()
                .Split(',')
                .Distinct()
                .Select(entry =>
                {
                    var parts = entry.Split(':');
                    var surface = parts[0];
                    var canonical = parts[1];
                    var pattern = "[^a-zA-Z0-9]" + surface + "[^a-zA-Z0-9]|^" + surface
                                  + "[^a-zA-Z0-9]|[^a-zA-Z0-9]" + surface + "$";
                    return new Tuple<string, string>(pattern, canonical);
                })
                .OrderByDescending(t => t.Item1.Length)
                .ToArray();
        }

        /// <summary>
        /// Removes every configured English stop word from the input, replacing
        /// each occurrence with a single space. One Regex.Replace pass is run per
        /// stop-word pattern, in descending pattern-length order.
        /// </summary>
        /// <param name="input">Text to clean.</param>
        /// <returns>The text with stop words replaced by spaces.</returns>
        public static string RemoveStopWord(string input)
        {
            // Fold the text through one case-insensitive replacement per pattern.
            return stopwords.Aggregate(
                input,
                (text, pattern) => Regex.Replace(text, pattern, " ", RegexOptions.IgnoreCase));
        }

        // Cached regex matching whole words that start with an uppercase letter
        // (equivalent to @"\b[A-Z]\w*\b", written with lookarounds).
        public static Regex r = new Regex(@"(?<=\b)[A-Z]\w*(?=\b)");

        /// <summary>
        /// Lower-cases every "simply capitalised" word (one uppercase A-Z followed
        /// only by lowercase a-z, as decided by CapAtStart). Acronyms and
        /// mixed-case identifiers are left untouched.
        /// </summary>
        /// <param name="s">Text to process.</param>
        /// <returns>The text with simply capitalised words lower-cased.</returns>
        public static String SimpleCaseProcess(String s)
        {
            // Fix: reuse the cached regex 'r' (previously declared but unused)
            // instead of re-parsing the identical pattern string on every call.
            return r.Replace(s, m => CapAtStart(m.Value) ? m.Value.ToLower() : m.Value);
        }

        /// <summary>
        /// Returns true when <paramref name="s"/> is a "simply capitalised" word:
        /// one leading ASCII uppercase letter ('A'-'Z') followed only by ASCII
        /// lowercase letters ('a'-'z'). Acronyms ("HELLO"), mixed case ("HeLLo")
        /// and words containing digits or punctuation return false.
        /// </summary>
        /// <param name="s">The word to inspect.</param>
        public static Boolean CapAtStart(String s)
        {
            // Robustness fix: the original indexed s[0] unconditionally and threw
            // IndexOutOfRangeException on an empty string.
            if (String.IsNullOrEmpty(s))
            {
                return false;
            }

            // First character must be an ASCII uppercase letter.
            if (s[0] < 'A' || s[0] > 'Z')
            {
                return false;
            }

            // Every remaining character must be ASCII lowercase; bail out on the
            // first violation (the original kept scanning after setting its flag).
            for (int i = 1; i < s.Length; i++)
            {
                if (s[i] < 'a' || s[i] > 'z')
                {
                    return false;
                }
            }

            return true;
        }


        /// <summary>
        /// Rewrites each configured term (e.g. "c#") to its canonical token
        /// (e.g. "CSharp"), padding the token with a space on either side so it
        /// stays a separate word. Patterns are applied longest-first.
        /// </summary>
        /// <param name="input">Text to normalise.</param>
        /// <returns>The text with terms replaced by their canonical tokens.</returns>
        public static string FormatTerm(string input)
        {
            // Fold the text through one case-insensitive replacement per term.
            return terms.Aggregate(
                input,
                (text, term) => Regex.Replace(text, term.Item1, " " + term.Item2 + " ", RegexOptions.IgnoreCase));
        }

        // Size of a vector indexable by every assigned id: ids run 0..Count-1,
        // so the +1 leaves one extra slot — presumably head-room for an
        // out-of-vocabulary / unseen-word id. NOTE(review): confirm the intent.
        public int SizeOfVocabularies
        {
            get { return key_Id.Count + 1; }

        }


        /// <summary>
        /// Document-level pre-processing pipeline: canonical term rewriting,
        /// punctuation spacing, (optionally) punctuation stripping, case folding
        /// of simply capitalised words, and stop-word removal.
        /// </summary>
        /// <param name="s">Raw document text.</param>
        /// <param name="limit">When true, skip the punctuation-stripping steps and keep the text closer to the original.</param>
        /// <returns>The cleaned text.</returns>
        public static String PreProcessText(string s, bool limit = false)
        {
            var text = FormatTerm(s);
            text = PreProcessLimit(text);

            if (!limit)
            {
                // Full cleaning: blank out punctuation (keeping '-', '.', '\''),
                // then drop the sentence-final periods that survived.
                text = ProcessPunctuations(text);
                text = Regex.Replace(text, @"\.\s", " ");
            }

            text = SimpleCaseProcess(text);
            return RemoveStopWord(text);
        }

        /// <summary>
        /// Light normalisation: ensures '?', '.', ',' and '!' are followed by a
        /// space, converts tabs to spaces, and collapses double blank lines.
        /// The replacement order is deliberate and must be preserved.
        /// </summary>
        /// <param name="s">Text to normalise.</param>
        /// <returns>The spaced-out text.</returns>
        public static String PreProcessLimit(string s)
        {
            var normalized = s.Replace("?", "? ");
            // '.' followed by any whitespace (tab, newline, ...) becomes ". ".
            normalized = Regex.Replace(normalized, @"\.\s", ". ");
            return normalized
                .Replace(",", ", ")
                .Replace("!", "! ")
                .Replace("\t", " ")
                .Replace("\r\n\r\n", "\r\n");
        }
        //public static String PreProcessHTML(string s, bool limit = false)
        //{
        //    string s1 = HtmlHelper.RemoveHTMLTag(s);
        //    s1 = PreProcessText(s1, limit);
        //    return s1;
        //}

        //public static ConcurrentDictionary<string, string> LemmarizedDict = new ConcurrentDictionary<string, string>();


        /// <summary>
        /// Generate the doc-term vectors for a collection of documents, one vector for a document.
        /// </summary>
        /// <param name="contents"></param>
        /// <param name="ngram"></param>
        /// <param name="re"></param>
        /// <param name="Lemmarized"></param>
        /// <returns></returns>

//        public Dictionary<int, double>[] GenerateWordRepresentation(IEnumerable<string> contents, NgramConfig ngram, Dictionary<int, double>[] re = null, Dictionary<string, double[]> wr = null)
//        {
//            var length = contents.Count();
//            Dictionary<int, double>[] results = new Dictionary<int, double>[length];

//            if (re != null)
//                results = re;
//            var data = contents.ToArray();
//            Parallel.For(0,
//                length,
//                x =>
//                {

//                    var t = data[x];
//                    var lines = t.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries).ToList();
//                    var p = new List<int>();
//                    double[] values = null;
//                    foreach (var n in ngram.Ngarm)
//                    {

//                        for (int k = 0; k < lines.Count; k++)
//                        {
//                            var words1 = lines[k].Split(new[] { ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries).ToList();
//                            string[] ss = new string[n];
//                            for (var i = 0; i < words1.Count - n + 1; i++)
//                            {

//                                for (int j = 0; j < n; j++)
//                                {
//                                    if (wr.ContainsKey(words1[i + j]))
//                                    {
//                                        if (values == null)
//                                            values = wr[words1[i + j]];
//                                        else
//                                        {
//                                            var temp = wr[words1[i + j]];
//                                            for (int l = 0; l < values.Length; l++)
//                                            {
//                                                values[l] += temp[l];
//                                            }
//                                        }
//                                    }
//                                    else
//                                    {

//                                    }

//                                }

//                            }
//                        }

//                        //t.Title_OneHotRepresentation = p.ToArray();
//                    }

//                    if (values == null)
//                    {
//                        results[x] = null;
//                    }
//                    else
//                    {
//                        Dictionary<int, double> dict = new Dictionary<int, double>();
//                        for (int l = 0; l < values.Length; l++)
//                        {
//                            dict[l] = values[l];
//                        }

//                        results[x] = dict;
//                    }
//                });
//            return results;

//        }
//        public Dictionary<int, int>[] GenerateRepresentation(IEnumerable<string> contents, NgramConfig ngram, Dictionary<int, int>[] re = null, bool Lemmarized = false)
//        {


//            var length = contents.Count();
//            Dictionary<int, int>[] results = new Dictionary<int, int>[length]; // vectors[]
//            if (re != null)
//                results = re;
//            var data = contents.ToArray();
//            Parallel.For(0,
//                length,
//                x =>
//                {
//                    LemmatizerPrebuiltFull l = null;
//                    // LemmatizerPrebuiltFull l = null;
//                    if (Lemmarized)
//                    {
//                        l = new LemmatizerPrebuiltFull(LanguagePrebuilt.English);
//                        //  l = new LemmatizerPrebuiltFull(LanguagePrebuilt.English);
//                    }
//                    var t = data[x]; // doc -> lines -> words
//                    var lines = t.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries).ToList();
//                    Dictionary<int, int> representation = results[x];
//                    if (representation == null) representation = new Dictionary<int, int>();
//                    var p = new List<int>();
//                    foreach (var n in ngram.Ngarm)
//                    {
//                        //add skip-level word-gram feature into vector
//                        if (ngram.SkipLevel)
//                        {

//                            if (n == 2)
//                            {
//                                for (int k = 0; k < lines.Count; k++)
//                                {
//                                    var words1 = lines[k].Split(new[] { ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries).ToList();
//                                    if (Lemmarized)
//                                        words1 = words1.Select(q =>
//                                        {
//                                            if (LemmarizedDict.ContainsKey(q))
//                                                return LemmarizedDict[q];
//                                            else
//                                            {
//                                                var temp = l.Lemmatize(q);
//                                                LemmarizedDict.TryAdd(q, temp);
//                                                return temp;
//                                            }
//                                        }).ToList();

//                                    for (var i = 0; i < words1.Count - n + 1; i++)
//                                    {
//                                        for (var j = i + 1; j < words1.Count; j++)
//                                        {
//                                            if (words1[i] == words1[j]) continue;

//                                            //stemming
//#if stemming
//                                            String words1_i = Stemming.Process(words1[i]);
//                                            String words1_j = Stemming.Process(words1[j]);
//                                            var id = GetId(words1_i, words1_j);
//#else
//                                            var id = GetId(words1[i], words1[j]);
//#endif
//                                            representation.InitialOrIncrease(id);
//                                            p.Add(id);
//                                        }
//                                    }
//                                }

//                            }
//                        }

//                        //add non-skip-level ngram feature to vector
//                        for (int k = 0; k < lines.Count; k++)
//                        {
//                            var words1 = lines[k].Split(new[] { ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries).ToList();
//                            if (Lemmarized)
//                                words1 = words1.Select(q => l.Lemmatize(q)).ToList();
//                            string[] ss = new string[n];
//                            for (var i = 0; i < words1.Count - n + 1; i++)
//                            {
//                                for (int j = 0; j < n; j++)
//                                {
//                                    ss[j] = words1[i + j];
//                                }
//#if stemming
//                                //stemming
//                                for (Int32 stem_i = 0; stem_i < ss.Length; stem_i++)
//                                {
//                                    ss[stem_i] = Stemming.Process(ss[stem_i]);
//                                }
//#endif


//                                var id = GetId(ss);
//                                representation.InitialOrIncrease(id);
//                                p.Add(id);
//                            }
//                        }

//                        //t.Title_OneHotRepresentation = p.ToArray();
//                    }
//                    results[x] = representation;
//                });
//            return results;
//        }
        /// <summary>
        /// Replaces every character except letters, digits, '-', '.' and '\''
        /// with a single space. Output length equals input length.
        /// </summary>
        /// <param name="input">Text to strip.</param>
        /// <returns>The text with punctuation blanked out.</returns>
        private static string ProcessPunctuations(string input)
        {
            var sb = new StringBuilder(input.Length);
            foreach (char c in input)
            {
                var keep = Char.IsLetterOrDigit(c) || c == '-' || c == '.' || c == '\'';
                sb.Append(keep ? c : ' ');
            }
            return sb.ToString();
        }
        // Guards the id/key dictionaries; GetId may be called from parallel loops.
        private readonly object asyncLock = new object();

        /// <summary>
        /// Returns the stable integer id for the given n-gram (the words joined
        /// by single spaces, prefixed with <see cref="WordPrefix"/>), allocating
        /// a new sequential id on first sight. Thread-safe.
        /// </summary>
        /// <param name="ss">The words making up the n-gram.</param>
        /// <returns>The id; ids start at 0 and are assigned densely.</returns>
        private int GetId(params string[] ss)
        {
            var s = WordPrefix + String.Join(" ", ss);
            lock (asyncLock)
            {
                int id;
                // Single lookup via TryGetValue instead of ContainsKey + indexer.
                if (!key_Id.TryGetValue(s, out id))
                {
                    id = key_Id.Count; //id starts with 0.
                    key_Id[s] = id;
                    Id_Key[id] = s;
                    Id_Length[id] = ss.Length;
                }
                return id;
            }
        }

        /// <summary>
        /// Looks up the word (or n-gram key) registered under the given id.
        /// </summary>
        /// <param name="id">An id previously returned by GetId.</param>
        /// <returns>The key, or null when the id has never been assigned.</returns>
        public string GetWord(int id)
        {
            // Single lookup via TryGetValue instead of ContainsKey + indexer.
            string word;
            return Id_Key.TryGetValue(id, out word) ? word : null;
        }



        //internal double[] GetIDF(List<ForumThread> threads)
        //{
        //    double[] counts = new double[this.SizeOfVocabularies];

        //    Parallel.For(0, counts.Length, i =>
        //    {
        //        var c = threads.Count(p => p.TitleRepresentation.ContainsKey(i));
        //        if (c == 0)
        //        {
        //            counts[i] = 0;
        //        }
        //        else
        //        {
        //            counts[i] = Math.Log(threads.Count/c);
        //        }
        //    });
        //    return counts;

        //}
    }
}
