﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NewsMine.DomainObjects;
using System.IO;
using Lucene.Net.Documents;

namespace NewsDiscoveryEngine.Index
{
    public class KeywordDensityInfoManager
    {
        // Root folder for the on-disk caches (per-query result files and the
        // 30-day date-wise keyword counts). NOTE(review): hard-coded drive
        // root, matching the original "c:\" / "C:\" usage — consider making
        // this configurable.
        private const string CacheRoot = "c:\\";

        // Process-wide cache of the 30-day date-wise keyword counts, so the
        // expensive 30-query loop below runs at most once per process.
        // NOTE(review): read and written without synchronization — confirm
        // callers are effectively single-threaded, or that a benign
        // recompute race is acceptable.
        static SortedDictionary<string, int> dictDateWiseKeywordCountCached = null;

        public List<NewsMine.DomainObjects.KeyWordTokenDensity> GetKeyWordDensityByChannelName(string channelName)
        {
            return null; //todo need to implement after the below function is tested well.
        }

        /// <summary>
        /// Computes keyword density information for <paramref name="query"/>
        /// over the trailing <paramref name="pastHours"/>-hour window, scoring
        /// each token by how broadly it appears across domains, days, and
        /// anchor tags. Results are serialized to disk keyed by an MD5 hash of
        /// (query + pastHours) and served from that file on subsequent calls.
        /// </summary>
        /// <param name="query">Lucene query restricting the documents considered.</param>
        /// <param name="pastHours">Size of the trailing time window, in hours.</param>
        /// <param name="field">Index field whose facet values are counted as keywords.</param>
        /// <returns>Density entries sorted by descending score, with stop words removed.</returns>
        public static List<NewsMine.DomainObjects.KeyWordTokenDensity> GetKeyWordDensityByChannelName(string query = "", int pastHours = 0, string field = "")
        {
            // Fast path: an identical (query, pastHours) request was already
            // answered and its result left on disk — return it without
            // touching the index.
            string keypreperationString = query + " " + pastHours.ToString();
            string hashCode = NewsMine.Utilities.Extensions.CreateMD5Hash(keypreperationString);
            string resultCachePath = CacheRoot + hashCode;

            if (File.Exists(resultCachePath))
            {
                return (List<KeyWordTokenDensity>)NewsMine.Utilities.FileExetensions.DeserializeFromFile(resultCachePath);
            }

            FacetReader facetReader1 = new FacetReader();
            var reader = FacetReader.GetIndexReader();

            SortedDictionary<string, int> dictDomainKeywordCount = new SortedDictionary<string, int>();
            SortedDictionary<string, int> dictDateWiseKeywordCount = new SortedDictionary<string, int>();
            SortedDictionary<string, int> dictATagsKeywordCount = new SortedDictionary<string, int>();
            List<KeyWordTokenDensity> densityInfoList;

            int tempCount = 0; // out-param sink; the total count is not used here.

            try
            {
                // 1) For every domain active in the window, count in how many
                //    domains each keyword (facet value of `field`) appears.
                List<KeyWordTokenDensity> involvedDomainCloud =
                    facetReader1.GetKeyWordDensityCloudByTimeContext(query, "domain", pastHours, reader);

                foreach (var item in involvedDomainCloud)
                {
                    string contextQuery = FacetReader.GetTimeLimitQuery(DateTime.UtcNow.AddHours(-1 * pastHours), DateTime.UtcNow, "pubdate");

                    string domainSpecificQuery = "(domain:" + item.TokenName + ") AND (" + contextQuery + ")";

                    var dictionary = facetReader1.GetFacets(domainSpecificQuery, field, out tempCount, reader);

                    foreach (var dictItem in dictionary)
                    {
                        IncrementCount(dictDomainKeywordCount, dictItem.Key);
                    }
                }

                // 2) For each of the last 30 days, count on how many days each
                //    keyword appears. This is independent of `query`, so it is
                //    cached both in memory (static field) and on disk, keyed by
                //    the window's first and last day.
                string cachingKey = string.Format("{0}{1}",
                    DateTools.DateToString(DateTime.UtcNow, DateTools.Resolution.DAY),
                    DateTools.DateToString(DateTime.UtcNow.AddDays(-29), DateTools.Resolution.DAY));

                string dateCachePath = CacheRoot + cachingKey + ".dat";

                if (File.Exists(dateCachePath))
                {
                    if (dictDateWiseKeywordCountCached != null)
                    {
                        dictDateWiseKeywordCount = dictDateWiseKeywordCountCached;
                    }
                    else
                    {
                        // NOTE(review): this file is written below with
                        // SerializeObjectToFileUsingProtoBuff but read here with
                        // DeserializeFromFile<T> — confirm the two serializers
                        // are format-compatible.
                        dictDateWiseKeywordCount = (SortedDictionary<string, int>)NewsMine.Utilities.FileExetensions.DeserializeFromFile<SortedDictionary<string, int>>(dateCachePath);
                    }
                }
                else
                {
                    for (int i = 0; i < 30; i++)
                    {
                        // Whole-day range: hours 00..23 of the same UTC day.
                        string day = DateTools.DateToString(DateTime.UtcNow.AddDays(-1 * i), DateTools.Resolution.DAY);
                        string dateWiseQuery = string.Format("pubdate:[{0}00 TO {1}23]", day, day);

                        var dictionary = facetReader1.GetFacets(dateWiseQuery, field, out tempCount, reader);

                        foreach (var dictItem in dictionary)
                        {
                            IncrementCount(dictDateWiseKeywordCount, dictItem.Key);
                        }
                    }

                    NewsMine.Utilities.FileExetensions.SerializeObjectToFileUsingProtoBuff(dateCachePath, dictDateWiseKeywordCount);
                }

                dictDateWiseKeywordCountCached = dictDateWiseKeywordCount;

                // 3) Same as (1), but across anchor tags ("atag") instead of domains.
                List<KeyWordTokenDensity> involvedTags =
                    facetReader1.GetKeyWordDensityCloudByTimeContext(query, "atag", pastHours, reader);

                foreach (var item in involvedTags)
                {
                    string contextQuery = FacetReader.GetTimeLimitQuery(DateTime.UtcNow.AddHours(-1 * pastHours), DateTime.UtcNow, "pubdate");

                    string tagSpecificQuery = "(atag:" + item.TokenName + ") AND (" + contextQuery + ")";

                    var dictionary = facetReader1.GetFacets(tagSpecificQuery, field, out tempCount, reader);

                    foreach (var dictItem in dictionary)
                    {
                        IncrementCount(dictATagsKeywordCount, dictItem.Key);
                    }
                }

                // 4) The base density cloud that the counts above will enrich.
                densityInfoList = facetReader1.GetKeyWordDensityCloudByTimeContext(query, field, pastHours, reader);
            }
            finally
            {
                // BUGFIX: the reader was previously only closed on the success
                // path, so any exception in the facet queries above leaked it.
                reader.Close();
            }

            // Attach the per-domain / per-day / per-tag counts to each entry
            // and compute its score.
            List<KeyWordTokenDensity> scored = new List<KeyWordTokenDensity>();

            for (int i = 0; i < densityInfoList.Count; i++)
            {
                var temp = densityInfoList[i];

                if (dictDomainKeywordCount.ContainsKey(temp.TokenName))
                {
                    temp.DomaintDensity = dictDomainKeywordCount[temp.TokenName];
                }

                if (dictDateWiseKeywordCount.ContainsKey(temp.TokenName))
                {
                    temp.DailyDensity = dictDateWiseKeywordCount[temp.TokenName];
                }

                if (dictATagsKeywordCount.ContainsKey(temp.TokenName))
                {
                    temp.AtagDensity = dictATagsKeywordCount[temp.TokenName];
                }

                // NOTE(review): if TokenUniversalCount / (DailyDensity + 1) is
                // integral and evaluates to 0, this divides by zero — confirm
                // the property types / value ranges make that impossible.
                temp.Score = (temp.TokenCount / (temp.TokenUniversalCount / (temp.DailyDensity + 1))) * temp.DomaintDensity * temp.DomaintDensity;

                scored.Add(temp);
            }

            densityInfoList = (from item in scored
                               orderby item.Score descending
                               select item).ToList();

            // Drop stop words. HashSet makes the per-item lookup O(1) instead
            // of a linear list scan (and de-duplicates, so the previous
            // Distinct() call is unnecessary). Comparison lower-cases the
            // token; presumably the stop-word list is stored lower-cased —
            // TODO confirm against the keyword store.
            string stopWordCategory = "stopwords";

            HashSet<string> stopWords = new HashSet<string>(NewsMine.DataManagers.KeyWordManager.GetAllKeywordsByCategory(stopWordCategory));

            densityInfoList = (from d in densityInfoList where !stopWords.Contains(d.TokenName.ToLower()) select d).ToList();

            // Persist the final result for the fast path on subsequent calls.
            NewsMine.Utilities.FileExetensions.SerializeObjectToFile(resultCachePath, densityInfoList);

            return densityInfoList;
        }

        /// <summary>
        /// Trims <paramref name="rawKey"/> and increments its count in
        /// <paramref name="counts"/>, inserting it with count 1 when absent.
        /// BUGFIX: the original loops checked ContainsKey on the *untrimmed*
        /// key but added/indexed with the *trimmed* key, so a key with
        /// surrounding whitespace whose trimmed form already existed threw an
        /// ArgumentException on Add (duplicate key), and counts could be
        /// attributed to the wrong entry.
        /// </summary>
        private static void IncrementCount(SortedDictionary<string, int> counts, string rawKey)
        {
            string key = rawKey.Trim();

            int current;
            if (counts.TryGetValue(key, out current))
            {
                counts[key] = current + 1;
            }
            else
            {
                counts.Add(key, 1);
            }
        }
    }
}
