﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using BoboBrowse.Api;
using BoboBrowse.Facets;
using BoboBrowse.Facets.impl;
using Lucene.Net.Analysis;
using Lucene.Net.Index;
using Lucene.Net.QueryParsers;
using Lucene.Net.Search;
using Lucene.Net.Store;
using WebApp1.Models;
using NewsMine.DomainObjects;
using NewsDiscoveryEngine.Index;
using Lucene.Net.Documents;
using NewsDiscoveryEngine;
using NewsMine.Utilities;
using System.Collections;
using System.IO;
using System.Security.Cryptography;
using System.Text;

namespace WebApp1.AdminSection
{
    public partial class TestHotNews : System.Web.UI.Page
    {
        #region Keyword related stuff

        /// <summary>
        /// Populates the category drop-down from the keyword store.
        /// Binding is skipped when the list already has items, unless
        /// <paramref name="refresh"/> forces a reload.
        /// </summary>
        /// <param name="refresh">True to rebind even when the list already has items.</param>
        private void BindCategoriesToDropDownListBox(bool refresh = false)
        {
            // ListControl.Items is never null in practice; the original guard is
            // kept so behavior is unchanged if that assumption is ever wrong.
            if (refresh || cmbCategoryName.Items == null || cmbCategoryName.Items.Count == 0)
            {
                List<string> keyWordCategories = NewsMine.DataManagers.KeyWordManager.GetAllCategories();

                cmbCategoryName.DataSource = keyWordCategories;
                cmbCategoryName.DataBind();

                // Start with nothing selected so the user picks a category explicitly.
                cmbCategoryName.SelectedIndex = -1;
            }
        }

        /// <summary>
        /// "Get Keywords" button handler: builds the keyword search query for the
        /// currently selected category. Does nothing when no category is selected.
        /// </summary>
        protected void btnGetKeyWords_Click(object sender, EventArgs e)
        {
            // Guard: SelectedIndex is -1 when nothing is selected.
            if (cmbCategoryName.SelectedIndex <= -1)
            {
                return;
            }

            SelectCategory(cmbCategoryName.SelectedValue.ToString());
        }

        /// <summary>
        /// Drop-down selection handler: mirrors the button handler and builds the
        /// keyword search query for the newly selected category, if any.
        /// </summary>
        protected void cmbCategoryName_SelectedIndexChanged(object sender, EventArgs e)
        {
            bool hasSelection = cmbCategoryName.SelectedIndex > -1;

            if (hasSelection)
            {
                string chosenCategory = cmbCategoryName.SelectedValue.ToString();
                SelectCategory(chosenCategory);
            }
        }

        /// <summary>
        /// Selects <paramref name="category"/> in the drop-down, mirrors it in the
        /// status label, and pre-fills the search box with a "titlelink" OR-query
        /// over all keywords in that category.
        /// </summary>
        /// <param name="category">Keyword category to select.</param>
        private void SelectCategory(string category)
        {
            // Fix: the original read lblSelectedCategory from the drop-down's OLD
            // SelectedValue before assigning the new one, so the label could lag
            // one selection behind. Use the parameter directly for both.
            cmbCategoryName.SelectedValue = category;
            lblSelectedCategory.Text = category;

            var keywords = NewsMine.DataManagers.KeyWordManager.GetAllKeywordsByCategory(category);

            // Produces e.g. "titlelink:a OR titlelink:b OR titlelink:c".
            txtSearchBox.Text = "titlelink:" + String.Join(" OR titlelink:", keywords);
        }

        #endregion

        // Process-wide cache of the 30-day date-wise keyword counts built further
        // down in Page_Load. Static, so it survives across requests until the app
        // domain recycles. NOTE(review): shared mutable state with no locking —
        // concurrent requests could race on it; confirm that is acceptable for
        // this admin-only diagnostics page.
        static SortedDictionary<string, int> dictDateWiseKeywordCountCached = null;

        // Entry point for every request. Gates on admin access, seeds default
        // values into the search controls, then runs the "instant grouping"
        // experiment and RETURNS EARLY (the `if (true)` block below always
        // returns) — everything after that block down to the DisplayLable label
        // is currently unreachable legacy keyword-density reporting, kept for
        // reference.
        protected void Page_Load(object sender, EventArgs e)
        {

            // Admin gate: everyone else gets a plain message and no content.
            if (SecurityCheckManager.IsAdminPortalEabled() == false)
            {
                Response.Write("You should be the admin....");
                return;
            }


            // Seed defaults on first load / when the user cleared a field.
            if (string.IsNullOrEmpty(txtSearchBox.Text))
            {
                txtSearchBox.Text = "language:telugu";
            }

            if (string.IsNullOrEmpty(txtFieldName.Text))
            {
                txtFieldName.Text = "title";
            }

            if (string.IsNullOrEmpty(txtGlobalFilter.Text))
            {
                txtGlobalFilter.Text = "8";
            }

            if (string.IsNullOrEmpty(txtMaxRecords.Text))
            {
                txtMaxRecords.Text = "1000";
            }


            // Effective inputs for this request.
            // NOTE(review): Convert.ToInt32 throws FormatException on non-numeric
            // text — there is no input validation here.
            string query = txtSearchBox.Text;
            int pastHours = Convert.ToInt32(txtGlobalFilter.Text);
            int maxrecs = Convert.ToInt32(txtMaxRecords.Text);
            string field = txtFieldName.Text;

            // Redundant: txtSearchBox was already defaulted a few lines above.
            if (string.IsNullOrEmpty(txtSearchBox.Text))
            {
                txtSearchBox.Text = "language:telugu";
            }

            #region instant Grouping logic

            // Always taken: fetch up to 1000 latest feeds matching the query
            // within the past `pastHours`, group them, and return. This is the
            // only code path the page currently executes past this point.
            if (true)
            {
                var indxer = FeedIndexSearcher.Instance;
                int totalRecs = 0;

                // Lucene range clause restricting pubdate to [now - pastHours, now].
                string contextQuery1 = FacetReader.GetTimeLimitQuery(DateTime.UtcNow.AddHours(-1 * pastHours), DateTime.UtcNow, "pubdate");

                string finalQueryForFeedsRetrival = "(" + query + ") AND (" + contextQuery1 + ")";

                var feeds = indxer.GetFeedItemsByQueryOrderByLatest(finalQueryForFeedsRetrival, 1, 1000, out totalRecs, true);

                Response.Write("total records : " + totalRecs.ToString() + " <br/>");

                //CreateFeedGroups(feeds);
                CreateFeedGroupsV2(feeds);
                return; //test..
            }
            #endregion

            // ------------------------------------------------------------------
            // UNREACHABLE from here to the DisplayLable label: the block above
            // always returns. Preserved legacy keyword-density pipeline.
            // ------------------------------------------------------------------

            List<KeyWordTokenDensity> densityInfoList = new List<KeyWordTokenDensity>();

            densityInfoList = NewsDiscoveryEngine.Index.KeywordDensityInfoManager.GetKeyWordDensityByChannelName(query, pastHours, field);

            // Jumps straight to rendering, skipping the cache/facet computation
            // below (which is therefore doubly dead).
            goto DisplayLable;

            //lets see whether we have this in query cached...

            // File cache keyed by MD5(query + hours), stored at a hard-coded C:\ path.
            string keypreperationString = query + " " + pastHours.ToString();

            string hashCode = NewsMine.Utilities.Extensions.CreateMD5Hash(keypreperationString);

            //see if this file is availabled in cache.

            if (File.Exists("c:\\" + hashCode))
            {
                densityInfoList = (List<KeyWordTokenDensity>)NewsMine.Utilities.FileExetensions.DeserializeFromFile("c:\\" + hashCode);

                goto DisplayLable;
            }


            FacetReader facetReader1 = new FacetReader();

            var reader = FacetReader.GetIndexReader();
            // Step 1: which domains published matching items in the time window.
            List<KeyWordTokenDensity> involvedDomainCloud = new List<KeyWordTokenDensity>();
            using (TimeTracker t = new TimeTracker(this, "getting domains"))
            {
                involvedDomainCloud = facetReader1.GetKeyWordDensityCloudByTimeContext(query, "domain", pastHours, reader);
            }

            List<KeyWordTokenDensity> domainVsKeywordMappingInfo = new List<KeyWordTokenDensity>();


            int tempCount = 0;
            // keyword -> number of distinct domains whose facets contained it.
            SortedDictionary<string, int> dictDomainKeywordCount = new SortedDictionary<string, int>();

            using (TimeTracker t = new TimeTracker(this, "for each domain getting keywords"))
            {
                foreach (var item in involvedDomainCloud)
                {
                    string contextQuery = FacetReader.GetTimeLimitQuery(DateTime.UtcNow.AddHours(-1 * pastHours), DateTime.UtcNow, "pubdate");

                    string domainSpecificQuery = "(domain:" + item.TokenName + ") AND (" + contextQuery + ")";

                    var dictionary = facetReader1.GetFacets(domainSpecificQuery, field, out tempCount, reader);

                    foreach (var dictItem in dictionary)
                    {
                        // NOTE(review): the existence check uses the raw key but the
                        // update/insert uses the Trim()'d key — if a facet key has
                        // surrounding whitespace, Add can throw on a duplicate
                        // trimmed key. Same pattern repeats twice more below.
                        if (dictDomainKeywordCount.ContainsKey(dictItem.Key))
                        {
                            dictDomainKeywordCount[dictItem.Key.Trim()] = dictDomainKeywordCount[dictItem.Key.Trim()] + 1;
                        }
                        else
                            dictDomainKeywordCount.Add(dictItem.Key.Trim(), 1);
                    }
                }
            }

            tempCount = 0;
            // Step 2: keyword -> number of distinct days (of the last 30) it appeared on.
            SortedDictionary<string, int> dictDateWiseKeywordCount = new SortedDictionary<string, int>();



            using (TimeTracker t = new TimeTracker(this, "for each DAY WISE getting keywords"))
            {

                // Cache key covers the 30-day window (today back to 29 days ago).
                string cachingKey = string.Format("{0}{1}", DateTools.DateToString(DateTime.UtcNow.AddDays(-1 * 0), DateTools.Resolution.DAY),
          DateTools.DateToString(DateTime.UtcNow.AddDays(-1 * 29), DateTools.Resolution.DAY));




                string FilePath = "C:\\" + cachingKey + ".dat";

                if (File.Exists(FilePath) == true)
                {
                    // Prefer the in-memory static cache over re-reading the file.
                    if (dictDateWiseKeywordCountCached != null)
                    {
                        dictDateWiseKeywordCount = dictDateWiseKeywordCountCached;
                    }
                    else
                    {

                        // NOTE(review): written below with SerializeObjectToFileUsingProtoBuff
                        // but read here with DeserializeFromFile<T> — confirm the two
                        // helpers use the same wire format.
                        dictDateWiseKeywordCount = (SortedDictionary<string, int>)NewsMine.Utilities.FileExetensions.DeserializeFromFile<SortedDictionary<string, int>>(FilePath);
                    }
                }
                else
                {

                    // Build per-day counts: one facet query per day for 30 days.
                    for (int i = 0; i < 30; i++)
                    {

                        // pubdate range covering hours 00..23 of the day in question.
                        string contextQuery = string.Format("pubdate:[{0}00 TO {1}23]", DateTools.DateToString(DateTime.UtcNow.AddDays(-1 * i), DateTools.Resolution.DAY),
                    DateTools.DateToString(DateTime.UtcNow.AddDays(-1 * i), DateTools.Resolution.DAY));

                        string dateWiseQuery = contextQuery;

                        var dictionary = facetReader1.GetFacets(dateWiseQuery, field, out tempCount, reader);

                        foreach (var dictItem in dictionary)
                        {
                            // Same raw-key vs Trim()'d-key mismatch as above.
                            if (dictDateWiseKeywordCount.ContainsKey(dictItem.Key))
                            {
                                dictDateWiseKeywordCount[dictItem.Key.Trim()] = dictDateWiseKeywordCount[dictItem.Key.Trim()] + 1;
                            }
                            else
                                dictDateWiseKeywordCount.Add(dictItem.Key.Trim(), 1);
                        }
                    }

                    NewsMine.Utilities.FileExetensions.SerializeObjectToFileUsingProtoBuff(FilePath, dictDateWiseKeywordCount);

                }

                dictDateWiseKeywordCountCached = dictDateWiseKeywordCount;

            }

            // Step 3: same per-keyword counting, but across anchor tags ("atag").
            List<KeyWordTokenDensity> involvedTags = new List<KeyWordTokenDensity>();
            using (TimeTracker t = new TimeTracker(this, "getting ATAG s"))
            {
                involvedTags = facetReader1.GetKeyWordDensityCloudByTimeContext(query, "atag", pastHours, reader);
            }

            tempCount = 0;
            SortedDictionary<string, int> dictATagsKeywordCount = new SortedDictionary<string, int>();

            using (TimeTracker t = new TimeTracker(this, "for each ATAG getting keywords"))
            {
                foreach (var item in involvedTags)
                {
                    string contextQuery = FacetReader.GetTimeLimitQuery(DateTime.UtcNow.AddHours(-1 * pastHours), DateTime.UtcNow, "pubdate");

                    string domainSpecificQuery = "(atag:" + item.TokenName + ") AND (" + contextQuery + ")";

                    var dictionary = facetReader1.GetFacets(domainSpecificQuery, field, out tempCount, reader);

                    foreach (var dictItem in dictionary)
                    {
                        // Same raw-key vs Trim()'d-key mismatch as above.
                        if (dictATagsKeywordCount.ContainsKey(dictItem.Key))
                        {
                            dictATagsKeywordCount[dictItem.Key.Trim()] = dictATagsKeywordCount[dictItem.Key.Trim()] + 1;
                        }
                        else
                            dictATagsKeywordCount.Add(dictItem.Key.Trim(), 1);
                    }
                }
            }



            // Step 4: the main keyword-density cloud for the requested field.
            using (TimeTracker t = new TimeTracker(this, " Actual keywords query"))
            {

                densityInfoList = facetReader1.GetKeyWordDensityCloudByTimeContext(query, field, pastHours, reader);
            }


            reader.Close();

            List<KeyWordTokenDensity> tempInfo = new List<KeyWordTokenDensity>();



            // Step 5: attach the domain/day/atag counts to each keyword and score it.
            using (TimeTracker t = new TimeTracker(this, "Mapping domains counts with keyword info"))
            {
                for (int i = 0; i < densityInfoList.Count; i++)
                {
                    var temp = densityInfoList[i];

                    if (dictDomainKeywordCount.ContainsKey(temp.TokenName))
                    {
                        temp.DomaintDensity = Convert.ToInt32(dictDomainKeywordCount[temp.TokenName]);
                    }

                    if (dictDateWiseKeywordCount.ContainsKey(temp.TokenName))
                    {
                        temp.DailyDensity = Convert.ToInt32(dictDateWiseKeywordCount[temp.TokenName]);
                    }

                    if (dictATagsKeywordCount.ContainsKey(temp.TokenName))
                    {
                        temp.AtagDensity = Convert.ToInt32(dictATagsKeywordCount[temp.TokenName]);
                    }

                    // Heuristic score; NOTE(review): if these properties are ints this
                    // is integer division (and divides by TokenUniversalCount — zero
                    // would throw). Confirm the property types on KeyWordTokenDensity.
                    temp.Score = (temp.TokenCount / (temp.TokenUniversalCount / (temp.DailyDensity + 1))) * temp.DomaintDensity * temp.DomaintDensity;

                    tempInfo.Add(temp);

                }
            }

            // Leftover debug output.
            Response.Write("TEst test");


            densityInfoList = tempInfo;

            // Dead loop: empty body, no effect.
            for (int i = 0; i < densityInfoList.Count; i++)
            {

            }

            using (TimeTracker t = new TimeTracker(this, " Sorting finally."))
            {

                // Rank by the score computed above; earlier scoring experiments are
                // kept commented out for reference.
                densityInfoList = (from item in densityInfoList
                                   orderby
                                   item.Score descending
                                   // (item.TokenCount / (item.TokenUniversalCount / (item.DailyDensity + 1))) * item.DomaintDensity * item.DomaintDensity descending
                                   // Math.Pow(item.TokenCount, item.DomaintDensity - 1 * item.DomaintDensity) * item.TokenUniversalPercentage descending
                                   //item.DomaintDensity * item.TokenUniversalPercentage descending,  item.TokenCount * item.TokenUniversalPercentage  descending
                                   select item).ToList();
            }


            // Step 6: drop stop-words (compared against the lower-cased token name).
            string stopWordCategory = "stopwords";

            List<string> stopWords = NewsMine.DataManagers.KeyWordManager.GetAllKeywordsByCategory(stopWordCategory);

            stopWords = stopWords.Distinct().ToList();

            densityInfoList = (from d in densityInfoList where !stopWords.Contains(d.TokenName.ToLower()) select d).ToList();

            // Persist the final list under the MD5 cache key for next time.
            NewsMine.Utilities.FileExetensions.SerializeObjectToFile("c:\\" + hashCode, densityInfoList);

            //int maxRecsPerpage = maxrecs;

            // Rendering target for the goto above: dump densityInfoList as an HTML
            // table, capped at maxrecs rows.
            DisplayLable:

            Response.Write("<table>");
            Response.Write("<tr><th></th><th>Token Name</th><th>Domains</th><th>Tags</th><th>Count</th><th>Global Count</th><th>Universal Count</th><th>Dates</th><th> Relative %</th><th>Global %</th><th>Universal %</th><th>Score</th> </tr>");

            int count = 0;

            foreach (var item in densityInfoList)
            {
                if (count >= maxrecs)
                    break;

                // Highlight rows where both universal and relative percentages are non-zero.
                if (item.TokenUniversalPercentage * item.TokenCountPercentage > 0)
                {
                    Response.Write("<tr style='background-color:#fff9f9;'>");
                }
                else
                {
                    Response.Write("<tr>");
                }


                Response.Write("<td>");
                Response.Write((++count).ToString());
                Response.Write("</td>");

                Response.Write("<td>");
                Response.Write(item.TokenName);
                Response.Write("</td>");


                Response.Write("<td>");

                Response.Write(item.DomaintDensity.ToString());

                Response.Write("</td>");



                Response.Write("<td>");

                Response.Write(item.AtagDensity.ToString());

                Response.Write("</td>");

                Response.Write("<td>");
                Response.Write(item.TokenCount.ToString());
                Response.Write("</td>");

                Response.Write("<td>");
                Response.Write(item.ContextualTokenCount.ToString());
                Response.Write("</td>");

                Response.Write("<td>");
                Response.Write(item.TokenUniversalCount.ToString());
                Response.Write("</td>");

                Response.Write("<td>");

                Response.Write(item.DailyDensity.ToString());

                Response.Write("</td>");

                Response.Write("<td>");
                Response.Write(item.TokenCountPercentage.ToString());
                Response.Write(" %</td>");
                Response.Write("<td>");
                Response.Write(item.ContextualTokenPercentage.ToString());
                Response.Write(" %</td>");

                Response.Write("<td>");
                Response.Write(item.TokenUniversalPercentage.ToString());
                Response.Write(" %</td>");

                // Score recomputed inline with the same formula used in the mapping step.
                Response.Write("<td>");
                Response.Write(((item.TokenCount / (item.TokenUniversalCount / (item.DailyDensity + 1))) * item.DomaintDensity * item.DomaintDensity).ToString());
                Response.Write(" points</td>");





                Response.Write("</tr>");

            }
            Response.Write("</table>");






        }




        // A directed similarity edge between two feed items, identified by their IDs.
        class FeedConnection
        {
            public string Source { get; set; }       // ID of the feed the comparison started from
            public string Destination { get; set; }  // ID of the feed it was compared against
            public double Score { get; set; }        // similarity score (cosine in V2, token-overlap count in V1)
            public List<string> Matchedkeywords { get; set; } // tokens common to both titles; null in the V2 path
        }

        // Aggregate for a matched-keyword group: the comma-joined keywords, how
        // many tokens they contain, and how many distinct feeds they connected.
        class keyValue
        {
            public string Key { set; get; }          // comma-joined matched keywords
            public int NumberOfTokens { set; get; }  // number of tokens in Key
            public int Value { set; get; }           // count of distinct feeds sharing these keywords
        }



        /// <summary>
        /// Groups feed items by pairwise title similarity (cosine over cleaned
        /// titles) and writes the resulting clusters straight to the response as
        /// HTML fragments.
        /// </summary>
        /// <param name="feeds">Feed items to group (typically the latest search results).</param>
        public void CreateFeedGroupsV2(List<FeedItem> feeds)
        {
            int feedsCount = feeds.Count;

            // Clean each title exactly once up front. The original re-ran
            // CleanString inside the pair loop, i.e. O(n^2) cleanings.
            string[] cleanedTitles = new string[feedsCount];
            for (int i = 0; i < feedsCount; i++)
            {
                cleanedTitles[i] = TempAnalyzer.CleanString(feeds[i].Title, true, false, true, true);
            }

            // Score every ordered pair. Both directions are kept deliberately:
            // the dedupe step below collapses symmetric duplicates by sorted
            // ID pair + score.
            List<FeedConnection> feedConnections = new List<FeedConnection>();

            for (int i = 0; i < feedsCount; i++)
            {
                for (int j = 0; j < feedsCount; j++)
                {
                    if (i == j)
                        continue;

                    var score = NewsMine.Utilities.CosineSimilarity.FindSimilarity(cleanedTitles[i], cleanedTitles[j]);

                    feedConnections.Add(new FeedConnection { Source = feeds[i].ID, Destination = feeds[j].ID, Score = score });
                }
            }

            // For each feed, keep only its single strongest outgoing connection.
            var resultedStrongFeedConnections = new List<FeedConnection>();

            for (int i = 0; i < feedsCount; i++)
            {
                string sourceId = feeds[i].ID;

                var topScore = feedConnections
                    .Where(fc => fc.Source == sourceId)
                    .OrderByDescending(fc => fc.Score)
                    .FirstOrDefault();

                // Fix: with fewer than two feeds there are no connections at all
                // and the original .First() threw InvalidOperationException.
                if (topScore != null)
                {
                    resultedStrongFeedConnections.Add(topScore);
                }
            }

            // Collapse symmetric duplicates: A->B and B->A with the same score
            // share the same key (IDs sorted, score appended).
            Dictionary<string, FeedConnection> dedupeDictionary = new Dictionary<string, FeedConnection>();

            foreach (var score in resultedStrongFeedConnections)
            {
                string key = string.Join(",", (new string[] { score.Source, score.Destination }).OrderBy(f => f)) + score.Score.ToString();
                if (dedupeDictionary.ContainsKey(key) == false)
                {
                    dedupeDictionary.Add(key, score);
                }
            }

            resultedStrongFeedConnections = dedupeDictionary.Values.ToList();

            // Dump the surviving edges first, then the clusters.
            foreach (var fc in resultedStrongFeedConnections)
            {
                Response.Write("<br/>" + fc.Source + " - " + fc.Destination + " - " + fc.Score);
            }

            Response.Write("<hr/>");

            // Group edges by destination: the destination feed acts as the
            // cluster "parent", its sources as candidate members.
            var groupss = (from sc in resultedStrongFeedConnections
                           group sc by sc.Destination
                               into scg
                           select new { Parent = scg.Key, Children = scg.ToList() }).ToList();

            foreach (var g in groupss)
            {
                Response.Write("<hr/>");
                if (g.Children.Count == 0)
                    continue;

                // Members above the similarity threshold only, strongest first.
                var similarItems = g.Children
                    .OrderByDescending(c => c.Score)
                    .Where(s => s.Score > 0.4)
                    .ToList();

                var mainFeed = feeds.Where(f => f.ID == g.Parent).First();
                Response.Write("<br/><b>" + mainFeed.Title + "</b>");
                foreach (var simItem in similarItems)
                {
                    var selectedFeeds = feeds.Where(f => f.ID == simItem.Source).ToList();
                    foreach (var sFeed in selectedFeeds)
                    {
                        Response.Write("<br/>" + sFeed.Title);
                    }
                    Response.Write("Score : " + simItem.Score + "<br/>");
                }
            }
        }

        /// <summary>
        /// Earlier (v1) grouping approach: connects feeds by the set of title
        /// tokens they share, then ranks each keyword set by how many distinct
        /// feeds it connects. Results are written straight to the response.
        /// </summary>
        /// <param name="feeds">Feed items to group.</param>
        public void CreateFeedGroups(List<FeedItem> feeds)
        {
            List<FeedConnection> feedConnection = new List<FeedConnection>();

            using (TimeTracker t = new TimeTracker(this, "For each feed to find its similarities For feeds count " + feeds.Count.ToString()))
            {
                foreach (var fd in feeds)
                {
                    // Compare against every feed with a DIFFERENT title, so exact
                    // title duplicates are skipped (a feed is still compared to
                    // itself if a copy carries a different title).
                    var compareList = (from ff in feeds where ff.Title != fd.Title select ff).ToList();

                    foreach (var compareStr in compareList)
                    {
                        List<string> matchedKeywords;
                        var similarity = SimilarityOfTwoStrings(fd.Title, compareStr.Title, out matchedKeywords);

                        // Math.Pow(similarity, 1) in the original was an identity
                        // operation; the raw similarity is stored directly.
                        feedConnection.Add(new FeedConnection { Source = fd.ID, Destination = compareStr.ID, Score = similarity, Matchedkeywords = matchedKeywords });
                    }
                }

                // NOTE(review): the original contained an unreachable debug block
                // (dead code behind an unconditional `continue`) that opened an
                // HTML table. This stray close tag is kept so the emitted markup
                // stays identical to the original output.
                Response.Write("</table>");
            }

            // Group every connection by its exact matched-keyword set.
            var matched = (from fc in feedConnection
                           group fc by string.Join(",", fc.Matchedkeywords)
                               into fcg
                           select new { Key = fcg.Key, Value = fcg.ToList() }
                           ).ToList();

            List<keyValue> listOfFinalKeyValues = new List<keyValue>();

            foreach (var item in matched)
            {
                // Distinct feeds touched by this keyword set (sources + destinations).
                var count = item.Value.Select(fc => fc.Source).Union(item.Value.Select(fc1 => fc1.Destination)).Count();
                listOfFinalKeyValues.Add(new keyValue { Key = item.Key, Value = count, NumberOfTokens = item.Key.Split(',').Length });
            }

            // Rank non-empty keyword sets by (feed count * token count), descending.
            listOfFinalKeyValues = (from kv in listOfFinalKeyValues
                                    where !string.IsNullOrEmpty(kv.Key) && !string.IsNullOrEmpty(kv.Key.Trim())
                                    orderby kv.Value * kv.NumberOfTokens descending
                                    select kv).ToList();

            foreach (var item in listOfFinalKeyValues)
            {
                Response.Write("<br/> Keywords : " + item.Key + " - Count " + item.Value + " - Token count : " + item.NumberOfTokens.ToString() + " - Score :" + item.Value * item.NumberOfTokens);
            }

        }

        /// <summary>
        /// Token-overlap "similarity": lower-cases and cleans both strings, splits
        /// them on spaces, and returns the number of distinct tokens they share.
        /// </summary>
        /// <param name="src">First string (e.g. a feed title).</param>
        /// <param name="dest">Second string to compare against.</param>
        /// <param name="matchedKeywords">Receives the tokens common to both strings.</param>
        /// <returns>The count of shared tokens (a raw count, not a normalized ratio).</returns>
        public static double SimilarityOfTwoStrings(string src, string dest, out List<string> matchedKeywords)
        {
            string cleanedSource = TempAnalyzer.CleanString(src.ToLower(), true, true, true, true);
            string cleanedDest = TempAnalyzer.CleanString(dest.ToLower(), true, true, true, true);

            string[] sourceTokens = cleanedSource.Split(' ');
            string[] destTokens = cleanedDest.Split(' ');

            matchedKeywords = sourceTokens.Intersect(destTokens).ToList();

            return matchedKeywords.Count;
        }
    }


    /// <summary>
    /// Disposable stopwatch that writes begin/end markers and the elapsed time in
    /// milliseconds directly to the page response. Intended to wrap the measured
    /// work in a <c>using</c> block.
    /// </summary>
    public class TimeTracker : IDisposable
    {
        private readonly DateTime _startTime;
        private readonly string _message;
        private readonly Page _page;

        public TimeTracker(Page page, string message)
        {
            _page = page;
            _message = message;
            _startTime = DateTime.UtcNow;
            page.Response.Write("<br/>" + message + " starting...<br/>");
        }

        public void Dispose()
        {
            // Capture the end time locally; it only matters for this one report.
            DateTime finishedAt = DateTime.UtcNow;
            _page.Response.Write("<br/>" + _message + " end...");
            _page.Response.Write("<br/><b>Time taken : " + (finishedAt - _startTime).TotalMilliseconds + " Milliseconds.</b>");
        }
    }

}