﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NewsMine.DomainObjects;
using NewsMine.Storage;
using LuceneWrap.Lib;
using Lucene.Net.Search;
using NewsMine.Storage.SQLite;
using System.IO;
using NewsDiscoveryEngine.Index;
using NewsDiscoveryEngine.Jobs;
using NewsMine.Utilities;
using Lucene.Net.Documents;

namespace NewsDiscoveryEngine
{
    public class FeedIndexer
    {
        // File-system backed manager used to create/locate the index folder on disk.
        IndexStoreManagerOnFileSystem indexStoreMgr = new IndexStoreManagerOnFileSystem();
        // Shared writer-side Lucene wrapper; static so all indexing in the process goes through one writer.
        static LuceneWrap.Lib.LuceneManager<FeedItem> luceneManager;
        // Backing field for the lazily-created singleton exposed via Instance.
        private static FeedIndexer feedIndexer;
        // Searcher opened over the same index folder (created in the constructor).
        private LuceneWrap.Lib.LuceneSearcher searcher;
        // All four of these point at (or under) the index store folder resolved in the constructor.
        string indexFolder;
        string folder;
        string folderForReIndexing;
        string stagingFolder;

        // Location of the sqlite feed store that backs the index (source of truth for reindexing).
        string sqliteFeedStoreFilePath;
        string sqliteFeedStoreFolder;
        SQLiteFeedStoreHelper sqliteFeedStoreHelper;

        // Records the active index and related state; also the communication channel to searchers.
        string indexConfigurationFilePath = string.Empty;

        /// <summary>
        /// Resolves the index store folder, prepares the re-index staging folder and the
        /// sqlite feed store, and opens the shared Lucene writer/searcher over the folder.
        /// Private: use <see cref="Instance"/>.
        /// </summary>
        private FeedIndexer()
        {
            indexStoreMgr.CreateIndexStore(IndexerConstants.indexFolderName);

            folder = indexStoreMgr.GetIndexStoreFolderByName(IndexerConstants.indexFolderName);

            // This file records the active index and other state, and is also how searchers
            // are told which index to use.
            indexConfigurationFilePath = Path.Combine(folder, IndexerConstants.indexConfigurationFileName);

            folderForReIndexing = Path.Combine(folder, "ReIndexStaging");

            // Make sure the re-index staging folder exists.
            if (!Directory.Exists(folderForReIndexing))
            {
                Directory.CreateDirectory(folderForReIndexing);
            }

            indexFolder = folder;

            // BUGFIX: the old code concatenated before calling Path.Combine, e.g.
            // Path.Combine(folder + "SqliteFeedStore"), so sqliteFeedStoreFolder had no
            // path separator ("<folder>SqliteFeedStore"). The db file path happened to be
            // correct only because of the hard-coded "\\"; both now combine properly and
            // resolve to the same db location as before on Windows.
            sqliteFeedStoreFolder = Path.Combine(folder, "SqliteFeedStore");
            sqliteFeedStoreFilePath = Path.Combine(sqliteFeedStoreFolder, "sqliteFeedStore.db");
            sqliteFeedStoreHelper = new SQLiteFeedStoreHelper(sqliteFeedStoreFilePath);

            luceneManager = new LuceneWrap.Lib.LuceneManager<FeedItem>(folder);

            // Close the writer (without optimizing) so index files exist before the
            // searcher is opened against the same folder.
            luceneManager.FinalizeWriter(false);

            searcher = new LuceneWrap.Lib.LuceneSearcher(folder);
        }

        /// <summary>
        /// Exposes the shared writer wrapper for one-off maintenance tasks only;
        /// normal indexing should go through the public Index*/Delete* methods.
        /// </summary>
        [Obsolete("only for maintenance task")]
        public LuceneManager<FeedItem> GetLuceneManager()
        {
            return luceneManager;
        }

        /// <summary>
        /// Lazily-created process-wide singleton indexer.
        /// NOTE(review): initialization is not thread-safe — two threads hitting this
        /// simultaneously could each run the (I/O-heavy) constructor; confirm first
        /// access is single-threaded or add synchronization at the field level.
        /// </summary>
        public static FeedIndexer Instance
        {
            get
            {
                if (feedIndexer == null)
                    feedIndexer = new FeedIndexer();

                return feedIndexer;
            }
        }

        //todo: fault handling.. what if the indexwriter already working not able to open it.
        /// <summary>
        /// Persists one feed item to the sqlite store and (re-)indexes it in Lucene.
        /// Any existing document with the same id is deleted first, so the operation
        /// is idempotent per id.
        /// </summary>
        public void IndexFeedItem(FeedItem feedItem)
        {
            // Cross-writer folder lock: only one writer may touch the index folder at a time.
            using (FolderSyncLocker syncLocker = new FolderSyncLocker(indexFolder))
            {
                // The store assigns/normalizes the item's ID, so use the returned instance.
                feedItem = sqliteFeedStoreHelper.AddFeedToStore(feedItem);

                // CLEANUP: removed the dead numeric-ID branch — its "ID" prefixing was
                // already commented out, leaving only a no-op self-assignment.

                feedItem.Sanitize();

                MakeSureImageProcessed(feedItem);

                luceneManager.Create();

                // Make sure any existing item with the same id is deleted before re-adding.
                luceneManager.DeleteObjectFromIndexByQuery("id:" + feedItem.ID.ToString());

                luceneManager.AddItemToIndex(feedItem);
                luceneManager.FinalizeWriter(true);
            }
        }

        //todo: fault handling.. what if the indexwriter already working not able to open it.
        /// <summary>
        /// Persists a batch of feed items to the sqlite store and (re-)indexes them in
        /// Lucene. Existing documents with matching ids are deleted first, so the batch
        /// is idempotent per id.
        /// </summary>
        public void IndexFeedItems(List<FeedItem> feedItems)
        {
            using (FolderSyncLocker syncLocker = new FolderSyncLocker(indexFolder))
            {
                // The store assigns/normalizes IDs, so work with the returned list.
                feedItems = sqliteFeedStoreHelper.AddFeedToStore(feedItems);

                // CLEANUP: removed the dead numeric-ID branch — its "ID" prefixing was
                // already commented out, leaving only a no-op self-assignment.
                foreach (var feedItem in feedItems)
                {
                    feedItem.Sanitize();
                    MakeSureImageProcessed(feedItem);
                }

                luceneManager.Create();

                // Drop stale copies first so the add below cannot create duplicates.
                foreach (var feedItem in feedItems)
                {
                    luceneManager.DeleteObjectFromIndexByQuery("id:" + feedItem.ID.ToString());
                }

                // Documents are prepared here (rather than handing raw feeds to the lucene
                // manager) so this class keeps full control over analysis and field layout.
                List<Document> analyzedLuceneDocuments = AnalyzeFeedsForIndexing(feedItems);

                luceneManager.AddDocumentsToIndex(analyzedLuceneDocuments);
                luceneManager.FinalizeWriter(true);
            }
        }

        #region Analyzer and document preparation.

        /// <summary>Builds one analyzed Lucene document per feed item, in input order.</summary>
        private List<Document> AnalyzeFeedsForIndexing(List<FeedItem> feedItems)
        {
            return feedItems.Select(AnalyzeFeedForIndexing).ToList();
        }

        /// <summary>
        /// Converts a feed item into the Lucene <see cref="Document"/> that gets indexed.
        /// All text is lower-cased and cleaned via TempAnalyzer, and the full object is
        /// also stored protobuf-serialized in a binary field so searchers can rehydrate it.
        /// </summary>
        private Document AnalyzeFeedForIndexing(FeedItem feed)
        {
            Document doc = new Document();

            #region FeedItem's index Fields

            //feed.ID
            if (!string.IsNullOrEmpty(feed.ID))
            {
                doc.Add(new Field("id", feed.ID.ToLower(), Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
            }

            //feed.Title
            if (!string.IsNullOrEmpty(feed.Title))
            {
                doc.Add(new Field("title", TempAnalyzer.CleanString(feed.Title.ToLower(), true, false, false), Field.Store.NO, Field.Index.ANALYZED));
            }

            //feed.Link
            if (!string.IsNullOrEmpty(feed.Link))
            {
                string cleanedLink = TempAnalyzer.CleanString(feed.Link.ToLower(), true, true, true, false);

                doc.Add(new Field("link", cleanedLink, Field.Store.NO, Field.Index.ANALYZED_NO_NORMS));

                // BUGFIX: the title null-check above only guards the "title" field; this
                // branch called feed.Title.ToLower() unconditionally and threw on items
                // that have a link but no title.
                string safeTitle = feed.Title ?? string.Empty;
                string titleAndLink = TempAnalyzer.CleanString(safeTitle.ToLower() + " " + feed.Link);

                titleAndLink = TempAnalyzer.RemoveNoiceWordsFromString(titleAndLink);

                // De-duplicate tokens so repeated words don't dominate the combined field.
                titleAndLink = String.Join(" ", titleAndLink.Split(' ').Distinct());

                doc.Add(new Field("titlelink", titleAndLink, Field.Store.NO, Field.Index.ANALYZED_NO_NORMS));
            }

            //feed.Description
            if (!string.IsNullOrEmpty(feed.Description))
            {
                doc.Add(new Field("description", TempAnalyzer.CleanString(feed.Description.ToLower(), true, false, false), Field.Store.NO, Field.Index.ANALYZED));
            }

            //feed.category
            // BUGFIX: the old loop split the comma-separated list but then added the whole
            // unsplit string once per part; index each individual category value instead.
            // Also null/empty-guarded like every other string property here (the old code
            // threw on a null category).
            if (!string.IsNullOrEmpty(feed.category))
            {
                foreach (var cat in feed.category.Split(','))
                {
                    doc.Add(new Field("category", cat.ToLower(), Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
                }
            }

            //feed.Domain
            if (!string.IsNullOrEmpty(feed.Domain))
            {
                doc.Add(new Field("domain", feed.Domain.ToLower(), Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
            }

            //feed.Language
            if (!string.IsNullOrEmpty(feed.Language))
            {
                doc.Add(new Field("language", feed.Language.ToLower(), Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
            }

            //feed.RefUrl
            if (!string.IsNullOrEmpty(feed.RefUrl))
            {
                doc.Add(new Field("refurl", TempAnalyzer.CleanString(feed.RefUrl.ToLower(), true, true, true, false), Field.Store.NO, Field.Index.ANALYZED_NO_NORMS));
            }

            //feed.ImageUrl
            if (!string.IsNullOrEmpty(feed.ImageUrl))
            {
                doc.Add(new Field("imageurl", TempAnalyzer.CleanString(feed.ImageUrl.ToLower(), true, true, true, false), Field.Store.NO, Field.Index.ANALYZED_NO_NORMS));
            }

            //feed.PubDate — indexed at HOUR resolution.
            doc.Add(new Field("pubdate",
               DateTools.DateToString(feed.PubDate, DateTools.Resolution.HOUR),
               Field.Store.NO, Field.Index.NOT_ANALYZED));

            //feed.Tags — one field instance per non-empty tag.
            if (feed.Tags != null && feed.Tags.Count > 0)
            {
                foreach (var tg in feed.Tags)
                {
                    if (string.IsNullOrEmpty(tg))
                        continue;
                    doc.Add(new Field("tags", tg.ToLower(), Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
                }
            }

            // Auto-categorization is best-effort: a failure is logged but never blocks indexing.
            try
            {
                CategorizeFeedBySupervisorDataAnalytics(doc, feed);
            }
            catch (Exception ex)
            {
                TempLogger.Log("CategorizationError while categorizing feed from analytics : " + Environment.NewLine + Environment.StackTrace, ex);
            }
            #endregion

            #region un indexed fields in FeedItem class
            // Deliberately not indexed (revisit as needed): Author, Body, ImageHeight,
            // ImageWidth, Keyworkds, LastTouchTime, Rank, ThumbImageUrl, UpdatedDateTime.
            #endregion

            // To support the current design the whole object is stored inside the index;
            // to drop this, searchers would need to stop collecting the binary field.
            #region Storing Object data as a field inside the index itself.

            Field clrTypeField = new Field(CommonConstants.ClrObjectTypeName, feed.GetType().FullName.ToLower(), Field.Store.NO, Field.Index.NOT_ANALYZED);
            doc.Add(clrTypeField);

            Field isProtoBuffedField = new Field(CommonConstants.isProtoBuffed, "1", Field.Store.YES, Field.Index.NO);
            doc.Add(isProtoBuffedField);

            // NOTE(review): serializes with <Feed> as the type parameter while 'feed' is a
            // FeedItem — this only works if FeedItem's protobuf contract is compatible
            // with Feed's; confirm, or switch the type parameter to FeedItem.
            Field entireObjectToPersist = new Field(CommonConstants.ObjectBinaryFieldName, ObjectByteArrayConverter.ObjectToByteArrayProtoBuff<Feed>(feed), Field.Store.YES);
            doc.Add(entireObjectToPersist);

            #endregion

            return doc;
        }

        // Reader over the supervised analytics facet data used to auto-categorize feeds.
        FacetReader facetReader = new FacetReader();

        /// <summary>
        /// Runs the feed's title+link through the facet analytics and, for every category
        /// scoring above the thresholds, adds an "atag" field to the document and an entry
        /// to <c>feed.AnalysedTags</c>. The "*" entry in the stats is the aggregate/global
        /// score used as the denominator.
        /// </summary>
        private void CategorizeFeedBySupervisorDataAnalytics(Document doc, FeedItem feed)
        {
            if (feed == null)
                return;

            string testinput = feed.Title + " " + feed.Link;

            Dictionary<string, List<categoryRank>> deepAnalysisDetails = null;

            var analysisStats = facetReader.GetAnalyticalStatsForString(testinput, out deepAnalysisDetails);

            analysisStats = analysisStats.OrderByDescending(stat => stat.GlobalPercent).ToList();

            // Materialize the real (non-global) categories once instead of re-filtering
            // the list three times; order from the sort above is preserved.
            var categoryStats = analysisStats.Where(x => x.Name != "*").ToList();
            if (categoryStats.Count == 0)
            {
                // No category matched.
                return;
            }

            // ROBUSTNESS: the old code used First() and threw (caught+logged upstream)
            // when the "*" aggregate entry was missing; treat that as "nothing to tag".
            var globalStat = analysisStats.FirstOrDefault(x => x.Name == "*");
            if (globalStat == null)
                return;

            double maxGP = categoryStats.Max(x => x.GlobalPercent);
            double totalGP = globalStat.GlobalPercent;

            // A category qualifies when it is a 100% match outright, OR it carries more
            // than 25% of the global score AND more than 25% of the strongest category.
            const int ratioAgainstMaxPercentagThreshold = 25;
            const int overallPercentageThreashold = 25;

            if (feed.AnalysedTags == null) feed.AnalysedTags = new List<string>();

            foreach (var item in categoryStats)
            {
                var overallP = item.GlobalPercent * 100 / totalGP;
                var ratioAgainstMaxP = item.GlobalPercent * 100 / maxGP;

                if (item.GlobalPercent >= 100 || (ratioAgainstMaxP > ratioAgainstMaxPercentagThreshold && overallP > overallPercentageThreashold))
                {
                    var f = new Field("atag", item.Name.ToLower(), Field.Store.NO, Field.Index.ANALYZED_NO_NORMS);

                    feed.AnalysedTags.Add(item.Name.ToLower());

                    doc.Add(f);
                }
            }
        }

        /// <summary>
        /// Builds a string field for the given name/value, optionally stripping special
        /// characters first.
        /// BUGFIX: the old version constructed a Field, discarded it, ignored both the
        /// store and index options (always YES/ANALYZED) and returned null; it now honors
        /// the options and returns the field.
        /// </summary>
        private Fieldable GetStringField(string name, string value, Field.Store storeOption, Field.Index indexOption, bool removeStopSpecialChars = false)
        {
            if (removeStopSpecialChars)
            {
                value = TempAnalyzer.RemoveAllSpecialCharacterForIndexing(value);
            }

            return new Field(name.ToLower(), value, storeOption, indexOption);
        }

        /// <summary>
        /// Builds a numeric (long) field for the given value.
        /// BUGFIX: a stray ';' after the if made the condition a no-op, so the first
        /// branch always executed (index=true) and the second block was unreachable.
        /// The index option now actually controls whether the value is indexed.
        /// </summary>
        private Fieldable GetNumberField(string name, object value, Field.Store storeOption, Field.Index indexOption)
        {
            long number = Convert.ToInt64(value);

            bool index = indexOption == Field.Index.ANALYZED;

            NumericField nf = new NumericField(name.ToLower(), 0, storeOption, index);
            nf.SetLongValue(number);
            return nf;
        }


        #endregion

        /// <summary>
        /// Ensures the feed item's image dimensions are populated: uses a previously
        /// computed size from the size-info store when available, otherwise queues the
        /// item for background size calculation. Skipped entirely when the item already
        /// has a width or detailed feed processing is disabled in configuration.
        /// </summary>
        private void MakeSureImageProcessed(FeedItem feedItem)
        {
            // Historical note: CalculateFeedImageSizeJob.CalculateImageSizeForFeedImage
            // was used here before; size may eventually be captured in the downloader.

            // Already measured — nothing to do.
            if (feedItem.ImageWidth > 0)
                return;

            bool detailedProcessingEnabled = Convert.ToBoolean(NewsMine.Configuration.ConfigurationManager.Get("isDetailedFeedProcessRequired"));

            if (detailedProcessingEnabled == false)
                return;

            string imageUrl = feedItem.ImageUrl;

            // Only http(s) image urls are considered.
            if (string.IsNullOrEmpty(imageUrl) || !imageUrl.ToLower().StartsWith("http"))
                return;

            object cachedSize = Defaults.ImageSizeInfoStore.Get(imageUrl);

            if (cachedSize is Size)
            {
                Size knownSize = (Size)cachedSize;

                if (knownSize.IsEmpty == false)
                {
                    feedItem.ImageWidth = knownSize.Width;
                    feedItem.ImageHeight = knownSize.Height;
                    return;
                }
            }

            // No usable cached size — hand off to the background calculation queue.
            Defaults.ImageSizeCalculationQueue.Enqueue(feedItem);
        }

        /// <summary>
        /// Re-indexes the given feed items without touching the sqlite store: removes
        /// any existing documents with matching ids, then adds the items afresh.
        /// </summary>
        public void UpdateIndexWithFeedItems(List<FeedItem> feedItems)
        {
            // Serialize against other writers on the same index folder.
            using (var locker = new FolderSyncLocker(indexFolder))
            {
                luceneManager.Create();

                // Drop stale copies first so the add below cannot create duplicates.
                foreach (var feedItem in feedItems)
                {
                    luceneManager.DeleteObjectFromIndexByQuery("id:" + feedItem.ID.ToString());
                }

                luceneManager.AddItemsToIndex(feedItems);
                luceneManager.FinalizeWriter(true);
            }
        }

        /// <summary>
        /// Deletes every indexed item matching the given Lucene query string and
        /// commits the change immediately.
        /// </summary>
        public void DeleteFeedItemsByQuery(string query)
        {
            // Serialize against other writers on the same index folder.
            using (var locker = new FolderSyncLocker(indexFolder))
            {
                luceneManager.Create();
                luceneManager.DeleteObjectFromIndexByQuery(query);
                luceneManager.FinalizeWriter(true);
            }
        }

        /// <summary>
        /// Deletes the indexed item whose id field matches the given numeric id.
        /// </summary>
        public void DeleteFeedItemsByID(long id)
        {
            // Same lock/create/delete/finalize sequence as the query overload, so
            // simply delegate to it with an id query.
            DeleteFeedItemsByQuery("id:" + id.ToString());
        }

        /// <summary>
        /// Deletes all indexed items for the given domain. The domain is lower-cased to
        /// match how the "domain" field is indexed; a null/empty domain is a no-op.
        /// </summary>
        public void DeleteFeedItemsByDomain(string domain)
        {
            if (string.IsNullOrEmpty(domain))
                return;

            using (FolderSyncLocker syncLocker = new FolderSyncLocker(indexFolder))
            {
                luceneManager.Create();

                // CLEANUP: removed a redundant .ToString() on what is already a string.
                luceneManager.DeleteObjectFromIndexByQuery("domain:" + domain.ToLower());

                luceneManager.FinalizeWriter(true);
            }
        }


        #region Re-indexing stuff

        /// <summary>
        /// Forces a full merge (optimize) of the index and closes the writer without
        /// optimizing again. CLEANUP: removed an unused local that aliased the lucene
        /// manager; CONSISTENCY: now takes the folder sync lock like every other
        /// index-mutating operation in this class.
        /// </summary>
        public void OptimizeIndex()
        {
            using (FolderSyncLocker syncLocker = new FolderSyncLocker(indexFolder))
            {
                luceneManager.Create();

                // true = block until the merge completes.
                luceneManager.IndexWriter.Optimize(true);

                luceneManager.FinalizeWriter(false);
            }
        }


        /// <summary>
        /// Rebuilds the entire Lucene index from the sqlite feed store, paging through
        /// all records. On any failure the writer is rolled back and the exception
        /// rethrown, so a partial reindex never replaces the old index.
        /// NOTE(review): unlike the other mutating operations this does not take a
        /// FolderSyncLocker — confirm callers guarantee exclusive access during reindex.
        /// </summary>
        public void ReindexFromSqliteStore()
        {
            luceneManager.Create();

            const int recsPerPage = 500;

            SQLiteFeedStoreHelper sqliteFeedStore = sqliteFeedStoreHelper;

            long startId = 0;
            long nextStartId = 0;

            try
            {
                // Wipe the index; everything is re-added from the store below.
                luceneManager.DeleteObjectFromIndexByQuery("*:*");

                List<FeedItem> feeds;

                do
                {
                    feeds = sqliteFeedStore.GetAllByPage(startId, recsPerPage, out nextStartId);

                    if (feeds.Count > 0)
                    {
                        foreach (var feed in feeds)
                        {
                            feed.Sanitize();
                        }

                        var docs = AnalyzeFeedsForIndexing(feeds);

                        luceneManager.AddDocumentsToIndex(docs);
                    }

                    startId = nextStartId;
                }
                while (feeds.Count > 0); // an empty page means the store is exhausted

                luceneManager.FinalizeWriter(true);
            }
            catch (Exception)
            {
                // Abandon partial writes so we don't leave a half-built index behind.
                luceneManager.IndexWriter.Rollback();
                throw;
            }
        }

        /// <summary>
        /// Re-analyzes every document already in the index: reads each stored document
        /// back into a FeedItem, rebuilds its Lucene document, and writes it again.
        /// BUGFIX: the old do/while read document 0 even when the index was empty, which
        /// threw and rolled the writer back; an empty index is now a clean no-op.
        /// </summary>
        public void DoSelfReIndex()
        {
            DateTime start = DateTime.UtcNow;

            luceneManager.Create();

            // The reader snapshot is taken from the writer before the delete below, so
            // it still exposes the pre-delete documents we are re-indexing from.
            var indexReader = luceneManager.IndexWriter.GetReader();
            int maxDoc = indexReader.MaxDoc();

            try
            {
                luceneManager.DeleteObjectFromIndexByQuery("*:*");

                for (int docIndexCounter = 0; docIndexCounter < maxDoc; docIndexCounter++)
                {
                    var d = indexReader.Document(docIndexCounter);

                    FeedItem tempFeedItem = LuceneReflection.GetObjFromDocument<FeedItem>(d);

                    var newDoc = AnalyzeFeedForIndexing(tempFeedItem);

                    luceneManager.IndexWriter.AddDocument(newDoc);
                }

                luceneManager.FinalizeWriter(true);

                DateTime endTime = DateTime.UtcNow;

                LogReIndexStatus("Time take for reindex " + (endTime - start).TotalSeconds.ToString() + " seconds; and number of docs are : " + maxDoc.ToString());
            }
            catch (Exception ex)
            {
                TempLogger.Log("Error while reindexing" + Environment.NewLine + Environment.StackTrace, ex);
                luceneManager.IndexWriter.Rollback();

                throw;
            }
        }


        #endregion


        // Appends a reindex progress/status line to the dedicated reIndexingStatus.txt log.
        private void LogReIndexStatus(string status)
        {
            TempLogger.LogCustom("reIndexingStatus.txt", status);
        }


    }
}
