﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NewsMine.DomainObjects;
using NewsMine.Storage;
using NewsMine.Utilities;

namespace NewsDiscoveryEngine
{
    // Note: this needs to be modified to test against the main store.

    [Serializable]
    public class HtmlLinksDedupeAgainstMainHtmlFeedStore : BaseJob
    {
        /// <summary>URL of the feed source whose links are being deduplicated. Required by <see cref="ExecuteJob"/>.</summary>
        public string FeedSourceUrl { get; set; }

        /// <summary>Links that survived the first round of deduplication. May be null (treated as a no-op).</summary>
        public List<HtmlLink> HtmlLinksAfterfirstroundOfDedupe { get; set; }

        /// <summary>
        /// Runs the second-round dedupe against the main link and title stores and
        /// forwards the surviving links to <c>ProcessOutput</c>.
        /// </summary>
        /// <returns>Always true, including when there is nothing to process.</returns>
        /// <exception cref="ApplicationException">Thrown when <see cref="FeedSourceUrl"/> is null or empty.</exception>
        public override bool ExecuteJob()
        {
            if (string.IsNullOrEmpty(FeedSourceUrl))
                throw new ApplicationException("FeedsourceUrl can't be null or empty for deduping");

            // Nothing to dedupe; treat as a successful no-op.
            if (HtmlLinksAfterfirstroundOfDedupe == null)
                return true;

            var results = DedupeHtmlLinksAginstTitleAndLink(this.FeedSourceUrl, this.HtmlLinksAfterfirstroundOfDedupe);

            ProcessOutput(results);
            return true;
        }

        /// <summary>
        /// Dedupes first by link, then by title, against the main feed stores.
        /// (Misspelled method names are kept for backward compatibility with existing callers.)
        /// </summary>
        /// <param name="feedSourceUrl">Currently unused by the dedupe itself; kept for signature compatibility.</param>
        /// <param name="HtmlLinks">Links to filter; may be null or empty.</param>
        /// <returns>The links not already present in either main store.</returns>
        public static List<HtmlLink> DedupeHtmlLinksAginstTitleAndLink(string feedSourceUrl, List<HtmlLink> HtmlLinks)
        {
            var result = DedupeHtmlLinksAgianstLink(feedSourceUrl, HtmlLinks);
            return DedupeHtmlLinksAgainstTitle(feedSourceUrl, result);
        }

        /// <summary>
        /// Returns the links whose URL is not already present in the main unique-link store.
        /// </summary>
        /// <param name="feedSourceUrl">Currently unused; kept for signature compatibility.</param>
        /// <param name="HtmlLinks">Links to filter; a null or empty list is returned unchanged.</param>
        public static List<HtmlLink> DedupeHtmlLinksAgianstLink(string feedSourceUrl, List<HtmlLink> HtmlLinks)
        {
            if (HtmlLinks == null || HtmlLinks.Count == 0)
                return HtmlLinks;

            var htmlFeedStore = Defaults.UniqueHtmlFeedLinkStore;

            List<HtmlLink> resultedLinksAfterDedupe = new List<HtmlLink>();

            foreach (var htmlLink in HtmlLinks)
            {
                // Check both the normalized and the raw key so neither indexing scheme lets a duplicate through.
                if (!htmlFeedStore.IsKeyExists(NormalizeForIndexing(htmlLink.Link))
                    && !htmlFeedStore.IsKeyExists(htmlLink.Link))
                {
                    resultedLinksAfterDedupe.Add(htmlLink);
                }
            }

            return resultedLinksAfterDedupe;
        }

        /// <summary>
        /// Returns the links whose title is not already present in the main unique-title store.
        /// NOTE(review): links with a null/whitespace title are dropped entirely rather than
        /// passed through — confirm this is intended.
        /// </summary>
        /// <param name="feedSourceUrl">Currently unused; kept for signature compatibility.</param>
        /// <param name="HtmlLinks">Links to filter; a null or empty list is returned unchanged.</param>
        public static List<HtmlLink> DedupeHtmlLinksAgainstTitle(string feedSourceUrl, List<HtmlLink> HtmlLinks)
        {
            if (HtmlLinks == null || HtmlLinks.Count == 0)
                return HtmlLinks;

            var uniqueTitleStore = Defaults.UniqueTitleStore;

            List<HtmlLink> resultedLinksAfterDedupe = new List<HtmlLink>();

            foreach (var htmlLink in HtmlLinks)
            {
                if (string.IsNullOrWhiteSpace(htmlLink.Title))
                    continue;

                if (!uniqueTitleStore.IsKeyExists(NormalizeForIndexing(htmlLink.Title.Trim()))
                    && !uniqueTitleStore.IsKeyExists(htmlLink.Title))
                {
                    resultedLinksAfterDedupe.Add(htmlLink);
                }
                else
                {
                    // TODO: temporary diagnostic, remove after testing. Guarded so an
                    // inaccessible log file can no longer crash the whole dedupe job.
                    try
                    {
                        System.IO.File.AppendAllText(@"C:\DuplicatedTitles.txt", Environment.NewLine + htmlLink.Title);
                    }
                    catch (Exception)
                    {
                        // Best-effort diagnostics only; ignore write failures.
                    }
                }
            }

            return resultedLinksAfterDedupe;
        }

        // Normalizes a key the same way the stores index it:
        // strip special characters, then remove all spaces.
        private static string NormalizeForIndexing(string value)
        {
            return NewsMine.Utilities.TempAnalyzer.RemoveAllSpecialCharacterForIndexing(value).Replace(" ", string.Empty);
        }
    }
}
