﻿using PromoOffersCrawler.Crawler.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading.Tasks;
using PromoOffersCrawler.Crawler.ExtensionMethods;
using PromoOffersCrawler.Crawler.Utils;
using System.Collections.Concurrent;
using HtmlAgilityPack;

namespace PromoOffersCrawler.Crawler.Managers
{
    /// <summary>
    /// Main class implementing the assigned task:
    /// Реализирайте класификатор на оферти, който обхожда сайтове за оферти (например http://grabo.bg, http://grupovo.bg и др.) и записва намерените оферти в база от данни (таблица data със структура: name, description, price, link, validTo, type). Където type е типът на офертата (нека има възможност за типове: екскурзия, вечеря, друго). Класификаторът работи с минимум 2 сайта за оферти. Класифицираните данни да се представят таблично. (възможност за работа в екип от двама души)
    /// </summary>
    public class WebSiteCrawler
    {
        /// <summary>
        /// Listing pages that will be crawled, keyed by url.
        /// </summary>
        private ConcurrentDictionary<string, HtmlAnalyzer> crawlablePages = new ConcurrentDictionary<string, HtmlAnalyzer>();

        /// <summary>
        /// Offer details pages discovered on the listing pages, keyed by url.
        /// </summary>
        private ConcurrentDictionary<string, HtmlAnalyzer> detailsPagesToCrawl = new ConcurrentDictionary<string, HtmlAnalyzer>();

        /// <summary>
        /// Maps a discovered page url to the url of the category (listing) page it was found on.
        /// Used later to derive the offer type from the category anchor text.
        /// </summary>
        private ConcurrentDictionary<string, string> detailsToCategory = new ConcurrentDictionary<string, string>();

        /// <summary>
        /// Web site configuration of the site to crawl.
        /// </summary>
        private WebSite site;

        /// <summary>
        /// Report-progress manager for easier and cleaner calculation of the overall crawl progress.
        /// </summary>
        private ProgressManager progressManager;

        /// <summary>
        /// Caching web client used for downloading and persisting the web pages.
        /// </summary>
        private WebClientCacheEnabled webClient;

        /// <summary>
        /// Fast key/value collection of site-specific knowledge (selectors, keywords, actions).
        /// </summary>
        private Dictionary<string, string> knowledge;

        /// <summary>
        /// Repository instance used by this crawler to persist offers and resolve type ids.
        /// </summary>
        private Repository repository;

        /// <summary>
        /// Initializes a new crawler for a specific web site configuration.
        /// </summary>
        /// <param name="site">Configuration (base url and knowledge entries) of the site to crawl.</param>
        public WebSiteCrawler(WebSite site)
        {
            this.site = site;
            this.progressManager = new ProgressManager();
            this.webClient = new WebClientCacheEnabled(true);
            this.knowledge = site.WebSiteKknowledges.ToDictionary(x => x.Key, x => x.Value);
            this.repository = new Repository();
        }

        /// <summary>
        /// Returns the current progress of the crawl task.
        /// </summary>
        /// <returns>Integer value from 0 to 100.</returns>
        public int GetProgres()
        {
            return progressManager.CalculateOverwallProgress();
        }

        /// <summary>
        /// Main entry point for the crawl logic.
        /// Discovers navigation pages, collects detail pages, extracts offers and stores them.
        /// All exceptions are reported through the <see cref="NewMessage"/> event instead of propagating.
        /// </summary>
        public void Crawl()
        {
            try
            {
                OnNewMessage("Start crawl for " + site.BaseUrl);
                OnNewMessage("Analyzing navigation on all pages");

                // The parallel query is lazy; ToList forces the recursive navigation analysis to run.
                AnalyzePagesAndSubpagesNavigation(site.BaseUrl, "Generic").ToList();

                progressManager.InitializeTotalOperation(crawlablePages.Count * 2);
                OnNewMessage("Found " + crawlablePages.Count + " pages with listing of details pages");
                OnNewMessage("Analyzing list of detail pages");

                Parallel.ForEach(crawlablePages, x => AnalyzeDetailsPagesNavigation(x.Value));
                OnNewMessage("Found " + detailsPagesToCrawl.Count + " detail pages");

                OnNewMessage("Gathering data from detail pages");
                var offersToInsert = detailsPagesToCrawl//.AsParallel()
                                                        .Where(x => IsForOfferDetail(x.Value.Url))
                                                        .Select(x => new { Analyzer = new OfferAnalyzer(x.Value, knowledge), OfferUrl = x.Key })
                                                        .Select(x => new Offer()
                                                                {
                                                                    Url = x.OfferUrl,
                                                                    Price = x.Analyzer.GetPrice(),
                                                                    Name = x.Analyzer.GetName(),
                                                                    Description = x.Analyzer.GetDescription(),
                                                                    TypeID = ExtractTypeId(x.OfferUrl)
                                                                });
                var offers = offersToInsert.ToList();
                repository.Insert(offers);
                OnNewMessage("Completed");
            }
            catch (AggregateException e)
            {
                // Parallel.ForEach / PLINQ wrap worker failures; report each inner exception separately.
                foreach (var ex in e.InnerExceptions)
                {
                    OnNewMessage(ex.ToString());
                }
            }
            catch (Exception e)
            {
                OnNewMessage(e.ToString());
            }
        }

        /// <summary>
        /// Decides whether the given url points to an offer details page.
        /// When the site configuration declares no details action, every url is accepted.
        /// </summary>
        private bool IsForOfferDetail(string url)
        {
            // TryGetValue instead of the indexer: a missing configuration entry must not
            // throw KeyNotFoundException - the null/whitespace fallback clearly intends
            // the filter to be optional.
            string action;
            if (knowledge.TryGetValue(Constants.OfferDetailsAction, out action)
                && !string.IsNullOrWhiteSpace(action))
            {
                return url.Contains(action);
            }

            return true;
        }

        /// <summary>
        /// Resolves the offer type id from the anchor text of the category page
        /// the offer url was discovered on.
        /// </summary>
        private int ExtractTypeId(string url)
        {
            return repository.GetTypeId(webClient.GetAnchorText(detailsToCategory[url]));
        }

        /// <summary>
        /// Collects all anchors to offer details pages from a listing page and queues
        /// them for crawling, remembering which category page each url came from.
        /// </summary>
        private void AnalyzeDetailsPagesNavigation(HtmlAnalyzer page)
        {
            List<HtmlNode> detailsPageUrlsList = new List<HtmlNode>();
            var detailsPageUrls = page.GetAllDescedantOf(knowledge[Constants.OffersListWrap], "a");
            if (detailsPageUrls != null)
            {
                detailsPageUrlsList.AddRange(detailsPageUrls);
            }
            else
            {
                // Fall back to the more specific configured element paths when the
                // simple wrap/anchor lookup finds nothing.
                AddRangeIfAny(detailsPageUrlsList, page.GetAllDescedantOf(knowledge[Constants.OffersListWrap], knowledge[Constants.OffersListElemenetPath]));
                AddRangeIfAny(detailsPageUrlsList, page.GetAllDescedantOf(knowledge[Constants.OffersListWrap], knowledge[Constants.OffersListWrapTagName], knowledge[Constants.OffersListElemenetPath]));
                AddRangeIfAny(detailsPageUrlsList, page.GetAllDescedantOf(knowledge[Constants.OffersListWrap], knowledge[Constants.OffersListWrapTagName], knowledge[Constants.OffersListElemenetPathSecondary]));
            }

            Parallel.ForEach(detailsPageUrlsList, x =>
            {
                var url = x.Attributes.FirstOrDefault(y => y.Name == "href");
                if (url != null)
                {
                    // Factory overload of GetOrAdd: only download the page when its url
                    // is not already queued (the value overload downloads unconditionally).
                    detailsPagesToCrawl.GetOrAdd(url.Value, u => webClient.GetDownloadStringAsParsedHtml(u, x.InnerText));
                    detailsToCategory[url.Value] = page.Url;
                }
            });
        }

        /// <summary>
        /// Appends the nodes to the target list when the lookup found anything.
        /// </summary>
        private static void AddRangeIfAny(List<HtmlNode> target, IEnumerable<HtmlNode> nodes)
        {
            if (nodes != null)
            {
                target.AddRange(nodes);
            }
        }

        /// <summary>
        /// Analyzes a page and recursively follows every anchor that could be meaningful to crawl.
        /// The query produces no elements itself; its purpose is the side effect of filling
        /// <see cref="crawlablePages"/>, so callers enumerate it only to force execution.
        /// </summary>
        /// <param name="url">The url to analyze.</param>
        /// <param name="urlText">The anchor text the url was discovered with.</param>
        /// <returns>A lazy parallel query that, when enumerated, crawls the page and all subpages.</returns>
        private ParallelQuery<string> AnalyzePagesAndSubpagesNavigation(string url, string urlText)
        {
            List<KeyValuePair<string, string>> currentLevelPagesToCrawl = new List<KeyValuePair<string, string>>();
            if (!crawlablePages.ContainsKey(url))
            {
                var currentPage = webClient.GetDownloadStringAsParsedHtml(url, urlText);

                // TryAdd gates the analysis: if another thread registered this url between
                // the ContainsKey check and here, the page is analyzed only once.
                if (crawlablePages.TryAdd(url, currentPage))
                {
                    var pagesToCrawl = GetCrawlPointOfInteresAnchorHrefs(currentPage);

                    var notSeenYet = pagesToCrawl.Where(x => !crawlablePages.ContainsKey(x.Key)).ToList();

                    currentLevelPagesToCrawl.AddRange(notSeenYet);
                    notSeenYet.ForEach(x => this.detailsToCategory[x.Key] = url);
                }
            }

            // Parallel recursive check for additional navigation on every discovered page.
            return currentLevelPagesToCrawl.AsParallel().SelectMany(x => AnalyzePagesAndSubpagesNavigation(x.Key, x.Value));
        }

        /// <summary>
        /// Analyzes all anchors on a page and yields those that are points of interest
        /// for the crawler (href + cleaned anchor text), skipping anchors whose title
        /// or text matches the configured negative keywords.
        /// </summary>
        private IEnumerable<KeyValuePair<string, string>> GetCrawlPointOfInteresAnchorHrefs(HtmlAnalyzer html)
        {
            if (!knowledge.ContainsKey(Constants.NavigationWrap))
            {
                yield break;
            }

            IEnumerable<HtmlNode> anchors;
            if (knowledge.ContainsKey(Constants.NavigationWrapElement))
            {
                anchors = html.GetAllDescedantOf(knowledge[Constants.NavigationWrap], knowledge[Constants.NavigationWrapElement], "a");
            }
            else
            {
                anchors = html.GetAllDescedantOf(knowledge[Constants.NavigationWrap], "a");
            }

            if (anchors == null)
            {
                // No navigation on this page. Most probably about page of Grabo.bg :)
                yield break;
            }

            // Hoisted out of the loop: the negative keyword list does not change per anchor.
            string[] negativeKeyWords = null;
            string rawKeyWords;
            if (knowledge.TryGetValue(Constants.KeywordsNegativeExample, out rawKeyWords))
            {
                negativeKeyWords = rawKeyWords.Split(';');
            }

            foreach (var anchor in anchors)
            {
                var href = anchor.Attributes["href"];
                if (href == null)
                {
                    // Anchor without a target (e.g. a named anchor) - nothing to crawl.
                    continue;
                }

                string anchorText = anchor.InnerText.RemoveDigits().RemoveHtml().Trim();

                if (negativeKeyWords == null
                    || (!html.HasAttribute(anchor, "title", negativeKeyWords)
                        && !negativeKeyWords.Any(kw => anchorText.Contains(kw))))
                {
                    yield return new KeyValuePair<string, string>(href.Value, anchorText);
                }
            }
        }

        /// <summary>
        /// Raises the <see cref="NewMessage"/> event with the given message text.
        /// </summary>
        private void OnNewMessage(string message)
        {
            // Copy to a local to avoid a race between the null check and the invocation
            // when a subscriber detaches from another thread.
            EventHandler handler = NewMessage;
            if (handler != null)
            {
                handler(this, new NewMessageEventArgs() { Message = message });
            }
        }

        /// <summary>
        /// Identifier assigned by the owner of this crawler instance.
        /// </summary>
        public int UniqueIdentifier { get; set; }

        /// <summary>
        /// Raised for every progress/status message produced during the crawl.
        /// </summary>
        public event EventHandler NewMessage;
    }
}
