﻿using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using HtmlAgilityPack;
using xKnight.Data;
using xKnight.Models;
using xKnight.WebCrawling.Event;
using xKnight.WebCrawling.Core;

namespace xKnight.WebCrawling
{
    /// <summary>
    /// A single crawling worker. Several agents may run concurrently against one
    /// <see cref="CrawlingSharedResource"/>, which holds the shared work queue,
    /// visited-page/form hashes and the lock that synchronizes them.
    /// </summary>
    internal class CrawlerAgent
    {
        #region Delegates

        /// <summary>
        /// Delegate used for making async (APM) calls to <see cref="Crawl"/>.
        /// </summary>
        private delegate void CrawlAsyncCaller();

        #endregion

        #region Events

        /// <summary>
        /// Raised just after crawling is done.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        public delegate void CrawlAgentCompletedEventHandler(object sender, EventArgs e);
        public event CrawlAgentCompletedEventHandler CrawlAgentCompleted;

        /// <summary>
        /// Raised just before crawling starts.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        public delegate void CrawlAgentStartedEventHandler(object sender, EventArgs e);
        public event CrawlAgentStartedEventHandler CrawlAgentStarted;

        /// <summary>
        /// Raised to report the inner status of this CrawlerAgent.
        /// </summary>
        public event xKnight.WebCrawling.Crawler.CrawlAnnouncedEventHandler CrawlAnnounced;

        #endregion

        #region Fields

        // State shared with the other agents crawling the same host.
        private readonly CrawlingSharedResource _sharedResource;

        #endregion

        #region Constructors

        /// <summary>
        /// Creates a crawling agent.
        /// </summary>
        /// <param name="sharedResource">Used for synchronizing between multiple agents.</param>
        public CrawlerAgent(CrawlingSharedResource sharedResource)
        {
            _sharedResource = sharedResource;
        }

        #endregion

        #region Properties

        /// <summary>
        /// The shared state this agent cooperates on.
        /// </summary>
        public CrawlingSharedResource CrawlingSharedResource
        {
            get { return _sharedResource; }
        }

        #endregion

        #region Interface

        /// <summary>
        /// Crawls the host: downloads webpages, extracts forms and links.
        /// Returns when crawling is stopped or no unvisited page remains.
        /// </summary>
        public void Crawl()
        {
            // Insert the host address into the queue as the first page.
            AddDomainPageToQueue();

            Webpage page;

            // GetNotVistedPage marks this agent active atomically with the dequeue
            // (under the shared lock), so other agents never observe "queue empty
            // and nobody active" while a page is still in flight. The agent is
            // only marked inactive again after the page's links were enqueued.
            while (!_sharedResource.StopCrawling && (page = GetNotVistedPage()) != null)
            {
                DoCrawling(page);

                _sharedResource.DecrementNumberOfActiveAgents();
            }
        }

        /// <summary>
        /// Makes an async call to <see cref="Crawl"/> and raises
        /// <see cref="CrawlAgentStarted"/> / <see cref="CrawlAgentCompleted"/> around it.
        /// </summary>
        public void CrawlAsync()
        {
            CrawlAsyncCaller crawlAsyncCaller = new CrawlAsyncCaller(Crawl);
            AsyncCallback callback = new AsyncCallback(OnCrawlComplete);

            OnCrawlAgentStarted();
            crawlAsyncCaller.BeginInvoke(callback, null);
        }

        #endregion

        #region Private Methods

        /// <summary>
        /// Processes one page: download, then (if any HTML came back) extract and
        /// persist its forms, save the page, and enqueue its outgoing links.
        /// </summary>
        private void DoCrawling(Webpage page)
        {
            DownloadPage(page);

            // A null Html means the download failed or was halted; nothing to parse.
            if (page.Html != null)
            {
                Form[] forms = ExtractForms(page);

                AddFormsToPage(page, forms);

                DataLayer.Save(page);

                Webpage[] pages = ExtractLinks(page);

                AddPagesToQueue(pages);
            }
        }

        /// <summary>
        /// Attaches the not-yet-seen forms to the page and records them as seen,
        /// so the same form is not stored twice across agents.
        /// </summary>
        private void AddFormsToPage(Webpage page, Form[] forms)
        {
            lock (_sharedResource.SharedLock)
            {
                for (int i = 0; i < forms.Length; i++)
                {
                    string id = forms[i].GetId();
                    if (!FormHasVisitedBefore(id))
                    {
                        page.Forms.Add(forms[i]);
                        AddToVisitedFormList(id);
                    }
                }
            }
        }

        /// <summary>
        /// Records a form id as already seen. Caller must hold the shared lock.
        /// </summary>
        private void AddToVisitedFormList(string id)
        {
            _sharedResource.SharedFormHash.Add(id);
        }

        /// <summary>
        /// True if a form with this id was already seen. Caller must hold the shared lock.
        /// </summary>
        private bool FormHasVisitedBefore(string id)
        {
            return _sharedResource.SharedFormHash.Contains(id);
        }

        /// <summary>
        /// Seeds the shared queue with the host's root page.
        /// </summary>
        private void AddDomainPageToQueue()
        {
            lock (_sharedResource.SharedLock)
            {
                // Avoid inserting the domain page multiple times, because the first
                // step of every CrawlerAgent is to call AddDomainPageToQueue().
                if (!_sharedResource.SharedPageHash.Contains(_sharedResource.Host.HostName))
                {
                    Webpage page = new Webpage();
                    page.Depth = 0;
                    page.Url = _sharedResource.Host.HostName;
                    page.HostId = _sharedResource.Host.Id;

                    _sharedResource.AddTotalLinksFound(1);
                    _sharedResource.SharedPageHash.Add(page.Url);
                    _sharedResource.SharedQueue.Enqueue(page);
                }
            }
        }

        /// <summary>
        /// Enqueues the given pages, keeping only well-formed local links that are
        /// within the configured depth and have not been visited before.
        /// </summary>
        private void AddPagesToQueue(Webpage[] pages)
        {
            if (pages == null)
                return;

            lock (_sharedResource.SharedLock)
            {
                for (int i = 0; i < pages.Length; i++)
                {
                    // Only insert local links, not links from other domains.
                    if (IsWellFormedUri(pages[i].Url) && IsUrlLocal(pages[i].Url))
                    {
                        if (HasValidDepth(pages[i]) && !PageHasVisitedBefore(pages[i]))
                        {
                            _sharedResource.SharedPageHash.Add(pages[i].Url);
                            _sharedResource.SharedQueue.Enqueue(pages[i]);
                        }
                    }
                }
            }
        }

        /// <summary>
        /// True if this url was already queued or crawled. Caller must hold the shared lock.
        /// </summary>
        private bool PageHasVisitedBefore(Webpage webpage)
        {
            return _sharedResource.SharedPageHash.Contains(webpage.Url);
        }

        /// <summary>
        /// True if the page is still above the configured maximum crawl depth.
        /// </summary>
        private bool HasValidDepth(Webpage webpage)
        {
            return _sharedResource.CrawlerSetting.MaxDepth > webpage.Depth;
        }

        /// <summary>
        /// Determines whether <paramref name="url"/> belongs to the crawled host.
        /// Host names are compared case-insensitively and a leading "www." is ignored.
        /// When subdomain crawling is enabled, only exact matches or real subdomains
        /// ("sub.host.com") are accepted — a plain suffix test would wrongly accept
        /// e.g. "evilhost.com" for host "host.com".
        /// </summary>
        private bool IsUrlLocal(string url)
        {
            string pageHost = StripLeadingWww(new Uri(url).Host);
            string crawlHost = StripLeadingWww(new Uri(_sharedResource.Host.HostName).Host);

            if (string.Equals(pageHost, crawlHost, StringComparison.OrdinalIgnoreCase))
                return true;

            if (_sharedResource.CrawlerSetting.CrawlSubDomains)
                return pageHost.EndsWith("." + crawlHost, StringComparison.OrdinalIgnoreCase);

            return false;
        }

        /// <summary>
        /// Removes a leading "www." from a host name, if present.
        /// </summary>
        private static string StripLeadingWww(string host)
        {
            if (host.StartsWith("www.", StringComparison.OrdinalIgnoreCase))
                return host.Substring(4);

            return host;
        }

        /// <summary>
        /// True if the string is a well-formed absolute URI.
        /// </summary>
        private bool IsWellFormedUri(string uri)
        {
            return Uri.IsWellFormedUriString(uri, UriKind.Absolute);
        }

        /// <summary>
        /// Returns a page which has not been visited yet. While the queue is empty
        /// but another agent is still active, waits — the active agent may enqueue
        /// new pages. On success this agent is marked active atomically with the
        /// dequeue, which closes the race where a dequeued page is in flight while
        /// the queue looks empty and no agent looks active.
        /// </summary>
        /// <returns>A not-yet-visited page; null once all pages were visited.</returns>
        private Webpage GetNotVistedPage()
        {
            while (true)
            {
                lock (_sharedResource.SharedLock)
                {
                    if (_sharedResource.SharedQueue.Count != 0)
                    {
                        _sharedResource.IncrementNumberOfActiveAgents();
                        return _sharedResource.SharedQueue.Dequeue();
                    }
                }

                // Queue is empty: if no other agent can still produce pages, give up.
                if (!_sharedResource.HasActiveAgent())
                    return null;

                Thread.Sleep(1000);
            }
        }

        /// <summary>
        /// Downloads the page's contents into <c>page.Html</c> (null on failure)
        /// and announces start/finish/failure status. Never throws; web and other
        /// errors are reported through <see cref="CrawlAnnounced"/>.
        /// </summary>
        /// <param name="page">The page to download; Html and DateTime are updated.</param>
        private void DownloadPage(Webpage page)
        {
            page.Html = null;

            CrawlAnnounceItem item = new CrawlAnnounceItem(page, WebCrawlingStatus.DownloadingStarted, null, DateTime.Now, _sharedResource);
            OnCrawlAnnounced(item);

            try
            {
                var result = WebCrawler.DownloadPage(page.Url, CrawlingSharedResource.Cookies);

                var crawlStatus = result.WebCrawlingStatus;
                page.Html = result.ResponseText;
                page.DateTime = DateTime.Now;

                if (crawlStatus == WebCrawlingStatus.DownloadingFinished)
                {
                    item = new CrawlAnnounceItem(page, WebCrawlingStatus.DownloadingFinished, null, DateTime.Now, _sharedResource);
                    OnCrawlAnnounced(item);
                }
                else if (crawlStatus == WebCrawlingStatus.DownloadingHaltedNoText)
                {
                    item = new CrawlAnnounceItem(page, WebCrawlingStatus.DownloadingHaltedNoText, "این آدرس محتوی متن نمی باشد.", DateTime.Now, _sharedResource);
                    OnCrawlAnnounced(item);
                }
                else if (crawlStatus == WebCrawlingStatus.DownloadingHaltedIncorrectResponse)
                {
                    item = new CrawlAnnounceItem(page, WebCrawlingStatus.DownloadingHaltedIncorrectResponse, "خطایی در حین بارگذاری صفحه رخ داد", DateTime.Now, _sharedResource);
                    OnCrawlAnnounced(item);
                }
            }
            catch (WebException ex)
            {
                // Include the HTTP status in the announcement when the server responded.
                HttpWebResponse response = ex.Response as HttpWebResponse;

                if (response != null)
                    item = new CrawlAnnounceItem(page, WebCrawlingStatus.DownloadingHalted, response.StatusCode + " " + response.StatusDescription + "خطایی در حین بارگذاری صفحه رخ داد", DateTime.Now, _sharedResource);
                else
                    item = new CrawlAnnounceItem(page, WebCrawlingStatus.DownloadingHalted, "خطایی در حین بارگذاری صفحه رخ داد", DateTime.Now, _sharedResource);

                OnCrawlAnnounced(item);
            }
            catch
            {
                // Deliberate best-effort: any other failure halts this page only.
                item = new CrawlAnnounceItem(page, WebCrawlingStatus.DownloadingHalted, "خطایی در حین بارگذاری صفحه رخ داد", DateTime.Now, _sharedResource);
                OnCrawlAnnounced(item);
            }

            _sharedResource.IncrementTotalPagesGetDownloaded();
        }

        /// <summary>
        /// Extracts all links from the parent's HTML and wraps them as child
        /// <see cref="Webpage"/>s one level deeper, announcing start and finish.
        /// </summary>
        private Webpage[] ExtractLinks(Webpage parent)
        {
            CrawlAnnounceItem item = new CrawlAnnounceItem(parent, WebCrawlingStatus.ExtractingLinksStarted, null, DateTime.Now, _sharedResource);
            OnCrawlAnnounced(item);

            string[] links = WebCrawler.ExtractLinks(parent.Url, parent.Html);

            List<Webpage> webpages = new List<Webpage>();

            for (int i = 0; i < links.Length; i++)
            {
                webpages.Add(new Webpage()
                {
                    Url = links[i],
                    Depth = parent.Depth + 1,
                    HostId = parent.HostId,
                    RefererId = parent.Id
                });
            }

            _sharedResource.AddTotalLinksFound(links.Length);

            item = new CrawlAnnounceItem(parent, WebCrawlingStatus.ExtractingLinksFinished, string.Format("این صفحه دارای {0} لینک می باشد.", links.Length), DateTime.Now, _sharedResource);
            OnCrawlAnnounced(item);

            return webpages.ToArray();
        }

        /// <summary>
        /// Extracts all forms from the page's HTML, announcing start and finish.
        /// </summary>
        private Form[] ExtractForms(Webpage page)
        {
            CrawlAnnounceItem item = new CrawlAnnounceItem(page, WebCrawlingStatus.ExtractingFormsStarted
                , null, DateTime.Now, _sharedResource);
            OnCrawlAnnounced(item);

            Form[] forms = WebCrawler.ExtractForms(page.Url, page.Html);
            _sharedResource.AddTotalFormsFound(forms.Length);

            item = new CrawlAnnounceItem(page, WebCrawlingStatus.ExtractingFormsFinished,
                string.Format("این صفحه دارای {0} فرم می باشد.", forms.Length), DateTime.Now, _sharedResource);
            OnCrawlAnnounced(item);

            return forms;
        }

        #endregion

        #region EventHandlers

        // APM completion callback for CrawlAsync.
        private void OnCrawlComplete(IAsyncResult result)
        {
            OnCrawlCompleted();
        }

        private void OnCrawlAgentStarted()
        {
            // Copy the handler to a local so a concurrent unsubscribe between the
            // null check and the invocation cannot cause a NullReferenceException.
            CrawlAgentStartedEventHandler handler = CrawlAgentStarted;
            if (handler != null)
                handler(this, EventArgs.Empty);
        }

        private void OnCrawlCompleted()
        {
            CrawlAgentCompletedEventHandler handler = CrawlAgentCompleted;
            if (handler != null)
                handler(this, EventArgs.Empty);
        }

        private void OnCrawlAnnounced(CrawlAnnounceItem item)
        {
            var handler = CrawlAnnounced;
            if (handler != null)
            {
                handler(this, new CrawlAnnouncedEventArgs(item));
            }
        }

        #endregion
    }
}
