﻿using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading;
using HtmlAgilityPack;
using xKnight.Data;
using xKnight.Models;
using xKnight.WebCrawling.Event;
using xKnight.WebCrawling.Models;
using xKnight.WebCrawling.Core.Authentication;

namespace xKnight.WebCrawling
{
    /// <summary>
    /// Orchestrates a crawl job: creates one group of <see cref="CrawlerAgent"/>s per target
    /// host, tracks how many agents are alive per host, and raises start/completion events
    /// once all hosts have started or finished.
    /// </summary>
    public class Crawler
    {

        #region Events
        /// <summary>
        /// Raised after crawling has completed for all of the hosts.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        public delegate void CrawlCompletedEventHandler(object sender, CrawlCompletedEventArgs e);
        public event CrawlCompletedEventHandler CrawlCompleted;

        /// <summary>
        /// Raised after crawling has started for all of the hosts.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        public delegate void CrawlStartedEventHandler(object sender, CrawlStartedEventArgs e);
        public event CrawlStartedEventHandler CrawlStarted;

        /// <summary>
        /// Reports inner events and progress of the agents to subscribers.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        public delegate void CrawlAnnouncedEventHandler(object sender, CrawlAnnouncedEventArgs e);
        public event CrawlAnnouncedEventHandler CrawlAnnounced;

        #endregion

        #region Fields

        private readonly object _lock = new object(); // guards the bookkeeping collections below; agents call back on their own threads
        private readonly WebCrawlingSetting _crawlSetting;
        private readonly Dictionary<string, int> _aliveAgentsDic = new Dictionary<string, int>(); // host name -> number of currently alive agent threads
        private readonly Dictionary<Host, CrawlerAgent[]> _agentsDic = new Dictionary<Host, CrawlerAgent[]>(); // maps each host to its crawler agents
        private readonly List<CrawlingSharedResource> _sharedResources = new List<CrawlingSharedResource>(); // one per host, kept so Stop() can signal every host
        private readonly WebCrawlingTarget[] _targets;

        #endregion

        #region Constructors

        /// <summary>
        /// Constructs a new instance of the Crawler class.
        /// </summary>
        /// <param name="crawlerSetting">Settings for this crawl job.</param>
        /// <param name="targets">The hosts to crawl; each target carries its own agent thread count.</param>
        /// <exception cref="ArgumentNullException"><paramref name="crawlerSetting"/> or <paramref name="targets"/> is null.</exception>
        /// <exception cref="InvalidDataException">A target URL is not a well-formed absolute URI.</exception>
        public Crawler(WebCrawlingSetting crawlerSetting, WebCrawlingTarget[] targets)
        {
            if (crawlerSetting == null)
            {
                // BUG FIX: the original passed a formatted message as the paramName
                // argument, so the exception reported a nonsense parameter name.
                throw new ArgumentNullException("crawlerSetting", "null value for CrawlerSetting is not acceptable");
            }

            if (targets == null)
            {
                // BUG FIX: targets was dereferenced without a null check, which
                // surfaced as a NullReferenceException instead of a clear error.
                throw new ArgumentNullException("targets", "null value for targets is not acceptable");
            }

            for (int i = 0; i < targets.Length; i++)
            {
                if (!Uri.IsWellFormedUriString(targets[i].TargetUrl, UriKind.Absolute))
                {
                    throw new InvalidDataException(string.Format("invalid host name : {0}", targets[i].TargetUrl));
                }
            }

            _crawlSetting = crawlerSetting;
            _targets = targets;
        }

        #endregion

        #region Properties

        /// <summary>
        /// The setting which the crawling operation is based on.
        /// </summary>
        public WebCrawlingSetting CrawlSetting
        {
            get { return _crawlSetting; }
        }

        /// <summary>
        /// Hosts to crawl.
        /// </summary>
        public WebCrawlingTarget[] WebCrawlingTargets
        {
            get { return _targets; }
        }

        #endregion

        #region Interface

        /// <summary>
        /// Starts crawling by creating the configured number of agents for each target host.
        /// Each agent runs asynchronously; completion is reported via <see cref="CrawlCompleted"/>.
        /// </summary>
        public void Crawl()
        {
            _crawlSetting.StartTime = DateTime.Now;
            DataLayer.Save(_crawlSetting);

            for (int i = 0; i < _targets.Length; i++)
            {
                CrawlingSharedResource sharedResource = MakeSharedResourceForCrawlAgents(_targets[i]);
                CrawlerAgent[] agents = new CrawlerAgent[_targets[i].CrawlerThreadCount];

                Host host = sharedResource.Host;

                // BUG FIX: agents started in earlier iterations may already be
                // mutating these dictionaries from their own threads, so the
                // bookkeeping must happen under the same lock the handlers use.
                lock (_lock)
                {
                    _sharedResources.Add(sharedResource);
                    _aliveAgentsDic.Add(host.HostName, 0);
                    _agentsDic.Add(host, agents);
                }

                host.StartTime = DateTime.Now;
                host.CrawlId = _crawlSetting.Id;

                DataLayer.Save(host);
                DataLayer.Save(host, sharedResource.Cookies);

                for (int j = 0; j < agents.Length; j++)
                {
                    agents[j] = new CrawlerAgent(sharedResource);

                    // Subscribe before starting so no event can be missed.
                    agents[j].CrawlAgentCompleted += AgentCrawlAgentCompleted;
                    agents[j].CrawlAnnounced += AgentCrawlAnnounced;
                    agents[j].CrawlAgentStarted += Crawler_CrawlAgentStarted;

                    agents[j].CrawlAsync();
                }
            }
        }

        /// <summary>
        /// Signals the agents of every host to stop crawling.
        /// </summary>
        public void Stop()
        {
            // BUG FIX: the original kept only the LAST host's shared resource in a
            // field, so Stop() had no effect on any host started before the last one.
            lock (_lock)
            {
                for (int i = 0; i < _sharedResources.Count; i++)
                {
                    _sharedResources[i].StopCrawling = true;
                }
            }
        }

        #endregion

        #region Event Raiser

        private void OnCrawlStarted(Crawler crawler)
        {
            // Copy to a local so an unsubscribe between the check and the call
            // cannot cause a NullReferenceException.
            CrawlStartedEventHandler handler = CrawlStarted;
            if (handler != null)
            {
                handler(this, new CrawlStartedEventArgs(crawler._crawlSetting.Id, DateTime.Now));
            }
        }

        private void OnCrawlCompleted(Crawler crawler)
        {
            CrawlCompletedEventHandler handler = CrawlCompleted;
            if (handler != null)
            {
                handler(this, new CrawlCompletedEventArgs(crawler._crawlSetting.Id, DateTime.Now));
            }
        }

        private void OnCrawlAnnounced(CrawlAnnounceItem item)
        {
            CrawlAnnouncedEventHandler handler = CrawlAnnounced;
            if (handler != null)
            {
                handler(this, new CrawlAnnouncedEventArgs(item));
            }
        }

        #endregion

        #region Event Handlers

        // Forwards an agent's announcement to this crawler's subscribers.
        void AgentCrawlAnnounced(object sender, CrawlAnnouncedEventArgs e)
        {
            OnCrawlAnnounced(e.CrawlAnnounceItem);
        }

        // Called on the agent's thread when it finishes; when the last agent of the
        // last host finishes, the whole crawl is marked done.
        void AgentCrawlAgentCompleted(object sender, EventArgs e)
        {
            // Direct cast instead of 'as': a foreign sender is a programming error
            // and should fail fast rather than throw NullReferenceException later.
            CrawlerAgent agent = (CrawlerAgent)sender;
            Host host = agent.CrawlingSharedResource.Host;

            lock (_lock)
            {
                DecrementNumberOfAgents(host.HostName);

                if (!HasAliveAgents(host.HostName))
                {
                    MarkHostAsCrawled(host);

                    if (!HasUncrawledHost())
                    {
                        MarkCrawlAsDone();
                    }
                }
            }
        }

        // Called on the agent's thread when it starts; once every configured agent
        // of every host is alive, CrawlStarted is raised.
        void Crawler_CrawlAgentStarted(object sender, EventArgs e)
        {
            CrawlerAgent agent = (CrawlerAgent)sender;
            Host host = agent.CrawlingSharedResource.Host;

            lock (_lock)
            {
                IncrementNumberOfAgents(host.HostName);

                if (IsCrawlingForAllHostsStarted())
                {
                    OnCrawlStarted(this);
                }
            }
        }

        #endregion

        #region Private Methods

        // Builds the per-host state (queue, hashes, lock, cookies) that all agents
        // of one host share. Authentication runs here, before any agent starts.
        private CrawlingSharedResource MakeSharedResourceForCrawlAgents(WebCrawlingTarget target)
        {
            Host host = new Host() { HostName = target.TargetUrl };
            Queue<Webpage> sharedQueue = new Queue<Webpage>();
            object sharedLock = new object();
            HashSet<string> sharedPageHash = new HashSet<string>();
            HashSet<string> sharedFormHash = new HashSet<string>();

            Cookie[] cookies = Authenticator.Authenticate(target.AuthenticationSetting);

            return new CrawlingSharedResource(_crawlSetting,
                host,
                sharedQueue,
                sharedLock,
                sharedPageHash,
                sharedFormHash,
                cookies);
        }

        // Persists the finish time of the whole job and notifies subscribers.
        private void MarkCrawlAsDone()
        {
            _crawlSetting.FinishTime = DateTime.Now;
            DataLayer.Save(_crawlSetting);
            OnCrawlCompleted(this);
        }

        // A host is "uncrawled" while it still has an entry in the agents map;
        // MarkHostAsCrawled removes the entry.
        private bool HasUncrawledHost()
        {
            return _agentsDic.Count != 0;
        }

        // Removes the host from the pending set and persists its finish time.
        private void MarkHostAsCrawled(Host host)
        {
            _agentsDic.Remove(host);
            host.FinishTime = DateTime.Now;

            DataLayer.Save(host);
        }

        private bool HasAliveAgents(string targetUrl)
        {
            return _aliveAgentsDic[targetUrl] != 0;
        }

        private void DecrementNumberOfAgents(string targetUrl)
        {
            _aliveAgentsDic[targetUrl]--;
        }

        // True once every target has exactly its configured number of agents alive.
        private bool IsCrawlingForAllHostsStarted()
        {
            for (int i = 0; i < _targets.Length; i++)
            {
                int alive;
                // BUG FIX: targets not yet registered by Crawl() have no dictionary
                // entry; the original indexer threw KeyNotFoundException here.
                if (!_aliveAgentsDic.TryGetValue(_targets[i].TargetUrl, out alive)
                    || alive != _targets[i].CrawlerThreadCount)
                {
                    return false;
                }
            }

            return true;
        }

        private void IncrementNumberOfAgents(string targetUrl)
        {
            _aliveAgentsDic[targetUrl]++;
        }

        #endregion
    }
}
