﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading;
using xKnight.Models;

namespace xKnight.WebCrawling
{
    /// <summary>
    /// CrawlingAgents which are crawling a host use CrawlingSharedResource as a shared resource for synchronizing
    /// their operations and updating the overall crawling status in a thread-safe manner.
    /// </summary>
    public class CrawlingSharedResource
    {
        #region Fields

        // Count of agents currently in the middle of crawling.
        // Mutated only via Interlocked; read via Volatile.Read so readers see the latest value.
        private int _numberOfActiveAgents = 0;

        // Cookies that agents attach to their requests; set once in the constructor.
        private Cookie[] _cookies;

        // Backing field for StopCrawling; volatile so a stop request is promptly visible to all agent threads.
        private volatile bool _stopCrawling;

        #endregion

        #region Properties

        /// <summary>
        /// Cookies that CrawlingAgents attach to their requests.
        /// NOTE(review): the backing array is exposed directly, so callers can mutate its elements — confirm that is intended.
        /// </summary>
        public Cookie[] Cookies
        {
            get { return _cookies; }
        }

        /// <summary>
        /// Every new and unique page is inserted into this queue. CrawlingAgents consume these pages
        /// in order and extract new forms and links from them.
        /// </summary>
        internal Queue<Webpage> SharedQueue { get; private set; }

        /// <summary>
        /// A shared lock among CrawlingAgents, used for guarding shared resources in critical sections.
        /// </summary>
        internal object SharedLock { get; private set; }

        /// <summary>
        /// For every newly discovered page, the value of Page.Url is inserted into this HashSet
        /// so duplicate pages can be filtered out.
        /// </summary>
        internal HashSet<string> SharedPageHash { get; private set; }

        /// <summary>
        /// For every newly discovered form, the value of Form.GetId() is inserted into this HashSet
        /// so duplicate forms can be filtered out.
        /// </summary>
        internal HashSet<string> SharedFormHash { get; private set; }

        /// <summary>Settings that configure this crawl.</summary>
        public WebCrawlingSetting CrawlerSetting { get; private set; }

        /// <summary>Total number of links found, including duplicates. Updated under <see cref="SharedLock"/>.</summary>
        public int TotalLinksFound { get; private set; }

        /// <summary>Number of unique links found (size of <see cref="SharedPageHash"/>).</summary>
        public int TotalUniqueLinksFound { get { return SharedPageHash.Count; } }

        /// <summary>Total number of forms found, including duplicates. Updated under <see cref="SharedLock"/>.</summary>
        public int TotalFormsFound { get; private set; }

        /// <summary>Number of unique forms found (size of <see cref="SharedFormHash"/>).</summary>
        public int TotalUniqueFormsFound { get { return SharedFormHash.Count; } }

        /// <summary>Total number of pages downloaded so far. Updated under <see cref="SharedLock"/>.</summary>
        public int TotalPagesDownloaded { get; private set; }

        /// <summary>The host being crawled.</summary>
        internal Host Host { get; set; }

        /// <summary>
        /// Set to true to request that all CrawlingAgents stop crawling.
        /// Backed by a volatile field so the request is visible across agent threads.
        /// </summary>
        public bool StopCrawling
        {
            get { return _stopCrawling; }
            set { _stopCrawling = value; }
        }

        #endregion

        #region Constructors

        /// <summary>
        /// Creates the shared resource consumed by all CrawlingAgents working on <paramref name="host"/>.
        /// </summary>
        /// <param name="crawlerSetting">Settings configuring the crawl.</param>
        /// <param name="host">The host being crawled.</param>
        /// <param name="sharedQueue">Queue of pages pending processing, shared by all agents.</param>
        /// <param name="sharedLock">Lock object guarding the shared counters and collections.</param>
        /// <param name="sharedPageHash">Set of already-seen page URLs, used for de-duplication.</param>
        /// <param name="sharedFormHash">Set of already-seen form ids, used for de-duplication.</param>
        /// <param name="cookies">Cookies the agents attach to their requests.</param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when any of the shared collections or the shared lock is null — the class dereferences them unconditionally.
        /// </exception>
        public CrawlingSharedResource(WebCrawlingSetting crawlerSetting, Host host, Queue<Webpage> sharedQueue,
            object sharedLock, HashSet<string> sharedPageHash, HashSet<string> sharedFormHash, Cookie[] cookies)
        {
            // Guard the members the class machinery relies on; a null here would otherwise surface later
            // as an obscure NullReferenceException inside an agent thread.
            if (sharedQueue == null)
                throw new ArgumentNullException("sharedQueue");
            if (sharedLock == null)
                throw new ArgumentNullException("sharedLock");
            if (sharedPageHash == null)
                throw new ArgumentNullException("sharedPageHash");
            if (sharedFormHash == null)
                throw new ArgumentNullException("sharedFormHash");

            _cookies = cookies;
            CrawlerSetting = crawlerSetting;
            Host = host;
            SharedQueue = sharedQueue;
            SharedLock = sharedLock;
            SharedFormHash = sharedFormHash;
            SharedPageHash = sharedPageHash;
        }

        #endregion

        #region Interface

        /// <summary>
        /// Increments the count of pages downloaded. Thread-safe via <see cref="SharedLock"/>.
        /// </summary>
        internal void IncrementTotalPagesGetDownloaded()
        {
            lock (SharedLock)
            {
                TotalPagesDownloaded++;
            }
        }

        /// <summary>
        /// Adds <paramref name="num"/> to the running total of forms found. Thread-safe via <see cref="SharedLock"/>.
        /// </summary>
        internal void AddTotalFormsFound(int num)
        {
            lock (SharedLock)
            {
                TotalFormsFound += num;
            }
        }

        /// <summary>
        /// Adds <paramref name="num"/> to the running total of links found. Thread-safe via <see cref="SharedLock"/>.
        /// </summary>
        internal void AddTotalLinksFound(int num)
        {
            lock (SharedLock)
            {
                TotalLinksFound += num;
            }
        }

        /// <summary>
        /// Increments the number of active agents which are in the middle of crawling.
        /// </summary>
        internal void IncrementNumberOfActiveAgents()
        {
            Interlocked.Increment(ref _numberOfActiveAgents);
        }

        /// <summary>
        /// Decrements the number of active agents which were in the middle of crawling.
        /// </summary>
        internal void DecrementNumberOfActiveAgents()
        {
            Interlocked.Decrement(ref _numberOfActiveAgents);
        }

        /// <summary>
        /// Determines whether there is a CrawlingAgent in the middle of a crawling operation.
        /// </summary>
        /// <returns>True if at least one agent is active; otherwise false.</returns>
        internal bool HasActiveAgent()
        {
            // Volatile.Read pairs with the Interlocked writes above so this thread observes
            // the most recent increment/decrement performed by any other agent thread.
            return Volatile.Read(ref _numberOfActiveAgents) != 0;
        }

        #endregion
    }
}
