﻿using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading;
using System.Text.RegularExpressions;
using FetchAndProcess.QueuMgr;
using HtmlAgilityPack;
using WebDUPlugIn.ConfigAndModels;
using log4net;


namespace WebDUPlugIn.Crawlers
{
    /// <summary>
    /// A single unit of crawl work: the URI to fetch, paired with the
    /// <see cref="WebCrawlEngine"/> that owns it (so worker code can reach the
    /// engine's queue, config, and robots.txt filter).
    /// </summary>
    class WorkItem
    {
        public Uri uri { get; set; }
        public WebCrawlEngine CrawlEngine { get; set; }

        public WorkItem(Uri u, WebCrawlEngine c)
        {
            CrawlEngine = c;
            uri = u;
        }
    }
    
    /// <summary>
    /// Single-host web crawler.  URIs are pulled from a blocking work queue,
    /// fetched over HTTP, parsed with HtmlAgilityPack, and same-host links found
    /// in the page are queued for further crawling.  Each parsed document is also
    /// handed to the data-processing pipeline via <see cref="QueueManager"/>.
    /// </summary>
    class WebCrawlEngine
    {
        private static readonly log4net.ILog log = log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);

        // Blocking work queue feeding StartCrawling.  Initialized inline and
        // readonly, so it can never be null.
        private readonly BlockReadQueue<QueueWorkItem> _urlQueue = new BlockReadQueue<QueueWorkItem>();
        private readonly RobotsDotTxtFilter robotsDotTextFilter = new RobotsDotTxtFilter();

        // Crawl configuration, forwarded to the document-processing pipeline.
        private readonly WebDUConfig fetchRec;

        public WebCrawlEngine(WebDUConfig configRec)
        {
            fetchRec = configRec;
        }

        /// <summary>
        /// Enqueues a URI for crawling and records it in the crawl dictionary so
        /// the same URI is never queued twice.
        /// </summary>
        /// <param name="uri">URI to crawl.</param>
        public void EnQueueUrl(Uri uri)
        {
            var wrkItem = new WorkItem(uri, this);
            var workItemToProcess = new QueueWorkItem(QUEUE_WORK_ITEM_TYPE.PROCESS_WORK_ITEM, null, wrkItem);
            _urlQueue.Enqueue(workItemToProcess);
            CrawlDictionary.MarkasQueuedUri(wrkItem.uri);
        }

        /// <summary>
        /// Posts a PROCESS_END sentinel that makes <see cref="StartCrawling"/> return.
        /// </summary>
        private void EnqueueEndOfWork()
        {
            var endoftheWork = new QueueWorkItem(QUEUE_WORK_ITEM_TYPE.PROCESS_END, null, null);
            _urlQueue.Enqueue(endoftheWork);
        }

        /// <summary>Current number of pending work items in the crawl queue.</summary>
        public long GetQueueLength()
        {
            return _urlQueue.Count;
        }

        /// <summary>
        /// Loads robots.txt rules for the given URI's host so subsequently
        /// discovered links can be checked against its exclusions.
        /// </summary>
        public void CrawlRobotsDotText(Uri uriForRobotsTextCrawling)
        {
            robotsDotTextFilter.NewHost(uriForRobotsTextCrawling.Host);
        }

        /// <summary>
        /// Main crawl loop.  Dequeues and processes work items until a PROCESS_END
        /// sentinel arrives; the sentinel is queued automatically whenever the
        /// queue drains to zero after an item is processed (no more work can
        /// appear, since only processing refills the queue).  Every 1000 items the
        /// crawl dictionary is dumped to file as a checkpoint.
        /// </summary>
        public void StartCrawling()
        {
            log.DebugFormat("Kicking Off  Crawling URLs");
            int processedCount = 0;
            while (true)
            {
                QueueWorkItem workItem = _urlQueue.Dequeue();
                if (workItem == null)
                    continue; // blocking dequeue yielded nothing; keep waiting

                if (workItem.cmdType == QUEUE_WORK_ITEM_TYPE.PROCESS_END)
                {
                    log.DebugFormat(" Received End Request by a Thread.. ");
                    // Process Close up Actions from all data processors ..
                    log.DebugFormat("Crawling completed..");
                    return;
                }

                ProcessQueuedWorkItemThreadFunction(workItem);

                if (_urlQueue.Count == 0)
                {
                    log.DebugFormat("Queuiong End of Work Request.................... THE END is Coming");
                    EnqueueEndOfWork();
                }

                if (++processedCount % 1000 == 0)
                {
                    // Periodic checkpoint of all URLs seen so far.
                    CrawlDictionary.DumpDictionarytoFile();
                    log.DebugFormat("Memory in use: {0}", GC.GetTotalMemory(false));
                    processedCount = 0;
                }
            }
        }

        private const string MIMETypeHTML = "text/html";
        private const string MIMETypeXML = "text/xml";
        private const string MIMETypeImage = "image/";

        // Media/script resources that should be skipped rather than crawled as pages.
        // Compiled once: this pattern runs on every extracted link.
        private const string ImageUrlPattern = @"[\w]+\.(jpeg|gif|jpg|tiff|mov|mp3|mp4|png|bmp|raw|svg|js)";
        private static readonly Regex ImageUrlRegex = new Regex(ImageUrlPattern, RegexOptions.Compiled);

        // Tolerates stray whitespace around "http://" produced by sloppy markup.
        private const string httpPrefixPattern = @"\b(http|https)\s*:\s*/\s*/";
        private static readonly Regex HttpPrefixRegex = new Regex(httpPrefixPattern, RegexOptions.Compiled);

        /// <summary>
        /// Processes one dequeued work item: fetches the URI, and when the
        /// response is HTML, extracts and enqueues the crawlable links.
        /// Non-HTML content types are only logged.  All failures are caught and
        /// logged so one bad URL cannot kill the crawl loop.
        /// </summary>
        /// <param name="args">A <see cref="QueueWorkItem"/> whose ResultRecord is a <see cref="WorkItem"/>.</param>
        private static void ProcessQueuedWorkItemThreadFunction(Object args)
        {
            var workItem = args as QueueWorkItem;
            if (workItem == null || workItem.ResultRecord == null)
                return;

            var wrkItem = workItem.ResultRecord as WorkItem;
            try
            {
                if (wrkItem == null || wrkItem.uri == null)
                    return;

                log.InfoFormat("****Crawling  URI :{0}", wrkItem.uri);
                var crawlRequest = (HttpWebRequest)WebRequest.Create(wrkItem.uri);
                crawlRequest.UserAgent = "Mozilla/5.0 (compatible;Windows NT 6.1; WOW64; ENUS) Gecko/20100101 Firefox/13.0.1";
                crawlRequest.ContentType = "text/html,application/xhtml+xml,application/xml,*/*";

                // using ensures the connection is released even when parsing throws
                // (the original leaked the response on every exception path).
                using (var response = (HttpWebResponse)crawlRequest.GetResponse())
                {
                    if (response.StatusCode != HttpStatusCode.OK)
                        return;

                    if (response.ContentType.Contains(MIMETypeHTML))
                    {
                        EnqueueExtractedLinks(response, wrkItem);
                    }
                    else if (response.ContentType.Contains(MIMETypeImage))
                    {
                        log.InfoFormat(" Image Content {0}, {1}", response.ContentType, wrkItem.uri);
                    }
                    else if (response.ContentType.Contains(MIMETypeXML))
                    {
                        log.InfoFormat(" XML Content {0}, {1}", response.ContentType, wrkItem.uri);
                    }
                    else
                    {
                        log.InfoFormat(" Unknown  Content {0}", response.ContentType);
                    }
                }
            }
            catch (Exception e)
            {
                // wrkItem may legitimately be null here (bad cast above), so guard
                // the log argument instead of risking a second exception.
                log.DebugFormat("Exception Stack Trace : {0} : {1} : {2}", e.StackTrace, e.Message,
                    (wrkItem != null) ? (object)wrkItem.uri : "<no uri>");
            }
        }

        /// <summary>
        /// Parses an HTML response and enqueues every discovered link that is
        /// same-host, not a media/script resource, not already queued, and not
        /// excluded by robots.txt.
        /// </summary>
        private static void EnqueueExtractedLinks(HttpWebResponse response, WorkItem wrkItem)
        {
            List<Uri> urisToCrawlAgain = ProcessHtmlDocument(response, wrkItem.uri, wrkItem.CrawlEngine.fetchRec);
            int enqueuedURLCount = 0;
            foreach (Uri crawlUri in urisToCrawlAgain)
            {
                // Skip file URIs, media/script resources, and off-host links.
                if (crawlUri.IsFile || ImageUrlRegex.IsMatch(crawlUri.ToString()) || (crawlUri.Host != wrkItem.uri.Host))
                    continue;

                // Rebuild scheme://host/path, dropping the #fragment (and query)
                // tail so equivalent URLs collapse to a single dictionary entry.
                Uri reformattedUri = new Uri(String.Format("{0}://{1}{2}", crawlUri.Scheme, crawlUri.Host, crawlUri.AbsolutePath));

                if (CrawlDictionary.IsQueued(reformattedUri))
                    continue;

                if (wrkItem.CrawlEngine.robotsDotTextFilter.IsExcluded(reformattedUri))
                {
                    log.DebugFormat("  Skipping due to Robots.txt :{0}", reformattedUri);
                    continue;
                }

                ++enqueuedURLCount;
                wrkItem.CrawlEngine.EnQueueUrl(reformattedUri);
            }
            log.InfoFormat(" Enqueued URI #:{0} vs URL in URI {1} ,Q Length : {2} ", enqueuedURLCount, urisToCrawlAgain.Count, wrkItem.CrawlEngine.GetQueueLength());
        }

        /// <summary>
        /// Reads the response body, parses it, hands the document to the
        /// processing pipeline, and returns every link-like URI found in it.
        /// </summary>
        /// <returns>All extracted URIs; empty (never null) when nothing was found.</returns>
        private static List<Uri> ProcessHtmlDocument(HttpWebResponse response, Uri rootUri, WebDUConfig cfgRec)
        {
            var uriList = new List<Uri>();
            if (response == null)
                return uriList; // empty, not null: callers iterate the result directly

            string htmlContent;
            using (var htmlStreamReader = new StreamReader(response.GetResponseStream()))
            {
                htmlContent = htmlStreamReader.ReadToEnd();
            }

            var doc = new HtmlDocument();
            doc.LoadHtml(htmlContent);

            // Queue the parsed document to the data processor.
            DocumentToProcess docToProcess = new DocumentToProcess(rootUri, doc);
            QueueManager.EnqueueWorkItem(cfgRec, docToProcess);

            // Pass 1: any element carrying a link-like attribute.  ParseLink
            // applies the per-attribute filtering rules.
            HtmlNodeCollection atts = doc.DocumentNode.SelectNodes("//*[@background or @lowsrc or @src or @href]");
            string[] attrNames = { "background", "href", "src", "lowsrc" };
            if (atts != null)
            {
                foreach (HtmlNode n in atts)
                {
                    foreach (string str in attrNames)
                    {
                        Uri uritoAdd = ParseLink(n, str, rootUri);
                        if (uritoAdd != null)
                            uriList.Add(uritoAdd);
                    }
                }
            }

            // Pass 2: anchor hrefs (ParseLink deliberately ignores href on
            // anything but <link>).  SelectNodes returns null when no node
            // matches, so guard before enumerating.
            HtmlNodeCollection hrefs = doc.DocumentNode.SelectNodes("//a[@href]");
            if (hrefs != null)
            {
                uriList.AddRange(hrefs.Select(node => node.Attributes["href"])
                                      .Where(att => att != null)
                                      .Select(att => GetUri(att.Value, rootUri))
                                      .Where(uri => uri != null));
            }
            return uriList;
        }

        /// <summary>
        /// Reads attribute <paramref name="name"/> from <paramref name="node"/>
        /// and resolves it to a URI, or null when absent or filtered out.
        /// </summary>
        private static Uri ParseLink(HtmlNode node, string name, Uri rootUri)
        {
            HtmlAttribute att = node.Attributes[name];
            if (att == null)
                return null;

            // if name = href, we are only interested by <link> tags
            if ((name == "href") && (node.Name != "link"))
                return null;

            return GetUri(att.Value, rootUri);
        }

        /// <summary>
        /// Converts a raw attribute value to a Uri: absolute when it carries an
        /// http(s) prefix, otherwise resolved relative to <paramref name="rootUri"/>.
        /// Returns null for unparseable values (best-effort crawl: skip bad links).
        /// </summary>
        private static Uri GetUri(string value, Uri rootUri)
        {
            try
            {
                return HttpPrefixRegex.IsMatch(value)
                    ? new Uri(value)
                    : new Uri(rootUri, value);
            }
            catch (Exception)
            {
                // Malformed link inside the page; not worth aborting the crawl.
                return null;
            }
        }
    }
}
