﻿// ------------------------------------------------------------------------------------------------
//  <copyright file="Engine.cs" company="Iveely">
//    Copyright (c) Iveely Liu.  All rights reserved.
//  </copyright>
//  
//  <Create Time>
//    12/03/2012 20:12 
//  </Create Time>
//  
//  <contact owner>
//    liufanping@iveely.com 
//  </contact owner>
//  -----------------------------------------------------------------------------------------------

#region

using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
using IveelySE.Common;
using IveelySE.Common.Html;
using IveelySE.Common.Log;
using IveelySE.Config;
using IveelySE.IDFS.Store;

#endregion

namespace IveelySE.Spider
{
    public class Engine
    {
        /// <summary>
        ///   Page budget per crawl iteration: at most this many queue items are
        ///   processed by the parallel workers in a single <see cref="Start"/> run.
        /// </summary>
        private const int Iteration = 200;

        /// <summary>
        ///   Number of pages crawled so far in this run. Shared across the parallel
        ///   workers, so it must only be mutated via Interlocked operations.
        /// </summary>
        private static int _num;

        /// <summary>
        ///   Seed (entry-point) URLs of the crawl.
        /// </summary>
        public HashSet<Uri> BaseUrl;

        /// <summary>
        ///   Thread-safe set of URL strings known to be malformed/unusable.
        ///   Used as a set; the value object is always null.
        /// </summary>
        private ConcurrentDictionary<string, object> _invalidUrls = new ConcurrentDictionary<string, object>();

        /// <summary>
        ///   Content processors used to extract candidate links from fetched pages.
        /// </summary>
        private IProcessor[] _processors;

        /// <summary>
        ///   Work queue of (url, referrer) pairs still to be fetched.
        /// </summary>
        private ConcurrentQueue<KeyValuePair<Uri, Uri>> _queue = new ConcurrentQueue<KeyValuePair<Uri, Uri>>();

        /// <summary>
        ///   Thread-safe map of visited URLs. The value is the hash code of the
        ///   page title last seen (used to detect content changes on re-crawl),
        ///   or null when the page has been enqueued but not yet processed.
        /// </summary>
        private ConcurrentDictionary<Uri, object> _visitedUrls = new ConcurrentDictionary<Uri, object>();

        /// <summary>
        ///   Creates a crawl engine seeded with the given entry URLs.
        /// </summary>
        /// <param name="baseUrl"> Entry URLs; each element is converted via ToString(). </param>
        /// <exception cref="ArgumentNullException"> Thrown when <paramref name="baseUrl"/> is null. </exception>
        /// <exception cref="UriFormatException"> Thrown when an element is not a valid absolute URI. </exception>
        public Engine(ArrayList baseUrl)
        {
            if (baseUrl == null)
            {
                throw new ArgumentNullException(nameof(baseUrl));
            }
            this.BaseUrl = new HashSet<Uri>();
            Host = new HashSet<string>();
            _num = 0;
            // Register every seed URL and remember its root domain.
            foreach (var url in baseUrl)
            {
                Content.GetInstance().Write("爬虫开始执行任务:" + url, MessageType.Information);
                var uri = new Uri(url.ToString());
                this.BaseUrl.Add(uri);
                // Strip only a leading "www." prefix. string.Replace would also
                // remove "www." occurring later inside the host name.
                Host.Add(uri.Host.StartsWith("www.", StringComparison.OrdinalIgnoreCase)
                             ? uri.Host.Substring(4)
                             : uri.Host);
            }
            // Default processor chain: plain HTML link extraction.
            this.InitProcessors(new HtmlProcessor());
        }

        /// <summary>
        ///   Root domains of the seed URLs; used to keep the crawl on-site.
        /// </summary>
        private static HashSet<string> Host { get; set; }

        /// <summary>
        ///   Installs the processor chain used by <see cref="ProcessContent"/>.
        /// </summary>
        /// <param name="processors"> Processors, tried in order for each fetched page. </param>
        private void InitProcessors(params IProcessor[] processors)
        {
            _processors = processors;
        }

        /// <summary>
        ///   Re-visits every previously crawled URL so that changed pages
        ///   (detected via the stored title hash) are re-indexed.
        /// </summary>
        private void TravelHistory()
        {
            var processBar = new ProcessBar("更新历史访问记录", this._visitedUrls.Count);
            processBar.Init();
            int i = 0;
            foreach (var url in this._visitedUrls)
            {
                processBar.Current(++i);
                // Entries that were enqueued but never content-hashed carry a
                // null value; skip them instead of crashing on int.Parse(null).
                if (url.Value == null)
                {
                    continue;
                }
                this.ProcessUrl(url.Key, null, int.Parse(url.Value.ToString()));
            }
            processBar.Clean();
        }

        /// <summary>
        ///   Runs one crawl iteration: restores persisted state when present
        ///   (otherwise seeds the queue from <see cref="BaseUrl"/>), drains the
        ///   queue in parallel up to the <see cref="Iteration"/> page budget,
        ///   then persists the queue/visited/invalid sets for the next run.
        /// </summary>
        public void Start()
        {
            // Not the first run: restore the previous iteration's state from disk.
            if (File.Exists("_queue") && File.Exists("_visitedUrls") && File.Exists("_invalidUrls"))
            {
                Matrix.Load();
                _queue = (ConcurrentQueue<KeyValuePair<Uri, Uri>>)Serialize.UnSerailzable("_queue");
                _visitedUrls = (ConcurrentDictionary<Uri, object>)Serialize.UnSerailzable("_visitedUrls");
                _invalidUrls = (ConcurrentDictionary<string, object>)Serialize.UnSerailzable("_invalidUrls");
                // Walk the history once so changed pages get re-indexed.
                try
                {
                    this.TravelHistory();
                }
                catch (Exception exception)
                {
                    Content.GetInstance().Write("[Travel History] exception:" + exception, MessageType.Error);
                }
            }
            else
            {
                // First run: seed the queue with the entry URLs and prime it once.
                foreach (var uri in BaseUrl)
                {
                    _queue.Enqueue(new KeyValuePair<Uri, Uri>(uri, null));
                }
                ProcessQueue();
            }

            // Worker body: keep dequeuing until the queue empties or the page
            // budget is exhausted. Interlocked.Increment is required because
            // this lambda runs concurrently on multiple threads; the previous
            // bare _num++ was a data race. (new <= Iteration is equivalent to
            // the original "Iteration > old value" post-increment test.)
            Action<ParallelLoopState> processQueue = lps =>
                {
                    if (!_queue.IsEmpty
                        && System.Threading.Interlocked.Increment(ref _num) <= Iteration)
                    {
                        ProcessQueue();
                    }
                    else
                    {
                        lps.Break();
                    }
                };

            // Drain the queue in parallel.
            processQueue.InParallelWhile(_queue.IsEmpty == false);
            Block.SaveStoreInfo("Page");
            Matrix.Save();
            // Persist crawl state so the next iteration can resume.
            Serialize.Serailzable(_queue, "_queue");
            Serialize.Serailzable(_visitedUrls, "_visitedUrls");
            Serialize.Serailzable(_invalidUrls, "_invalidUrls");
        }

        /// <summary>
        ///   Adds every element of <paramref name="collection"/> to
        ///   <paramref name="hashSet"/>; duplicates are ignored by the set.
        /// </summary>
        /// <exception cref="ArgumentNullException"> Thrown when either argument is null. </exception>
        private static void AddRange<T>(ref HashSet<T> hashSet, IEnumerable<T> collection)
        {
            if (hashSet == null)
            {
                throw new ArgumentNullException(nameof(hashSet));
            }

            if (collection == null)
            {
                throw new ArgumentNullException(nameof(collection));
            }
            foreach (var item in collection)
            {
                // HashSet<T>.Add already rejects duplicates; the previous
                // Contains-then-Add performed a redundant second lookup.
                hashSet.Add(item);
            }
        }

        /// <summary>
        ///   Takes one (url, referrer) pair off the queue, crawls it, and
        ///   enqueues any newly discovered, not-yet-visited links.
        /// </summary>
        private void ProcessQueue()
        {
            KeyValuePair<Uri, Uri> urlToProcess;
            if (_queue.TryDequeue(out urlToProcess))
            {
                try
                {
                    var urls = ProcessUrl(urlToProcess.Key, urlToProcess.Value, 0);
                    foreach (var item in urls)
                    {
                        // Only enqueue links we have not crawled yet.
                        if (!_visitedUrls.ContainsKey(item))
                        {
                            _queue.Enqueue(new KeyValuePair<Uri, Uri>(item, urlToProcess.Key));
                        }
                    }
                }
                catch (Exception exception)
                {
                    // Best-effort crawl: log the failure and move on to the next URL.
                    Content.GetInstance().Write("[Spider]" + exception.Message, MessageType.Error);
                }
            }
        }

        /// <summary>
        ///   Fetches one URL, stores the page when its content changed, and
        ///   returns the on-site links discovered in it.
        /// </summary>
        /// <param name="url"> URL to fetch. </param>
        /// <param name="referrerUrl"> URL that linked to it (may be null). </param>
        /// <param name="identify"> 0 for a fresh crawl; otherwise the title hash
        ///   recorded on a previous visit, used to skip re-saving unchanged pages. </param>
        /// <returns> Absolute, same-origin links found on the page; empty on failure
        ///   or when the URL should not be crawled. </returns>
        protected IEnumerable<Uri> ProcessUrl(Uri url, Uri referrerUrl, int identify)
        {
            if (identify == 0)
            {
                // Fresh crawl: skip null or already-visited URLs.
                if (ShouldNotProcessUrl(url))
                {
                    return Enumerable.Empty<Uri>();
                }
            }
            // Mark as visited before fetching so concurrent workers do not
            // crawl the same URL twice.
            AddVisitedUrl(url);
            // Links discovered on this page.
            var foundUrls = new HashSet<string>();
            try
            {
                var request = CreateRequest(url, referrerUrl);
                using (var response = (HttpWebResponse)request.GetResponse())
                {
                    using (var crawlerContent = ProcessFactory.Create(url, referrerUrl, response))
                    {
                        var page = new Page();
                        page.Content = crawlerContent.GetContent();
                        page.Title = crawlerContent.GetTitle();
                        // Only persist when the title hash differs from the one
                        // recorded last time (i.e. the page changed or is new).
                        if (identify != page.Title.GetHashCode())
                        {
                            page.Url = url.ToString();
                            // Note: the original code assigned Date twice; only
                            // the invariant-culture full timestamp survived.
                            page.Date = DateTime.Now.ToString(CultureInfo.InvariantCulture);
                            DataService.LocalSave("Page", page);
                        }
                        // Collect candidate links for the crawl frontier.
                        AddRange(ref foundUrls, (ProcessContent(crawlerContent)));
                        // Record the title hash so the next run can detect changes.
                        this.AddVisitedUrlValue(url, page.Title.GetHashCode());
                    }
                }
            }
            catch (Exception ex)
            {
                // Network/parse failures are expected during crawling; log and skip.
                Common.Log.Content.GetInstance().Write(ex, MessageType.Warning);
                return Enumerable.Empty<Uri>();
            }

            var urlsToDownload = ProcessFoundUrls(url, foundUrls);

            // Materialize once: the sequence is consumed by both Matrix.Add
            // and the caller.
            var toDownload = urlsToDownload as Uri[] ?? urlsToDownload.ToArray();
            Matrix.Add(url.ToString(), toDownload);

            return toDownload;
        }

        /// <summary>
        ///   Null or already-crawled URLs are not crawled again.
        /// </summary>
        /// <param name="url"> Candidate URL. </param>
        /// <returns> true when the URL must be skipped; false when it may be crawled. </returns>
        private bool ShouldNotProcessUrl(Uri url)
        {
            return url == null || WasVisited(url);
        }

        /// <summary>
        ///   Converts raw link strings into absolute URIs (resolving relative
        ///   links against <paramref name="url"/>) and keeps only same-origin ones.
        /// </summary>
        /// <param name="url"> Page the links were found on (base for relative links). </param>
        /// <param name="foundUrls"> Raw link strings extracted from the page. </param>
        private IEnumerable<Uri> ProcessFoundUrls(Uri url, IEnumerable<string> foundUrls)
        {
            var urlsToDownload = new HashSet<Uri>();

            foreach (var foundUrl in foundUrls)
            {
                if (!IsKnownInvalidUrl(foundUrl))
                {
                    Uri foundUri;

                    if (Uri.IsWellFormedUriString(foundUrl, UriKind.Absolute))
                    {
                        foundUri = new Uri(foundUrl);
                    }
                    else if (Uri.IsWellFormedUriString(foundUrl, UriKind.Relative)
                             && !foundUrl.StartsWith("telnet:", StringComparison.Ordinal))
                    {
                        // Resolve the relative link against the current page.
                        foundUri = new Uri(url, new Uri(foundUrl, UriKind.Relative));
                    }
                    else
                    {
                        // Malformed (or telnet) link: remember it so we never retry.
                        AddInvalidUrl(foundUrl);

                        continue;
                    }

                    // Keep the crawl inside the seed domains.
                    if (IsOriginSimilar(foundUri))
                    {
                        urlsToDownload.Add(foundUri);
                    }
                }
            }

            return urlsToDownload;
        }

        /// <summary>
        ///   Checks whether a URI belongs to one of the seed root domains.
        /// </summary>
        /// <param name="compareTo"> URI to test. </param>
        private static bool IsOriginSimilar(Uri compareTo)
        {
            foreach (var h in Host)
            {
                // Match exactly or by "." + domain suffix: "sub.example.com"
                // belongs to "example.com", while "notexample.com" does not.
                // (The previous substring Contains() test accepted such
                // false positives.)
                if (string.Equals(compareTo.Host, h, StringComparison.OrdinalIgnoreCase)
                    || compareTo.Host.EndsWith("." + h, StringComparison.OrdinalIgnoreCase))
                {
                    return true;
                }
            }
            return false;
        }

        /// <summary>
        ///   Builds the HTTP request for a URL.
        /// </summary>
        /// <param name="url"> URL to request. </param>
        /// <param name="referrerUrl"> Referring URL, set as the Referer header when non-null. </param>
        /// <returns> The configured (not yet executed) request. </returns>
        private HttpWebRequest CreateRequest(Uri url, Uri referrerUrl)
        {
            var request = (HttpWebRequest)WebRequest.Create(url);
            // Follow redirect responses, but not endlessly.
            request.AllowAutoRedirect = true;
            request.MaximumAutomaticRedirections = 3;
            if (referrerUrl != null)
            {
                request.Referer = referrerUrl.ToString();
            }
            return request;
        }

        /// <summary>
        ///   Runs every capable processor over the fetched content and unions
        ///   the candidate link strings they produce.
        /// </summary>
        /// <param name="crawlerContent"> Fetched page content wrapper. </param>
        private IEnumerable<string> ProcessContent(BasicContent crawlerContent)
        {
            var urlCandidates = new HashSet<string>();

            foreach (var processor in _processors)
            {
                if (processor.CanProcess(crawlerContent))
                {
                    try
                    {
                        var foundUrlCandidates = processor.Process(crawlerContent);

                        if (foundUrlCandidates != null)
                        {
                            AddRange(ref urlCandidates, (foundUrlCandidates));
                        }
                    }
                    catch (Exception exception)
                    {
                        // One failing processor must not abort the others.
                        Content.GetInstance().Write(exception.Message, MessageType.Error);
                    }
                }
            }

            return urlCandidates;
        }

        /// <summary>
        ///   Checks whether a link string was previously marked invalid.
        /// </summary>
        private bool IsKnownInvalidUrl(string foundUrl)
        {
            return _invalidUrls.ContainsKey(foundUrl);
        }

        /// <summary>
        ///   Checks whether a URL has already been visited (or claimed) this run.
        /// </summary>
        private bool WasVisited(Uri url)
        {
            return _visitedUrls.ContainsKey(url);
        }

        /// <summary>
        ///   Marks a URL as visited. TryAdd is atomic and a no-op for existing
        ///   keys, replacing the previous race-prone check-then-AddOrUpdate.
        /// </summary>
        /// <param name="url"> Visited URL. </param>
        private void AddVisitedUrl(Uri url)
        {
            _visitedUrls.TryAdd(url, null);
        }

        /// <summary>
        ///   Records the content fingerprint (title hash) of a visited URL,
        ///   used on later runs to decide whether to re-save the page.
        /// </summary>
        /// <param name="url"> Visited URL. </param>
        /// <param name="value"> Hash code of the page title. </param>
        private void AddVisitedUrlValue(Uri url, int value)
        {
            _visitedUrls[url] = value;
        }

        /// <summary>
        ///   Remembers a malformed link string so it is never reprocessed.
        /// </summary>
        private void AddInvalidUrl(string url)
        {
            _invalidUrls.TryAdd(url, null);
        }
    }
}