﻿/*************************************************************
 * Software of blackcore
 *
 * CLRVersion:4.0.30319.296
 * MachineName:SPACE
 * Author:Andy Huang
 * Email:itblackhole@gmail.com
 * DateTime：6/6/2013 10:23:27 AM
 * Function：
 *************************************************************/

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using Sob.Common;
using System.IO;
using System.Configuration;
using NLog;
using System.Diagnostics;
using Sob.Machine.WebPageParser.Entity;

namespace Sob.Machine.WebPageParser.Demo
{
    /// <summary>
    /// Simple breadth-first web crawler: starting from a seed URL it downloads pages,
    /// mirrors them into a local directory tree, and follows in-domain links.
    /// </summary>
    public class Crawler
    {
        #region ******************************Private Property******************************
        private static readonly Logger logger = LogManager.GetCurrentClassLogger();

        /// <summary>
        /// Anchor-tag pattern, compiled once because it is applied to every downloaded page
        /// (previously a new Regex was constructed per page).
        /// </summary>
        private static readonly Regex AnchorRegex = new Regex(
            @"(?is)<a[^>]*?href=(['""]?)(?<url>[^'""\s>]+)\1[^>]*>(?<text>(?:(?!</?a\b).)*)</a>",
            RegexOptions.Compiled);

        /// <summary>
        /// Seed address; relative links are resolved against it.
        /// </summary>
        private Uri baseUri;
        /// <summary>
        /// Root-domain suffix derived from the seed host (e.g. ".jiuxian.com"),
        /// used both to keep the crawl on-site and as the local mirror folder name.
        /// </summary>
        private string baseHost = string.Empty;
        /// <summary>
        /// Work queue of URLs still to be fetched.
        /// </summary>
        private Queue<string> pending = new Queue<string>();
        /// <summary>
        /// Mirror of <see cref="pending"/> contents for O(1) duplicate checks
        /// (Queue&lt;T&gt;.Contains is O(n), which made link dedup O(n²) over a crawl).
        /// </summary>
        private HashSet<string> pendingSet = new HashSet<string>();
        /// <summary>
        /// URLs already dequeued for processing.
        /// </summary>
        private HashSet<string> visited = new HashSet<string>();

        private int failure = 0, success = 0;
        #endregion ******************************Private Property******************************


        #region ******************************Public Property******************************
        /// <summary>
        /// Root directory under which all crawled sites are stored (from app setting "SpiderWeb").
        /// </summary>
        public string SaveSpiderPageRootDirectory { get; private set; }
        /// <summary>
        /// Directory for the current target domain's crawled files.
        /// </summary>
        public string SaveSpiderPageTargetDomain { get; set; }

        /// <summary>
        /// Handler signature for crawl-progress notifications.
        /// </summary>
        /// <param name="crawlInfo">Progress snapshot: host, visited/pending counts, success/failure totals and elapsed time.</param>
        public delegate void CrawlerProgressEventHander(CrawlInfo crawlInfo);
        /// <summary>
        /// Raised once after each URL has been processed.
        /// </summary>
        public event CrawlerProgressEventHander CrawlerProgress;
        #endregion ******************************Public Property******************************


        #region ******************************Structure******************************
        /// <summary>
        /// Creates a crawler rooted at <paramref name="url"/> and prepares the local mirror directories.
        /// </summary>
        /// <param name="url">Absolute seed URL to start crawling from.</param>
        /// <exception cref="ArgumentException">When <paramref name="url"/> is null or whitespace.</exception>
        /// <exception cref="UriFormatException">When <paramref name="url"/> is not a valid absolute URI.</exception>
        public Crawler(string url)
        {
            if (string.IsNullOrWhiteSpace(url))
            {
                throw new ArgumentException("url must be a non-empty absolute URL.", "url");
            }

            baseUri = new Uri(url);

            // Root domain: drop the first host label ("www.example.com" -> ".example.com").
            // Hosts without any dot (e.g. "localhost") previously crashed with
            // ArgumentOutOfRangeException from Substring(-1); fall back to the whole host.
            int firstDot = baseUri.Host.IndexOf('.');
            baseHost = firstDot >= 0 ? baseUri.Host.Substring(firstDot) : baseUri.Host;

            // Enqueue the seed URL.
            pending.Enqueue(url);
            pendingSet.Add(url);

            // Prepare the local mirror directories.
            InitDirectory();
        }

        /// <summary>
        /// Resolves the storage root from configuration and ensures both the root
        /// and the per-domain directory exist.
        /// </summary>
        /// <exception cref="ConfigurationErrorsException">When the "SpiderWeb" app setting is missing or empty.</exception>
        void InitDirectory()
        {
            // Fail fast with a clear message instead of a NullReferenceException
            // when the setting is absent.
            string root = ConfigurationManager.AppSettings["SpiderWeb"];
            if (string.IsNullOrWhiteSpace(root))
            {
                throw new ConfigurationErrorsException("AppSetting 'SpiderWeb' is missing or empty.");
            }

            SaveSpiderPageRootDirectory = root;
            // CreateDirectory is a no-op when the directory already exists.
            Directory.CreateDirectory(SaveSpiderPageRootDirectory);

            SaveSpiderPageTargetDomain = Path.Combine(SaveSpiderPageRootDirectory, baseHost);
            Directory.CreateDirectory(SaveSpiderPageTargetDomain);
        }
        #endregion ******************************Structure******************************


        #region ******************************Private Method******************************
        /// <summary>
        /// Writes the page snapshot to disk, mirroring the site's URL path as a local
        /// directory structure under <see cref="SaveSpiderPageTargetDomain"/>.
        /// </summary>
        /// <param name="pageInfo">Downloaded page (must have ResponseUri and Snapshot populated).</param>
        void SaveCrawlPage(PageInfo pageInfo)
        {
            // All URI segments except the last (the document itself) form the
            // relative directory; segments already carry their '/' separators.
            StringBuilder dirBuilder = new StringBuilder();
            for (int i = 0; i < pageInfo.ResponseUri.Segments.Length - 1; i++)
            {
                dirBuilder.Append(pageInfo.ResponseUri.Segments[i]);
            }
            string tmpDirectory = dirBuilder.ToString();
            Directory.CreateDirectory(SaveSpiderPageTargetDomain + tmpDirectory);

            // Root-level documents have no directory segments; keep a path separator.
            tmpDirectory = string.IsNullOrWhiteSpace(tmpDirectory) ? "/" : tmpDirectory;

            // Build a legal file name from the page title (fallback for untitled pages),
            // replacing filesystem-invalid characters with the "ic" marker.
            string title = string.IsNullOrWhiteSpace(pageInfo.Title) ? "index" : pageInfo.Title;
            foreach (char invalidCharacter in Path.GetInvalidFileNameChars())
            {
                title = title.Replace(invalidCharacter.ToString(), "ic");
            }

            string filepath = SaveSpiderPageTargetDomain + tmpDirectory + title;
            // Fall back to UTF-8 when the response did not declare a usable charset
            // (StreamWriter throws ArgumentNullException on a null encoding).
            Encoding encoding = pageInfo.Charset ?? Encoding.UTF8;
            using (StreamWriter sw = new StreamWriter(filepath, false, encoding))
            {
                sw.Write(pageInfo.Snapshot);
            }
        }


        /// <summary>
        /// Extracts anchor hrefs from the page snapshot and enqueues previously
        /// unseen, in-domain http(s) URLs.
        /// </summary>
        /// <param name="pageInfo">Downloaded page; ignored when null or without a snapshot.</param>
        void ExtractUrl(PageInfo pageInfo)
        {
            // Nothing to parse when the download failed or returned an empty body;
            // previously this threw ArgumentNullException inside Regex.Matches.
            if (pageInfo == null || string.IsNullOrEmpty(pageInfo.Snapshot))
            {
                return;
            }

            MatchCollection mc = AnchorRegex.Matches(pageInfo.Snapshot);
            foreach (Match m in mc)
            {
                string url = m.Groups["url"].Value;

                // "#" points back at the current page.
                if (url == "#")
                {
                    continue;
                }

                if (!Uri.IsWellFormedUriString(url, UriKind.RelativeOrAbsolute))
                {
                    continue;
                }

                // Resolve relative links against the seed address; TryCreate avoids
                // a UriFormatException aborting the whole extraction pass.
                Uri uri;
                if (!Uri.TryCreate(baseUri, url, out uri))
                {
                    continue;
                }

                // Only crawl http(s) pages (skips mailto:, javascript:, ftp:, ...).
                if (uri.Scheme != Uri.UriSchemeHttp && uri.Scheme != Uri.UriSchemeHttps)
                {
                    continue;
                }
                // Stay inside the target domain (Uri.Host is already lowercase-normalized).
                if (!uri.Host.EndsWith(baseHost, StringComparison.OrdinalIgnoreCase))
                {
                    continue;
                }
                if (uri.Host.StartsWith("ftp", StringComparison.OrdinalIgnoreCase))
                {
                    continue;
                }

                // Enqueue only URLs neither visited nor already queued.
                string absolute = uri.ToString();
                if (!visited.Contains(absolute) && pendingSet.Add(absolute))
                {
                    pending.Enqueue(absolute);
                }
            }
        }


        #endregion ******************************Private Method******************************


        #region ******************************Public Method******************************
        /// <summary>
        /// Runs the crawl loop until the work queue is empty: downloads each pending
        /// URL, saves successful pages, harvests new links, and reports progress.
        /// Exceptions for individual URLs are logged and counted as failures; they
        /// do not stop the crawl.
        /// </summary>
        public void DownLoad()
        {
            while (pending.Count > 0)
            {
                string currentUrl = pending.Dequeue();
                pendingSet.Remove(currentUrl);
                visited.Add(currentUrl);

                try
                {
                    Stopwatch useTime = Stopwatch.StartNew();

                    Uri uri = new Uri(currentUrl);
                    CrawlPage crawl = new CrawlPage(uri);
                    // Short-circuit keeps PageInfo access safe when the download failed.
                    if (crawl.DownloadPage() && crawl.PageInfo.IsSuccess)
                    {
                        // Persist the snapshot into the local mirror.
                        SaveCrawlPage(crawl.PageInfo);
                        success++;
                    }
                    else
                    {
                        failure++;
                    }

                    // Harvest new links (ExtractUrl ignores pages without a snapshot).
                    ExtractUrl(crawl.PageInfo);

                    useTime.Stop();
                    // Snapshot the delegate so a subscriber detaching concurrently
                    // cannot cause a NullReferenceException between check and call.
                    CrawlerProgressEventHander handler = CrawlerProgress;
                    if (handler != null)
                    {
                        handler(new CrawlInfo() { BaseHost = this.baseHost, Visited = visited.Count, Pending = pending.Count, Success = success, Failure = failure, useTime = useTime });
                    }
                }
                catch (Exception err)
                {
                    // Count the URL as failed so success+failure stays consistent
                    // with visited, and log the full exception (type + stack trace),
                    // not just Message.
                    failure++;
                    logger.Error("下载失败，URL：{0}，错误：{1}", currentUrl, err);
                }
            }
        }
        #endregion ******************************Public Method******************************




    }
}