﻿using System;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;

using ShootSearch.Util;
using ShootSearch.Common;

using HtmlAgilityPack;
using ShootSearch.Core;
using System.ComponentModel;
using ShootSearch.Logging;
using log4net;

namespace ShootSearch.Spiders
{
    /// <summary>
    /// Extracts candidate crawl URLs from a downloaded HTML page using
    /// HtmlAgilityPack, resolving relative links against the page's own URL.
    /// </summary>
    public class UrlParser : IUrlParser
    {
        protected UrlParserConfig config;

        public UrlParser()
        {
            config = new UrlParserConfig();
        }

        #region IUrlParser Members

        /// <summary>
        /// Parses an HTML page and collects the URLs found in common
        /// link-bearing attributes (background/lowsrc/src/href) and in anchor tags.
        /// </summary>
        /// <param name="url">The URL the page was fetched from; its address is the base for relative links.</param>
        /// <param name="page">The raw HTML content of the page.</param>
        /// <returns>The extracted URLs, each one crawl depth deeper than <paramref name="url"/>.</returns>
        public List<Url> ParsePage(Url url, string page)
        {
            List<Url> urls = new List<Url>();
            Uri baseUri = new Uri(url.Address);
            short depth = (short)(url.Depth + 1);

            HtmlDocument doc = new HtmlDocument();
            doc.LoadHtml(page);

            // Any element that references a resource through one of the common attributes.
            HtmlNodeCollection atts = doc.DocumentNode.SelectNodes("//*[@background or @lowsrc or @src or @href]");
            if (atts != null)
            {
                foreach (HtmlNode n in atts)
                {
                    ParseLink(n, "background", urls, baseUri, depth);
                    ParseLink(n, "href", urls, baseUri, depth);
                    ParseLink(n, "src", urls, baseUri, depth);
                    ParseLink(n, "lowsrc", urls, baseUri, depth);
                }
            }

            // Anchor hrefs are collected separately so every <a href> is enqueued
            // (ParseLink deliberately skips href on non-<link> elements).
            HtmlNodeCollection hrefs = doc.DocumentNode.SelectNodes("//a[@href]");
            if (hrefs != null)
            {
                foreach (HtmlNode href in hrefs)
                {
                    EnQueue(href.Attributes["href"].Value, urls, baseUri, depth);
                }
            }
            return urls;
        }

        #endregion

        #region ParseLink
        /// <summary>
        /// Reads the named attribute from an HTML node and, when present, enqueues
        /// its value. An "href" attribute is only honored on &lt;link&gt; elements
        /// here, because anchor hrefs are gathered separately in ParsePage.
        /// </summary>
        /// <param name="node">The HTML node to inspect.</param>
        /// <param name="name">The attribute name to read.</param>
        /// <param name="urls">The collection extracted URLs are appended to.</param>
        /// <param name="baseUri">Base URI used to resolve relative links.</param>
        /// <param name="depth">Crawl depth assigned to extracted URLs.</param>
        private void ParseLink(HtmlNode node, string name, List<Url> urls, Uri baseUri, short depth)
        {
            HtmlAttribute att = node.Attributes[name];
            // Simplified from the original double negation:
            // accept any attribute except an href on a non-<link> element.
            if (att != null && (name != "href" || node.Name == "link"))
            {
                EnQueue(att.Value, urls, baseUri, depth);
            }
        }
        #endregion

        #region EnQueue
        /// <summary>
        /// Normalizes a raw link, applies the configured format and enqueue
        /// filters, and appends the resulting URL to <paramref name="urls"/>
        /// when it passes all of them.
        /// </summary>
        /// <param name="link">The raw link text taken from the page.</param>
        /// <param name="urls">The collection accepted URLs are appended to.</param>
        /// <param name="baseUri">Base URI used to resolve relative links.</param>
        /// <param name="depth">Crawl depth assigned to the URL.</param>
        private void EnQueue(string link, List<Url> urls, Uri baseUri, short depth)
        {
            Uri uri;
            try
            {
                // Normalize the link (turns relative links into a canonical form).
                link = WebPage.FormatLink(link);

                // Pre-filter obviously malformed absolute URLs (e.g. "http://ww\\\ww/")
                // with the configured regex Rule before constructing a Uri, so we do
                // not pay for a thrown UriFormatException on every bad link.
                // BUGFIX: the null check on RegularUrlFilter used to guard only the
                // "https:" branch, so an "http:" link with a null filter threw an
                // uncaught NullReferenceException here.
                bool looksAbsolute = link.IndexOf("http:", StringComparison.Ordinal) > -1
                    || link.IndexOf("https:", StringComparison.Ordinal) > -1;
                if (looksAbsolute
                    && config.RegularUrlFilter != null
                    && !config.RegularUrlFilter.IsMatch(link))
                {
                    return;
                }
                uri = new Uri(baseUri, link);
            }
            catch (UriFormatException ex)
            {
                Log.Warn(Logger, "Invalid URI:" + link, ex);
                return;
            }

            // Only http/https links are crawled.
            if (!uri.Scheme.Equals("http", StringComparison.OrdinalIgnoreCase)
                && !uri.Scheme.Equals("https", StringComparison.OrdinalIgnoreCase))
            {
                return;
            }

            Url u = new Url(depth, uri.ToString());
            // When configured to fetch static pages only, skip URLs carrying a query string.
            if (config.StaticOnly && u.Address.Contains("?"))
            {
                return;
            }

            // A URL is enqueued only if it satisfies every configured filter:
            // match-rules must match, exclude-rules must not (XOR of the two flags).
            bool canEnqueue = true;
            for (int i = 0; i < config.EnqueueFilters.Count; i++)
            {
                UrlFilter filter = config.EnqueueFilters[i];
                bool matches = filter.IsMatch(u.Address);
                if (filter.IsMatchRule ? !matches : matches)
                {
                    canEnqueue = false;
                    break;
                }
            }
            if (canEnqueue)
            {
                urls.Add(u);
            }
        }
        #endregion

        #region IConfigable Members

        /// <summary>
        /// Gets or sets the parser configuration. Setting a value that is not a
        /// <see cref="UrlParserConfig"/> is silently ignored (original behavior kept).
        /// </summary>
        public ShootSearch.Core.SpiderManConfig Config
        {
            get { return config; }
            set
            {
                UrlParserConfig parserConfig = value as UrlParserConfig;
                if (parserConfig != null)
                {
                    config = parserConfig;
                }
            }
        }

        /// <summary>
        /// Logger used to report invalid URIs encountered while parsing.
        /// </summary>
        public ILog Logger { get; set; }
        #endregion
    }


    /// <summary>
    /// Configuration for <see cref="UrlParser"/>: URL format validation,
    /// enqueue filtering, and the static-pages-only switch.
    /// </summary>
    [TypeConverter(typeof(Util.ExpandableConverter<UrlParserConfig>))]
    public class UrlParserConfig : SpiderManConfig
    {
        /// <summary>
        /// Default regular expression used to sanity-check URL format.
        /// </summary>
        public const string REGULAR_URL =
                    @"^((https|http|ftp|rtsp)?://)?(([0-9a-z_!~*'().&=+$%-]+: )?[0-9a-z_!~*'().&=+$%-]+@)?(([0-9]{1,3}\.){3}[0-9]{1,3}|([0-9a-z_!~*'()-]+\.)*([0-9a-z][0-9a-z-]{0,61})?[0-9a-z]\.[a-z]{2,6})(:[0-9]{1,4})?((/?)|(/[0-9a-z_!~*'().;?:@&=+$,%#-]+)+/?)";

        /// <summary>
        /// Rule that checks URL format before a <see cref="Uri"/> is constructed.
        /// </summary>
        public Rule RegularUrlFilter { get; set; }

        /// <summary>
        /// Filters deciding whether an extracted URL may be enqueued.
        /// </summary>
        public List<UrlFilter> EnqueueFilters { get; set; }

        /// <summary>
        /// When true, only static page URLs (no query string) are enqueued.
        /// </summary>
        public bool StaticOnly { get; set; }

        /// <summary>
        /// Initializes the configuration with an empty enqueue-filter list and
        /// the default URL format rule.
        /// </summary>
        public UrlParserConfig()
        {
            this.EnqueueFilters = new List<UrlFilter>();
            this.RegularUrlFilter = new Rule(REGULAR_URL);
        }
    }
}
