﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Data;
using GatherWebLib;
using System.Text.RegularExpressions;
using System.Threading;
using GatherWebLib.Model;

namespace GatherWebLib
{
   /// <summary>
   /// Executes one split (sub-task) of a gather task: walks the task's list of
   /// web links, expands navigation / next-page rules into concrete page
   /// addresses, downloads each page and merges the extracted rows into
   /// <see cref="GatherData"/>. Progress and errors are reported through the
   /// internal <see cref="Log"/> event.
   /// </summary>
   public class cGatherTaskSplit
   {
       public cGatherTaskSplit()
       {
           m_GatherData = new DataTable();
       }

       #region Properties
       /// <summary>
       /// Synchronization lock guarding event subscribe/unsubscribe.
       /// </summary>
       private readonly Object m_eventLock = new Object();

       private string m_TaskName;
       /// <summary>
       /// Name of the owning gather task; used as the source name on log events.
       /// </summary>
       public string TaskName
       {
           get { return m_TaskName; }
           set { m_TaskName = value; }
       }

       private DataTable m_GatherData;
       /// <summary>
       /// Accumulated gathered data; the rows of every successfully gathered
       /// page are merged into this table.
       /// </summary>
       public DataTable GatherData
       {
           get { return m_GatherData; }
           set { m_GatherData = value; }
       }

       private cGlobalParas.TaskType m_TaskType;
       /// <summary>
       /// Task type; <c>AjaxHtmlByUrl</c> switches page retrieval into AJAX mode.
       /// </summary>
       public cGlobalParas.TaskType TaskType
       {
           get { return m_TaskType; }
           set { m_TaskType = value; }
       }

       private bool _IsAjax = false;
       /// <summary>
       /// Whether pages are fetched through the AJAX-capable retrieval path.
       /// </summary>
       public bool IsAjax
       {
           get { return _IsAjax; }
           set { _IsAjax = value; }
       }

       // When true an error log would be persisted once retries are exhausted.
       // NOTE(review): persistence itself was never implemented (see the empty
       // branches in GetGatherData).
       private bool m_IsErrorLog = true;

       private int m_AgainNumber = 4;
       /// <summary>
       /// Number of retries after a connection error before giving up.
       /// </summary>
       public int AgainNumber
       {
           get { return m_AgainNumber; }
           set { m_AgainNumber = value; }
       }

       private bool m_Ignore404 = false;
       /// <summary>
       /// When true, an error whose message contains "404" aborts immediately
       /// instead of being retried.
       /// </summary>
       public bool Ignore404
       {
           get { return m_Ignore404; }
           set { m_Ignore404 = value; }
       }

       private List<cWebLink_m> m_Weblink;
       /// <summary>
       /// Web addresses this split has to gather.
       /// </summary>
       public List<cWebLink_m> Weblink
       {
           get { return m_Weblink; }
           set { m_Weblink = value; }
       }

       private List<cWebpageCutFlag_m> m_CutFlag;
       /// <summary>
       /// Parameters describing how page content is cut out.
       /// </summary>
       public List<cWebpageCutFlag_m> CutFlag
       {
           get { return m_CutFlag; }
           set { m_CutFlag = value; }
       }

       // Forwarded verbatim into every log event argument.
       public bool IsErrorLog = true;
       #endregion

       #region Events
       /// <summary>
       /// Raised for every log message produced while gathering.
       /// </summary>
       private event EventHandler<cGatherTaskLogArgs> e_Log;
       internal event EventHandler<cGatherTaskLogArgs> Log
       {
           add { lock (m_eventLock) { e_Log += value; } }
           remove { lock (m_eventLock) { e_Log -= value; } }
       }

       /// <summary>
       /// Raises the log event null-safely (the original invoked the delegate
       /// directly and crashed when nobody had subscribed). The delegate is
       /// copied to a local so a concurrent unsubscribe cannot race the check.
       /// </summary>
       private void RaiseLog(string message, cGlobalParas.LogType logType)
       {
           EventHandler<cGatherTaskLogArgs> handler = e_Log;
           if (handler != null)
           {
               handler(this, new cGatherTaskLogArgs(m_TaskName, message, this.IsErrorLog, logType));
           }
       }
       #endregion

       /// <summary>
       /// Entry point: gathers every link of this split. A navigation address is
       /// first expanded into the concrete target addresses, which are then
       /// gathered one by one; a plain address is gathered directly.
       /// </summary>
       /// <returns>
       /// The result of the last processed link (preserving the original
       /// semantics); false when the link list is null or empty.
       /// </returns>
       public bool GatherUrl()
       {
           bool IsSucceed = false;

           if (m_Weblink == null)
           {
               return false;
           }

           foreach (cWebLink_m weblink in m_Weblink)
           {
               if (weblink.IsNavigation)
               {
                   IsSucceed = GatherNavigationUrl(weblink);
               }
               else
               {
                   IsSucceed = GatherSingleUrl(weblink);
               }
           }

           return IsSucceed;
       }

       /// <summary>
       /// Gathers the data of a single (non-navigation) page, optionally
       /// following the link's next-page rule until no further page is found or
       /// the next page equals the current one.
       /// </summary>
       /// <param name="Link_m">Link configuration (address, encoding, cut positions, next-page rule).</param>
       /// <returns>True on success; false when an exception was logged.</returns>
       private bool GatherSingleUrl(cWebLink_m Link_m)
       {
           DataTable tmpData;
           string Url = Link_m.Weblink;
           string NextUrl = Link_m.Weblink;
           string Old_Url = NextUrl;

           if (m_TaskType == cGlobalParas.TaskType.AjaxHtmlByUrl)
               IsAjax = true;

           try
           {
               if (Link_m.IsNextpage)
               {
                   do
                   {
                       Url = NextUrl;
                       Old_Url = NextUrl;

                       RaiseLog(cTool.GetEnumCNNameFromEnum(cGlobalParas.LogType.Info).ToString() + "正在采集：" + Url + "\n", cGlobalParas.LogType.Info);

                       if (Link_m.IsUrlEncode)
                       {
                           Url = cTool.UrlEncode(Url, Link_m.UrlEncode);
                       }

                       tmpData = GetGatherData(Url, Link_m.WebCode, Link_m.StartPos, Link_m.EndPos);

                       if (tmpData != null)
                       {
                           m_GatherData.Merge(tmpData);
                       }

                       RaiseLog(cTool.GetEnumCNNameFromEnum(cGlobalParas.LogType.Info).ToString() + "采集完成：" + Url + "\n", cGlobalParas.LogType.Info);

                       RaiseLog(cTool.GetEnumCNNameFromEnum(cGlobalParas.LogType.Info).ToString() + "开始根据下一页规则获取下一页网址\n", cGlobalParas.LogType.Info);

                       // NOTE: the rule is applied to the possibly url-encoded
                       // address, matching the original behavior.
                       NextUrl = GetNextstr(Url, Link_m.NextPageRule, Link_m.WebCode);
                   }
                   while (NextUrl != "" && Old_Url != NextUrl);
               }
               else
               {
                   if (Link_m.IsUrlEncode)
                   {
                       Url = cTool.UrlEncode(Url, Link_m.UrlEncode);
                   }

                   tmpData = GetGatherData(Url, Link_m.WebCode, Link_m.StartPos, Link_m.EndPos);

                   if (tmpData != null)
                   {
                       m_GatherData.Merge(tmpData);
                   }

                   RaiseLog(cTool.GetEnumCNNameFromEnum(cGlobalParas.LogType.Info).ToString() + "采集完成：" + Url + "\n", cGlobalParas.LogType.Info);
               }
           }
           catch (System.Exception ex)
           {
               RaiseLog(cTool.GetEnumCNNameFromEnum(cGlobalParas.LogType.Error).ToString() + Url + "采集发生错误：" + ex.Message + "\n", cGlobalParas.LogType.Error);
               return false;
           }

           return true;
       }

       /// <summary>
       /// Downloads <paramref name="Url"/> and extracts the next-page address
       /// using the configured next-page rule, resolving relative links against
       /// the current address.
       /// </summary>
       /// <returns>The absolute next-page address, or "" when the final page was reached.</returns>
       private string GetNextstr(string Url, string NextRule, cGlobalParas.WebCode WebCode)
       {
           bool isAjaxRequest = (m_TaskType == cGlobalParas.TaskType.AjaxHtmlByUrl);

           cGatherWeb gWeb = new cGatherWeb();
           string webSource = gWeb.GetHtml(Url, WebCode, "", "", "", true, isAjaxRequest);

           // Matches an href value that is followed, within the same element,
           // by the configured next-page marker text.
           string NRule = "((?<=href=[\'|\"])\\S[^#+$<>\\s]*(?=[\'|\"]))[^<]*(?<=" + NextRule + ")";
           Match charSetMatch = Regex.Match(webSource, NRule, RegexOptions.IgnoreCase | RegexOptions.Multiline);
           string strNext = charSetMatch.Groups[1].Value;

           if (strNext == "")
           {
               RaiseLog(cTool.GetEnumCNNameFromEnum(cGlobalParas.LogType.Info).ToString() + "已经到最终页" + "\n", cGlobalParas.LogType.Info);
               return strNext;
           }

           if (strNext.StartsWith("/"))
           {
               // Root-relative link: prepend scheme + host.
               // NOTE(review): assumes Url starts with "http://" (7 chars) and
               // the host is followed by a "/" — TODO confirm for https URLs.
               string PreUrl = Url.Substring(7);
               PreUrl = PreUrl.Substring(0, PreUrl.IndexOf("/"));
               strNext = "http://" + PreUrl + strNext;
           }
           else if (strNext.StartsWith("http://", StringComparison.CurrentCultureIgnoreCase))
           {
               // Already absolute — use as-is.
           }
           else if (strNext.StartsWith("?", StringComparison.CurrentCultureIgnoreCase))
           {
               // Query-only link: keep everything before the current "?" and
               // append the new query string.
               Match aa = Regex.Match(Url, @".*(?=\?)");
               strNext = aa.Groups[0].Value + strNext;
           }
           else
           {
               // Path-relative link: prepend everything up to the last "/".
               Match aa = Regex.Match(Url, ".*/");
               strNext = aa.Groups[0].Value + strNext;
           }

           return strNext;
       }

       /// <summary>
       /// Entry point for addresses that carry navigation rules. Navigation has
       /// two layers: the next-page rule (handled here, identically to
       /// GatherSingleUrl) and the page-navigation rules, which are delegated to
       /// ParseGatherNavigationUrl for every page.
       /// </summary>
       /// <returns>The result of the last navigation parse; false when an exception was logged.</returns>
       private bool GatherNavigationUrl(cWebLink_m Link_m)
       {
           string Url = Link_m.Weblink;
           bool IsNext = Link_m.IsNextpage;
           string NextRule = Link_m.NextPageRule;

           string NextUrl = Url;
           string Old_Url = NextUrl;
           bool IsSucceed = false;

           try
           {
               if (IsNext)
               {
                   do
                   {
                       Url = NextUrl;
                       Old_Url = NextUrl;

                       RaiseLog(cTool.GetEnumCNNameFromEnum(cGlobalParas.LogType.Info).ToString() + "正在采集：" + Url + "\n", cGlobalParas.LogType.Info);

                       IsSucceed = ParseGatherNavigationUrl(Url, Link_m);

                       RaiseLog(cTool.GetEnumCNNameFromEnum(cGlobalParas.LogType.Info).ToString() + "采集完成：" + Url + "\n", cGlobalParas.LogType.Info);

                       RaiseLog(cTool.GetEnumCNNameFromEnum(cGlobalParas.LogType.Info).ToString() + "开始根据下一页规则获取下一页网址\n", cGlobalParas.LogType.Info);

                       NextUrl = GetNextstr(Url, NextRule, Link_m.WebCode);
                   }
                   while (NextUrl != "" && Old_Url != NextUrl);
               }
               else
               {
                   IsSucceed = ParseGatherNavigationUrl(Url, Link_m);
               }
           }
           catch (System.Exception ex)
           {
               RaiseLog(((int)cGlobalParas.LogType.Error).ToString() + Url + "采集发生错误：" + ex.Message + "\n", cGlobalParas.LogType.Error);
               return false;
           }

           return IsSucceed;
       }

       /// <summary>
       /// Applies the page-navigation rules to one page: resolves the rules into
       /// a list of target addresses and gathers each of them. A failure of a
       /// single address is logged and does not abort the remaining addresses.
       /// </summary>
       /// <returns>False when the navigation rules yielded no addresses; true otherwise
       /// (matching the original, regardless of per-address results).</returns>
       private bool ParseGatherNavigationUrl(string Url, cWebLink_m Link_m)
       {
           cUrlAnalyze u = new cUrlAnalyze();
           bool IsSucceed = false;

           RaiseLog(cTool.GetEnumCNNameFromEnum(cGlobalParas.LogType.Info).ToString() + "开始根据导航规则获取网页地址，请等待......\n导航层级为：" + Link_m.NavigRules.Count + " 层\n", cGlobalParas.LogType.Info);

           List<string> gUrls = u.ParseUrlRule(Url, Link_m.NavigRules, Link_m.WebCode, "");

           u = null;
           if (gUrls == null || gUrls.Count == 0)
           {
               RaiseLog(cTool.GetEnumCNNameFromEnum(cGlobalParas.LogType.Info).ToString() + Url + " 导航解析失败，有可能是由于导航规则配置错误，也有可能是由于垃圾数据造成，如果是垃圾数据，则不影响系统对数据的采集\n", cGlobalParas.LogType.Info);
               return false;
           }

           RaiseLog(cTool.GetEnumCNNameFromEnum(cGlobalParas.LogType.Info).ToString() + "成功根据导航规则获取" + gUrls.Count + "个网址\n", cGlobalParas.LogType.Info);

           for (int j = 0; j < gUrls.Count; j++)
           {
               try
               {
                   string target = gUrls[j];

                   // StartsWith replaces the original Substring(0, 4) compare,
                   // which threw on addresses shorter than 4 characters.
                   if (!target.StartsWith("http", StringComparison.OrdinalIgnoreCase))
                   {
                       // Relative address: make it absolute first.
                       string PreUrl = Url;

                       if (target.StartsWith("/"))
                       {
                           // Root-relative: keep only scheme + host of the page URL.
                           // NOTE(review): assumes "http://" prefix — TODO confirm.
                           PreUrl = PreUrl.Substring(7);
                           PreUrl = PreUrl.Substring(0, PreUrl.IndexOf("/"));
                           PreUrl = "http://" + PreUrl;
                       }
                       else
                       {
                           // Path-relative: keep everything up to the last "/".
                           Match aa = Regex.Match(PreUrl, ".*/");
                           PreUrl = aa.Groups[0].Value;
                       }

                       IsSucceed = GatherParsedUrl(PreUrl + target, Link_m);
                   }
                   else
                   {
                       IsSucceed = GatherParsedUrl(target, Link_m);
                   }
               }
               catch (System.Exception ex)
               {
                   RaiseLog(((int)cGlobalParas.LogType.Error).ToString() + Url + "采集发生错误：" + ex.Message + "\n", cGlobalParas.LogType.Error);
               }
           }

           return true;
       }

       /// <summary>
       /// Gathers one concrete address produced by navigation expansion and
       /// merges its rows into <see cref="GatherData"/>.
       /// </summary>
       /// <returns>True on success (including "no data", which is only logged); false when an exception was logged.</returns>
       private bool GatherParsedUrl(string Url, cWebLink_m Link_m)
       {
           DataTable tmpData;

           try
           {
               if (Link_m.IsUrlEncode)
               {
                   Url = cTool.UrlEncode(Url, Link_m.UrlEncode);
               }

               tmpData = GetGatherData(Url, Link_m.WebCode, Link_m.StartPos, Link_m.EndPos);

               if (tmpData != null)
               {
                   m_GatherData.Merge(tmpData);
               }

               if (tmpData == null || tmpData.Rows.Count == 0)
               {
                   RaiseLog(((int)cGlobalParas.LogType.Error).ToString() + Url + " 此地址无数据！" + "\n", cGlobalParas.LogType.Error);
               }
               else
               {
                   RaiseLog(cTool.GetEnumCNNameFromEnum(cGlobalParas.LogType.Info).ToString() + "采集完成：" + Url + "\n", cGlobalParas.LogType.Info);
               }
           }
           catch (System.Exception ex)
           {
               RaiseLog(((int)cGlobalParas.LogType.Error).ToString() + Url + "采集发生错误：" + ex.Message + "\n", cGlobalParas.LogType.Error);
               return false;
           }

           return true;
       }

       /// <summary>
       /// Shared download entry: every page gathered by this class goes through
       /// here so that connection errors are retried uniformly (up to
       /// <see cref="AgainNumber"/> times, sleeping 3 seconds between attempts).
       /// No rule processing happens here.
       /// </summary>
       /// <param name="Url">Address to download (already url-encoded when required).</param>
       /// <param name="WebCode">Character encoding of the page.</param>
       /// <param name="StartPos">Start marker for content cutting.</param>
       /// <param name="EndPos">End marker for content cutting.</param>
       /// <returns>The gathered rows as returned by cGatherWeb.GetGatherData.</returns>
       /// <exception cref="System.Exception">Rethrown once retries are exhausted,
       /// or immediately for 404 errors when <see cref="Ignore404"/> is set.</exception>
       private DataTable GetGatherData(string Url, cGlobalParas.WebCode WebCode, string StartPos, string EndPos)
       {
           cGatherWeb gWeb = new cGatherWeb();
           gWeb.CutFlag = m_CutFlag;

           // AJAX tasks always use the AJAX retrieval path. The original only
           // honored the IsAjax property, which the navigation code path never
           // set, so AJAX navigation tasks silently fell back to plain HTTP.
           bool useAjax = IsAjax || m_TaskType == cGlobalParas.TaskType.AjaxHtmlByUrl;

           int AgainTime = 0;

           // Retry loop (replaces the original goto-based retry).
           while (true)
           {
               try
               {
                   return gWeb.GetGatherData(Url, WebCode, "", StartPos, EndPos, "", useAjax);
               }
               catch (System.Exception ex)
               {
                   AgainTime++;

                   if (AgainTime > m_AgainNumber)
                   {
                       if (m_IsErrorLog)
                       {
                           // TODO: persist the error log (never implemented).
                       }

                       // "throw;" preserves the original stack trace
                       // (the original used "throw ex;", which resets it).
                       throw;
                   }

                   if (m_Ignore404 && ex.Message.Contains("404"))
                   {
                       if (m_IsErrorLog)
                       {
                           // TODO: persist the error log (never implemented).
                       }

                       throw;
                   }

                   RaiseLog(((int)cGlobalParas.LogType.Error).ToString() + "网址：" + Url + "访问发生错，错误信息：" + ex.Message + "，等待3秒重试\n", cGlobalParas.LogType.Error);

                   Thread.Sleep(3000);

                   RaiseLog(((int)cGlobalParas.LogType.Warning).ToString() + Url + "正在进行第" + AgainTime + "次重试\n", cGlobalParas.LogType.Warning);
               }
           }
       }

   }
}
