﻿using System;
using System.Collections.Generic;
using System.Text;
using System.Data;
using System.Threading;
using System.Net;

namespace Lib.Crawler
{
    /***
     * This class is similar to QuickCrawling, but it only follows links that match the provided pattern.
     * Written on 03/05/2010 (dd/MM/yyyy)
     * xatm092
    ***/
    /// <summary>
    /// Crawls a web site starting from a seed URL, following only links whose href
    /// matches <see cref="LinkPattern"/>. Each matching page is handed to the
    /// project's extraction helper (DataPatternExtract.DataToXML). Work fans out
    /// onto the thread pool, one queued work item per newly discovered link.
    /// </summary>
    public class LinkPatternCrawling : IDisposable
    {
        string _linkPattern = "";
        /// <summary>Regex a link must match to be followed/extracted. Empty matches everything.</summary>
        public string LinkPattern { get { return _linkPattern; } set { _linkPattern = value; } }

        string _dataPattern = "";
        /// <summary>Regex passed to the data-extraction helper for each crawled page.</summary>
        public string DataPattern { get { return _dataPattern; } set { _dataPattern = value; } }

        int _ppr = int.MaxValue;
        /// <summary>Upper bound on the number of pages extracted per request.</summary>
        public int PagePerRequest { get { return _ppr; } set { _ppr = value; } }

        string _cate = "0";
        /// <summary>Category identifier forwarded to the extraction helper.</summary>
        public string CategoryID { get { return _cate; } set { _cate = value; } }

        // Pages extracted so far; compared against PagePerRequest to stop the crawl.
        int _cr = 0;

        /// <summary>Creates a crawler with no link or data pattern (follows every link).</summary>
        public LinkPatternCrawling()
        {
            _cr = 0;
        }

        /// <summary>Creates a crawler restricted to links matching <paramref name="LinkRegEx"/>.</summary>
        public LinkPatternCrawling(string LinkRegEx)
        {
            LinkPattern = LinkRegEx;
            _cr = 0;
        }

        /// <summary>Creates a crawler with both a link filter and a data-extraction pattern.</summary>
        public LinkPatternCrawling(string LinkRegEx, string DataRegEx)
        {
            LinkPattern = LinkRegEx;
            DataPattern = DataRegEx;
            _cr = 0;
        }

        /// <summary>Creates a fully configured crawler (link filter, data pattern and category).</summary>
        public LinkPatternCrawling(string LinkRegEx, string DataRegEx, string CateID)
        {
            LinkPattern = LinkRegEx;
            DataPattern = DataRegEx;
            CategoryID = CateID;
            _cr = 0;
        }

        /// <summary>
        /// No unmanaged resources are held, so this is a no-op; the class now
        /// declares IDisposable so callers can place it in a using block.
        /// </summary>
        public void Dispose()
        {
        }

        // Destination path for extracted data, remembered for worker threads.
        string _fpath = "";

        /// <summary>
        /// Seeds the crawl with <paramref name="URL"/> and starts crawling,
        /// writing extracted data under <paramref name="Path"/>.
        /// </summary>
        /// <param name="URL">Start URL, with or without the http:// prefix.</param>
        /// <param name="Path">File-system path handed to the extraction helper.</param>
        public void CrawlLinks(string URL, string Path)
        {
            // Strip the scheme so it can be re-added uniformly below.
            if (URL.ToLower().IndexOf("http://") >= 0)
                URL = URL.ToLower().Replace("http://", "");
            objQueu.Enqueue("http://" + URL);

            // Visited-link table: Link is the primary key, Status marks a fetched
            // page, Retry is a retry budget for failed requests.
            utbl = new DataTable();
            utbl.Columns.Add("Index", typeof(int));
            utbl.Columns["Index"].AutoIncrement = true;
            utbl.Columns["Index"].AutoIncrementSeed = 1;
            utbl.Columns["Index"].AutoIncrementStep = 1;

            utbl.Columns.Add("Link", typeof(string));
            utbl.Columns.Add("Status", typeof(bool));
            utbl.Columns.Add("Retry", typeof(int));
            utbl.PrimaryKey = new DataColumn[] { utbl.Columns["Link"] };
            utbl.Rows.Add(new object[] { null, "http://" + URL, false, 0 });

            // Derive the root host (first path segment containing a dot); used
            // later to decide whether a discovered link is on-site.
            string[] u = URL.Split(new char[] { '/', '\\' }, StringSplitOptions.RemoveEmptyEntries);
            if (u.Length > 0 && u[0].IndexOf(".") > 0)
                rootURL = u[0];
            else
                rootURL = URL;

            _fpath = Path;

            Crawl(Path);
            Interlocked.Increment(ref _total);
        }

        // Root host of the crawl, e.g. "example.com".
        string rootURL = "";

        /// <summary>
        /// Thread-pool work item: dequeues one URL, fetches it, extracts its data
        /// if it matches <see cref="LinkPattern"/>, then queues every new
        /// on-pattern link it finds as a further work item.
        /// </summary>
        /// <param name="obj">Unused WaitCallback state.</param>
        public void Crawl(object obj)
        {
            if (!working || _cr > PagePerRequest)
            {
                Interlocked.Increment(ref _numStop);
                return;
            }

            string URL = (objQueu.Count > 0) ? (string)objQueu.Dequeue() : null;
            // BUGFIX: the queue can be empty (or drained by a competing worker
            // between the Count check and the Dequeue); the original code went on
            // to dereference a null URL and threw NullReferenceException.
            if (string.IsNullOrEmpty(URL))
                return;

            // Queue entries are "url<newline>link text"; split them back apart.
            string URLName = "";
            if (URL.IndexOf(Environment.NewLine) >= 0)
            {
                string[] URLContent = URL.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
                if (URLContent.Length > 0)
                {
                    URL = URLContent[0];
                    if (URLContent.Length > 1)
                        URLName = string.Join(" ", URLContent, 1, URLContent.Length - 1);
                    else
                        URLName = URL;
                }
                else
                    return;
            }

            // Initiate HTTP object from Utilities.
            Utilities.BasicHTTP objHttp = new Lib.Utilities.BasicHTTP();

            string Content = "";
            // Get status from URL; the response body is returned through Content.
            HttpStatusCode scode = objHttp.GetLinkStatusAndContent(URL, ref Content);
            if (scode == HttpStatusCode.OK)
            {
                // If the *current* page's URL matches the link pattern (or no
                // pattern is configured), extract its data.
                System.Text.RegularExpressions.MatchCollection matchLinkA = System.Text.RegularExpressions.Regex.Matches(URL, LinkPattern, System.Text.RegularExpressions.RegexOptions.IgnoreCase | System.Text.RegularExpressions.RegexOptions.IgnorePatternWhitespace);
                if (string.IsNullOrEmpty(LinkPattern) || matchLinkA.Count > 0)
                {
                    Extraction.DataPatternExtract.DataToXML(URL, URLName, Content, rootURL, DataPattern, _fpath, CategoryID);
                    Interlocked.Increment(ref _cr);
                    Interlocked.Increment(ref _success);
                }
                matchLinkA = null;

                // Mark the page as fetched. BUGFIX: Rows.Find returns null for an
                // unknown key; the original relied on an empty catch to swallow
                // the resulting NullReferenceException.
                lock (utbl)
                {
                    DataRow visited = utbl.Rows.Find(URL);
                    if (visited != null)
                        visited["Status"] = true;
                }

                // Anchor tags that carry an href attribute and inner text.
                string Pattern = @"(<a\s*href=""(?<lnk>[^""]+)""[^>]*>)(?<content>[^<]+)(</a>)";
                System.Text.RegularExpressions.MatchCollection match = System.Text.RegularExpressions.Regex.Matches(Content.Replace(Environment.NewLine, ""), Pattern, System.Text.RegularExpressions.RegexOptions.IgnoreCase | System.Text.RegularExpressions.RegexOptions.IgnorePatternWhitespace);

                foreach (System.Text.RegularExpressions.Match objMatch in match)
                {
                    string nextURL = objMatch.Groups["lnk"].ToString();
                    string nameContents = objMatch.Groups["content"].ToString();

                    // Skip links that do not match the configured link pattern.
                    System.Text.RegularExpressions.MatchCollection matchLink = System.Text.RegularExpressions.Regex.Matches(nextURL, LinkPattern, System.Text.RegularExpressions.RegexOptions.IgnoreCase | System.Text.RegularExpressions.RegexOptions.IgnorePatternWhitespace);
                    if (!string.IsNullOrEmpty(LinkPattern) && matchLink.Count == 0)
                        continue;

                    nameContents = Utilities.CommonUtility.RemoveStrHtmlTags(nameContents, "");

                    if (nextURL.IndexOf(rootURL.Replace("www.", "")) >= 0)
                    {
                        // Host-qualified on-site link.
                        nextURL = nextURL.Replace("href=", "").Replace("\"", "").Replace("\'", "");
                        if (nextURL.StartsWith("/"))
                            // BUGFIX: keep the '/' separator; the original removed
                            // it and produced e.g. "example.compage".
                            nextURL = rootURL + nextURL;
                        if (nextURL.IndexOf("http://") < 0)
                            nextURL = "http://" + nextURL;
                        enqueueIfNew(nextURL, nameContents);
                    }
                    else if (nextURL.IndexOf("http://") < 0)
                    {
                        // Relative link on the same site.
                        nextURL = nextURL.Replace("href=", "").Replace("\"", "").Replace("\'", "");
                        if (nextURL != URL.Replace(rootURL, "").Replace("http://", "").Replace("www.", ""))
                        {
                            if (nextURL.StartsWith("/"))
                                nextURL = rootURL + nextURL;
                            else
                                nextURL = rootURL + "/" + nextURL;
                            if (nextURL.IndexOf("http://") < 0)
                                nextURL = "http://" + nextURL;
                            enqueueIfNew(nextURL, nameContents);
                        }
                    }
                }
            }
            else if (scode == HttpStatusCode.BadRequest)
            {
                // BUGFIX: guard Rows.Find against unknown keys (the original
                // dereferenced the result unconditionally here).
                lock (utbl)
                {
                    DataRow failedRow = utbl.Rows.Find(URL);
                    if (failedRow != null)
                    {
                        failedRow["Status"] = false;
                        failedRow["Retry"] = 3;
                    }
                }
                Interlocked.Increment(ref _failed);
            }
        }

        // Records nextURL in the visited table if unseen, then queues it (with its
        // link text) for crawling on the thread pool. DataTable is not thread-safe,
        // so the check-and-add runs as one unit under the table lock.
        private void enqueueIfNew(string nextURL, string nameContents)
        {
            bool added = false;
            lock (utbl)
            {
                if (!utbl.Rows.Contains(nextURL))
                {
                    utbl.Rows.Add(new object[] { null, nextURL, false, 0 });
                    added = true;
                }
            }
            if (added)
            {
                objQueu.Enqueue(nextURL + Environment.NewLine + nameContents);
                Interlocked.Increment(ref _total);
                ThreadPool.QueueUserWorkItem(new WaitCallback(Crawl));
            }
        }

        // Visited-link table, shared by worker threads; guarded by lock(utbl).
        DataTable utbl = new DataTable();

        /// <summary>Returns the visited-link table (Index, Link, Status, Retry).</summary>
        public DataTable getDataTable()
        {
            return utbl;
        }

        // Crawl statistics (public fields kept for source compatibility).
        public int _total = 0;   // links discovered
        public int _success = 0; // pages extracted
        public int _failed = 0;  // pages that returned 400 Bad Request

        // When false, workers exit instead of fetching; _numStop counts them.
        bool working = true;
        int _numStop = 0;

        /// <summary>Pauses the crawl: running workers exit instead of fetching.</summary>
        public void Pause()
        {
            working = false;
        }

        /// <summary>Resumes the crawl, re-queuing one worker per worker stopped by Pause.</summary>
        public void Continue()
        {
            working = true;
            // BUGFIX: reset the stop counter, otherwise each Pause/Continue cycle
            // re-queues the stale count from previous cycles on top of the new one.
            int stopped = Interlocked.Exchange(ref _numStop, 0);
            for (int i = 0; i < stopped; i++)
                ThreadPool.QueueUserWorkItem(new WaitCallback(Crawl));
        }

        // Pending-URL queue shared by all workers. BUGFIX: the plain non-generic
        // Queue is not thread-safe; Queue.Synchronized wraps it so concurrent
        // Enqueue/Dequeue calls from pool threads are safe.
        System.Collections.Queue objQueu = System.Collections.Queue.Synchronized(new System.Collections.Queue());

        // Dequeues the next pending URL, or null when the queue is empty.
        private string deQueuLink()
        {
            if (objQueu.Count > 0)
                return (string)objQueu.Dequeue();
            else
                return null;
        }
    }
}
