using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;

using Jeffrey.XFramework.XInteract;
using Jeffrey.XFramework.XNet;
using Jeffrey.XFramework.XManager;

namespace Jeffrey.XFramework.XCrawl
{
    /// <summary>
    /// A single crawler worker ("spider"). <see cref="XSpiderRun"/> is intended to run on
    /// its own thread: it pulls URIs from the controller's UriManagement queue, pushes each
    /// one through the processor chain, schedules any discovered links, and reports
    /// progress/errors back through the controller's Xbuject event object.
    /// </summary>
    public class CrawlSpider
    {
        /// <summary>
        /// Lifecycle states a spider reports via SpiderStatusPackage notifications.
        /// </summary>
        public enum SpiderStatus
        {
            Start,
            GetNewUri,
            EntryingProcessor,
            FinishedProcessor,
            Connecting,
            ReceivingHeader,
            Downloading,
            Downloaded,
            FinishedUri,
            Waiting,
            ShutDown
        }

        //guards the shared controller collections (request registration, running flags)...
        private static readonly Object s_SyncRoot = new Object();

        //shared across ALL spider threads: set once the configured page total is reached.
        //volatile so a write by one spider is seen promptly by the others
        //(a plain static Boolean read may be cached per-thread/register otherwise)
        private static volatile Boolean m_NoMoreLinks = false;

        /// <summary>
        /// Creates a spider bound to the controller that owns its work queue,
        /// processor chain and crawl configuration.
        /// </summary>
        public CrawlSpider(CrawlController controller)
        {
            m_Controller = controller;
        }

        /// <summary>
        /// Main worker loop. Runs until this spider's numeric ID is no longer below the
        /// configured spider count, then flags itself as stopped in the controller.
        /// Expects Thread.CurrentThread.Name to hold the spider's numeric ID.
        /// </summary>
        public void XSpiderRun()
        {
            UriManagement UriManagement = m_Controller.UriManagement;
            CrawlUri curi = null;
            String   suri = String.Empty;

            //some of the uris are allowed to process again...
            Boolean bIfReTry = false;

            //register the spider's http-request slot under this thread's id...
            XWebRequest xRequest = null;
            lock (s_SyncRoot)
            {
                m_Controller.m_RequestCollection[Thread.CurrentThread.ManagedThreadId] = xRequest;
            }

            //NOTE(review): throws FormatException if the thread was started without a
            //numeric Name — confirm every caller sets it before Start()
            Int32 SpiderID = Int32.Parse(Thread.CurrentThread.Name);
            m_Controller.Xbuject.OnSpiderStatusChanged(new SpiderStatusPackage(SpiderID, SpiderStatus.Start));

            //spiders whose id is at or above the configured count drop out immediately,
            //so lowering m_SpiderNum at runtime retires the highest-numbered spiders first
            while (SpiderID < m_Controller.m_CrawlOrder.m_SpiderNum)
            {
                if (!bIfReTry)
                {
                    Int32 depth = UriManagement.GetNext(out suri);

                    if (!String.IsNullOrEmpty(suri))
                    {
                        //in case for the error format of suri...
                        try
                        {
                            curi = new CrawlUri(suri);
                            curi.m_Depth = depth;
                        }
                        catch (UriFormatException)
                        {
                            //malformed uri: skip it and fetch the next one
                            continue;
                        }
                    }
                    else
                    {
                        curi = null;
                    }
                }
                else
                {
                    //reprocess the same CrawlUri; refresh suri from it for reporting
                    suri = curi.AbsoluteUri;
                    bIfReTry = false;
                }

                if (curi != null && curi.m_Depth <= m_Controller.m_CrawlOrder.m_Depth)
                {
                    m_Controller.Xbuject.OnSpiderStatusChanged(new SpiderStatusPackage(SpiderID, suri, curi.m_Depth.ToString(), SpiderStatus.GetNewUri));

                    /*
                     * the main loop of the uri processing...
                     */
                    foreach (Processor proc in m_Controller.UriProcessChain)
                    {
                        proc.Process(curi);
                    }

                    /*
                     * check if the uri has been processed by at least one processor...
                     */
                    if (curi.m_Status == CrawlUri.StatusCode.N_RejectedByAll)
                    {
                        m_Controller.Xbuject.OnErrorOccurred(new ErrorPackage(suri, CrawlToolKits.UriStatusToString(CrawlUri.StatusCode.N_RejectedByAll)));
                    }
                    /*
                     * when successful...
                     */
                    else if (curi.m_Status == CrawlUri.StatusCode.S_Succeed)
                    {
                        //not the curi.AbsoluteUri, in case for the last '/'...
                        UriManagement.Succeeded(suri);

                        //stop scheduling new links once the configured page total is reached
                        //(m_TotalPages <= 0 means unlimited)
                        if (m_Controller.m_CrawlOrder.m_TotalPages > 0 && UriManagement.AlreadyProcessed > m_Controller.m_CrawlOrder.m_TotalPages)
                            m_NoMoreLinks = true;

                        //NOTE(review): the '||' with '== m_Depth' looks inverted — it still
                        //schedules children past the page limit when at max depth (where the
                        //children exceed the depth cap anyway). Intended form may be
                        //'!m_NoMoreLinks && curi.m_Depth < ...m_Depth' — confirm before changing.
                        if ( !m_NoMoreLinks ||
                             curi.m_Depth == m_Controller.m_CrawlOrder.m_Depth
                           )
                        {
                            foreach (String str in curi.NormalLinks)
                                UriManagement.Schedule(str, false, curi.m_Depth + 1);

                            foreach (String str in curi.PriorLinks)
                                UriManagement.Schedule(str, true, curi.m_Depth + 1);
                        }

                        curi.NormalLinks.Clear();
                        curi.PriorLinks.Clear();
                    }
                    /*
                     * if failed, check if the uri can be retried
                     */
                    else if (curi.m_CanReTry && curi.m_Status == CrawlUri.StatusCode.E_NeedRetry)
                    {
                        //back to initial
                        curi.Initialize();
                        //flag this uri so it cannot be retried again...
                        curi.m_CanReTry = false;
                        //set to retry...
                        bIfReTry = true;

                        m_Controller.Xbuject.OnErrorOccurred(new ErrorPackage(suri, CrawlToolKits.UriStatusToString(CrawlUri.StatusCode.E_NeedRetry),
                                                                      curi.m_SocketError + "\r\n" + curi.m_CustomizedError)
                                                             );
                    }
                    else
                    {
                        UriManagement.Failed(suri);

                        //if retry failed...
                        if (curi.m_Status == CrawlUri.StatusCode.E_NeedRetry)
                            curi.m_Status = CrawlUri.StatusCode.E_RetryFailed;

                        else if (curi.m_Status == CrawlUri.StatusCode.E_ResponsedErrorCode)
                            m_Controller.Xbuject.OnErrorOccurred(new ErrorPackage(suri, CrawlToolKits.UriStatusToString(curi.m_Status),
                                                                      CrawlToolKits.HttpStatusCodesToString(curi.m_ResponseCode))
                                                                 );
                        else
                        {
                            m_Controller.Xbuject.OnErrorOccurred(new ErrorPackage(suri, CrawlToolKits.UriStatusToString(curi.m_Status),
                                                                      curi.m_SocketError + "\r\n" + curi.m_CustomizedError)
                                                                 );
                        }
                    }

                    //NOTE(review): forcing GC from a worker loop is an anti-pattern; kept
                    //because it is explicitly opt-in via the m_GC crawl-order flag
                    if ( m_Controller.m_CrawlOrder.m_GC &&
                         SpiderID % 5 == 1
                       )
                        GC.Collect();

                    m_Controller.Xbuject.OnSpiderStatusChanged(new SpiderStatusPackage(SpiderID, SpiderStatus.FinishedUri));

                    Thread.Sleep(m_Controller.m_CrawlOrder.m_Interval);
                }
                else
                {
                    /*
                     * once the spider didn't catch a link from UriManagement, we allow it to
                     * wait another 'm_Interval' before polling again...
                     * if UriManagement still has no links, the spider keeps waiting...
                     */
                    m_Controller.Xbuject.OnSpiderStatusChanged(new SpiderStatusPackage(SpiderID, SpiderStatus.Waiting));
                    Thread.Sleep(m_Controller.m_CrawlOrder.m_Interval);

                    /*if (0 == UriManagement.UriCounter)
                        break;*/
                }
            }

            m_Controller.Xbuject.OnSpiderStatusChanged(new SpiderStatusPackage(SpiderID, SpiderStatus.ShutDown));

            lock (s_SyncRoot)
            {
                m_Controller.m_SpiderXisRunning[SpiderID] = false;
            }
        }

        //assigned only in the constructor — readonly makes that contract explicit
        private readonly CrawlController m_Controller;
    }
}
