﻿using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using System.Windows.Forms;

namespace Spider
{
    public class CrawWorker
    {
        /// <summary>
        /// Shared crawl state: URL queue, per-URI status map, abort flag and
        /// the active-worker counter (mUtil.done).
        /// </summary>
        private Util mUtil;

        /// <summary>
        /// The URI this worker is currently processing (null while idle).
        /// </summary>
        private Uri m_QueryUri;

        /// <summary>
        /// The background thread running <see cref="Process"/>.
        /// </summary>
        private Thread m_thread;

        /// <summary>
        /// Name assigned to the worker thread, for logging/diagnostics.
        /// </summary>
        private string mThreadName;

        /// <summary>
        /// HTTP fetcher used to download pages; created on the worker thread
        /// and disposed when the loop ends.
        /// </summary>
        IRequest request;

        /// <summary>
        /// Private lock object for the queue-polling wait in ObtainWork.
        /// The original code locked on "this", which is an anti-pattern and
        /// leaked the lock if Dequeue threw.
        /// NOTE(review): this lock is per-worker, so it does NOT synchronize
        /// access to the shared mUtil.QueueQueryUrl across workers — confirm
        /// that Util's queue operations are themselves thread-safe.
        /// </summary>
        private readonly object m_gate = new object();

        /// <summary>
        /// Creates a crawl worker bound to the shared crawl state.
        /// </summary>
        /// <param name="util">Shared crawl state and utilities.</param>
        /// <param name="ThreadName">Name to give the worker thread.</param>
        public CrawWorker(Util util, string ThreadName)
        {
            mUtil = util;
            mThreadName = ThreadName;
        }

        /// <summary>
        /// Polls (every 100 ms) until a URI is available in the shared queue,
        /// then dequeues and returns it. Returns null if the crawl is aborted
        /// while waiting, so the caller's loop can exit.
        /// </summary>
        /// <returns>The next URI to crawl, or null when aborting.</returns>
        public Uri ObtainWork()
        {
            lock (m_gate)
            {
                while (mUtil.QueueQueryUrl.Count < 1)
                {
                    // Bug fix: the original waited here forever when the queue
                    // stayed empty, so a worker could never observe an abort
                    // request. Process() already handles a null return.
                    if (mUtil.AbortTag)
                    {
                        return null;
                    }
                    // Timed wait: nothing pulses this private monitor, so the
                    // timeout is what turns this into a 100 ms poll (same
                    // effective behavior as the original Wait(this, 100)).
                    Monitor.Wait(m_gate, 100);
                }
                return (Uri)mUtil.QueueQueryUrl.Dequeue();
            }
        }

        /// <summary>
        /// Main loop for the crawling thread: repeatedly takes a URI from the
        /// shared queue, downloads it, and records success or failure until
        /// the crawl is aborted.
        /// </summary>
        private void Process()
        {
            mUtil.done.WorkerBegin();
            request = new HttpRequestSyn();

            try
            {
                while (!mUtil.AbortTag)
                {
                    m_QueryUri = ObtainWork();
                    if (m_QueryUri == null)
                    {
                        // Queue empty / aborting: loop back so the while
                        // condition can observe AbortTag.
                        continue;
                    }

                    mUtil.modifyQueryURIStatus(m_QueryUri, Status.STATUS_QUEUEDING);

                    try
                    {
                        request_GetResponseComplete(request.GetPage(m_QueryUri));
                    }
                    catch (Exception ex)
                    {
                        // Bug fix: the bare catch silently swallowed every
                        // error; log it, mark the URI failed, keep the worker
                        // alive for the next item.
                        Log.Instance.WriteMsg(string.Format(
                            "CrawWorker {0} failed on {1}: {2}",
                            mThreadName, m_QueryUri, ex.Message));
                        mUtil.modifyQueryURIStatus(m_QueryUri, Status.STATUS_FAILED);
                    }
                }
            }
            finally
            {
                // Bug fix: dispose the request and deregister the worker even
                // if the loop exits via an unexpected exception.
                request.Dispose();
                mUtil.done.WorkerEnd();
            }
        }

        /// <summary>
        /// Completion hook for a finished page download (kept as a separate
        /// method so it can be rewired to an async GetResponseComplete event).
        /// </summary>
        /// <param name="Page">The downloaded page, or null on failure.</param>
        void request_GetResponseComplete(PageClass Page)
        {
            completeResponse(Page);
        }

        /// <summary>
        /// Processes a downloaded page: extracts links from non-empty HTML and
        /// records the URI's final status. A null page is ignored.
        /// </summary>
        /// <param name="Page">The downloaded page, or null.</param>
        void completeResponse(PageClass Page)
        {
            if (Page == null)
                return;

            if (!string.IsNullOrEmpty(Page.html))
            {
                mUtil.GetHrefs(Page.URL.ToString(), Page.html);
                mUtil.modifyQueryURIStatus(Page.URL, Status.STATUS_SUCCESS);
            }
            else
            {
                // Empty body counts as a failed fetch.
                mUtil.modifyQueryURIStatus(Page.URL, Status.STATUS_FAILED);
            }
        }

        /// <summary>
        /// Starts the crawl loop on a new named background thread.
        /// </summary>
        public void Start()
        {
            m_thread = new Thread(new ThreadStart(this.Process));
            m_thread.Name = mThreadName;
            m_thread.IsBackground = true;
            m_thread.Start();
            // Bug fix: the original logged the CALLING thread's id (via the
            // obsolete AppDomain.GetCurrentThreadId) and state instead of the
            // worker thread's, and misspelled "Thread" as "Thraad".
            Log.Instance.WriteMsg(string.Format(
                "CrawWorker Thread Name:{0} Thread State:{1}",
                m_thread.Name, m_thread.ThreadState.ToString()));
        }

        /// <summary>
        /// The worker's thread (null until <see cref="Start"/> is called).
        /// </summary>
        public Thread MThread
        {
            get { return m_thread; }
        }

        /// <summary>
        /// The URI currently being crawled (null while idle).
        /// </summary>
        public Uri QueryURI
        {
            get { return m_QueryUri; }
        }
    }
}
