﻿using System;
using System.Xml;

using FetchAndProcess.Models;
using WebDUPlugIn.ConfigAndModels;

namespace WebDUPlugIn.Crawlers
{
    /// <summary>
    /// Web "DU" fetcher plugin. Implements <see cref="FetcherInterface"/> by
    /// loading a <see cref="WebDUConfig"/> from the fetcher's XML node and then
    /// driving a <see cref="WebCrawlEngine"/> over each configured parent URL.
    /// </summary>
    class WebDUPlugIn : FetcherInterface
    {
        private static readonly log4net.ILog log = log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);

        /// <summary>
        /// Builds this fetcher's configuration record from its XML config node.
        /// </summary>
        /// <param name="fetcherNode">XML node holding the fetcher's settings.</param>
        /// <returns>
        /// The loaded <see cref="WebDUConfig"/> (as the base record type), or
        /// null when loading fails so the caller can skip this fetcher.
        /// </returns>
        public FetcherConfigBaseRec getConfiguration(XmlNode fetcherNode)
        {
            var duConfig = new WebDUConfig();

            // WebDUConfig derives from FetcherConfigBaseRec, so the instance can be
            // returned directly; null signals a load failure to the caller.
            return duConfig.LoadConfiguration(fetcherNode) ? duConfig : null;
        }

        /// <summary>
        /// Runs the crawl: for every configured parent URL, fetches robots.txt
        /// rules and seeds the crawl queue, then starts the engine.
        /// </summary>
        /// <param name="configRec">
        /// Must be the <see cref="WebDUConfig"/> produced by
        /// <see cref="getConfiguration"/>; any other type is rejected.
        /// </param>
        /// <returns>Always null; the crawl engine handles result processing itself.</returns>
        public object crawlForData(FetcherConfigBaseRec configRec)
        {
            // Guard: this fetcher only understands its own config type.
            var webduCfg = configRec as WebDUConfig;
            if (webduCfg == null)
                return null;

            var crawlTheWeb = new WebCrawlEngine(webduCfg);
            foreach (UrlData urlNode in webduCfg.UrlParentNodes)
            {
                log.DebugFormat("Kicked off crawling for parent URL {0}", urlNode.Uri);

                // Honor robots.txt before seeding the parent URL into the queue.
                crawlTheWeb.CrawlRobotsDotText(urlNode.Uri);
                crawlTheWeb.EnQueueUrl(urlNode.Uri);
            }

            crawlTheWeb.StartCrawling();
            log.DebugFormat("Crawling completed.. for Fetcher {0}", configRec.FetcherName);
            return null;
        }
    }
}