﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using RegExSpider.Configuration;
using System.Xml;
using System.Diagnostics;

namespace ConsoleApplication1
{
    /// <summary>
    /// Manual smoke-test harness for the RegExSpider crawler: builds an in-memory
    /// XML site configuration, initializes a spider from it, and starts crawling
    /// while printing link-queue progress to the console.
    /// </summary>
    public static class CrawlerTest
    {
        /// <summary>
        /// Configures and starts a <see cref="RegExSpider.Spider.Spider"/> against
        /// http://google.com using an inline XML site definition. Blocks/runs until
        /// the spider's own crawling logic completes; progress is reported via
        /// <see cref="sp_OnReportStatus"/>.
        /// </summary>
        public static void Begin()
        {
            RegExSpider.Spider.Spider sp = new RegExSpider.Spider.Spider();

            // Inline site configuration: root URL, crawl depth/start point,
            // URL patterns to skip, and regex-based page extraction rules.
            XmlDocument doc = new XmlDocument();
            doc.InnerXml =
                            @"<?xml version=""1.0"" encoding=""utf-8"" ?>
                            <Site>
                              <Name>Site name</Name>
                              <RootUrl>http://google.com</RootUrl>
                
                              <CrawlUrlsConfig>
                
                                <MaxDepth>5</MaxDepth>
                
                                <StartPoint>http://google.com</StartPoint>    
                
                                <NoFollow>
                                  <RegEx>(start=10)</RegEx>      
                                </NoFollow>
                
                              </CrawlUrlsConfig>
                
                
                              <PageExtractionElements>
                                <Element Name=""SpiderResult"" RegEx=""&lt;div class=&quot;name&quot;&gt;([.\s\S]*?)&lt;div class=&quot;labels&quot;&gt;"">
                                  <Field Name=""Title"" RegEx=""&gt;([.\s\S]*?)&lt;/a&gt;"" />
                                  <Field Name=""Description"" RegEx=""&lt;div&gt;([.\s\S]*?)&lt;/div&gt;"" />
                                </Element>    
                              </PageExtractionElements>
                
                            </Site>";

            // Parse the configuration exactly once and pass the result directly to
            // Initialize. (Previously ReadSiteEntityXml was invoked twice and the
            // first result was discarded — wasted work, and duplicated side effects
            // if the reader has any.)
            // NOTE(review): second Initialize argument (1) presumably a thread/worker
            // count or depth override — confirm against Spider.Initialize.
            sp.Initialize(RegExSpider.Configuration.ConfigReader.ReadSiteEntityXml(doc), 1);
            sp.OnReportStatus += new RegExSpider.Spider.Spider.ReportStatus(sp_OnReportStatus);
            sp.StartCrawling();
        }

        /// <summary>
        /// Progress callback: writes "&lt;scanned&gt; / &lt;waiting&gt;" link counts
        /// to the console each time the spider reports status.
        /// </summary>
        /// <param name="elements">Extraction-storage status (unused here).</param>
        /// <param name="links">Link-queue status; Scanned and Waiting counts are printed.</param>
        static void sp_OnReportStatus(RegExSpider.Storage.Entities.ElementStorageStatus elements, RegExSpider.Storage.Entities.LinkStorageStatus links)
        {
            Console.WriteLine(links.Scanned + " / " + links.Waiting);
        }
    }
}
