﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Doves.DataAccessLayer;
using HtmlAgilityPack;
using Doves.BussinessLayer;

namespace Doves.Crawlement
{
    /// <summary>
    /// Background job that crawls pending article links: downloads each link's HTML,
    /// resolves its analyzer configuration from the database, extracts the article
    /// content, and persists the result.
    /// </summary>
    public class ArticleCrawleJob
    {
        /// <summary>
        /// Builds the crawler input for a single link: downloads the HTML document and
        /// loads the analyzer plus its killed-child and replacing lists from the database.
        /// </summary>
        /// <param name="inLink">The link to prepare; may be null.</param>
        /// <returns>
        /// The populated input data, or null when the link is null, the page cannot be
        /// downloaded, or no analyzer is configured for the link.
        /// </returns>
        public ArticleCrawlerInputDataType PreparingCrawleDateFromDB(Link inLink)
        {
            if (inLink == null)
            {
                return null;
            }

            ArticleCrawlerInputDataType reData = new ArticleCrawlerInputDataType();
            reData.Link = inLink;

            try
            {
                // Download the HTML document for this URL.
                HtmlWeb web = new HtmlWeb();
                reData.HtmlDoc = web.Load(inLink.Url);
            }
            catch (Exception ex)
            {
                // Best-effort: an unreachable or broken page just means this link is
                // skipped for now; the caller bumps the link's retry counter.
                Console.WriteLine("Failed to load '{0}': {1}", inLink.Url, ex.Message);
                return null;
            }

            // Without an analyzer configured for this link we cannot parse the page.
            reData.Analizer = new ArticleAnalizerBO().GetArticleAnalizerByLink(inLink);
            if (reData.Analizer == null)
            {
                return null;
            }

            // Elements to strip from the article content, configured per analyzer.
            reData.KilledChildList = new ContentKilledChildBO()
                .GetContentKilledChildByArticleAnalizerId(reData.Analizer.Id);

            // Text replacements to apply to the article content, configured per analyzer.
            reData.ReplacingList = new ContentReplacingBO()
                .GetContentReplacingByArticleAnalizerId(reData.Analizer.Id);

            return reData;
        }

        /// <summary>
        /// Processes all links that still need crawling, in batches of
        /// Utilities.Constants.NUM_LINK_EACH_QUERY. A successfully parsed link is
        /// marked crawled and its article stored; any failure (page load, missing
        /// analyzer, or content scan) increments the link's retry counter so the
        /// outer loop cannot spin forever on the same broken links.
        /// </summary>
        /// <returns>Always true.</returns>
        public bool CrawleArticles()
        {
            // Keep fetching batches while the database reports pending links.
            while (new LinkBO().IsHaveLinkNeedCrawle().RetValue)
            {
                IEnumerable<Link> batch = new LinkBO().GetLinkToCrawle(Utilities.Constants.NUM_LINK_EACH_QUERY);
                foreach (Link crawledLink in batch)
                {
                    ArticleCrawlerInputDataType inputData = PreparingCrawleDateFromDB(crawledLink);

                    Article article = null;
                    if (inputData != null)
                    {
                        article = new ArticleCrawler(inputData).ScanArticleContent();
                    }

                    // Re-fetch the link so we update the current database row.
                    LinkBO linkBo = new LinkBO();
                    Link thisLink = linkBo.GetLinkById(crawledLink.Id);

                    if (article != null)
                    {
                        // Success: mark the link crawled and persist the article.
                        thisLink.IsCrawled = true;
                        thisLink.CrawledTime = DateTime.Now;
                        linkBo.UpdateLink(thisLink);
                        new ArticleBO().InsertArticle(article);
                        Console.WriteLine("Ngon");
                    }
                    else
                    {
                        // Failure anywhere in the pipeline: bump the retry counter so the
                        // link is retried later and eventually filtered out. (Previously a
                        // null inputData left the link untouched, which made the outer
                        // while loop spin forever on permanently broken links.)
                        thisLink.RetryTimes = (thisLink.RetryTimes ?? 0) + 1;
                        linkBo.UpdateLink(thisLink);
                        Console.WriteLine("Retry");
                    }
                }
            }
            return true;
        }

    }
}
