﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using log4net;
using System.Reflection;
using Crawler.TaskControl;
using WebCollection;
using Util;

namespace Crawler
{
    /// <summary>
    /// Driver that pulls the crawl tasks assigned to this instance from the
    /// TaskManager and runs each one to completion with a Robot.
    /// </summary>
    public class CrawlerXLBK
    {
        private static readonly ILog log = log4net.LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        private readonly TaskManager taskmgr = new TaskManager();

        private readonly PathManager pathmgr = new PathManager();

        /// <summary>
        /// Fetches every task assigned to this crawler, running them sequentially.
        /// A failure in one task is logged and does not abort the remaining tasks.
        /// Returns immediately when there are no tasks.
        /// </summary>
        public void Fetch()
        {
            List<CrawlTask> tasks = taskmgr.GetMyTasks();
            if (tasks.Count == 0)
            {
                log.Info("num of tasks is zero ,program exit");
                return;
            }

            // Robust parse: a missing or malformed appSetting previously crashed the
            // whole run via bool.Parse; now it is logged and defaults to false.
            bool enablerepeation;
            if (!bool.TryParse(ConfigUtil.GetAppSetting("enablerepeation"), out enablerepeation))
            {
                log.Warn("appSetting 'enablerepeation' is missing or not a boolean, defaulting to false");
                enablerepeation = false;
            }

            foreach (CrawlTask task in tasks)
            {
                try
                {
                    FetchSingleTask(task, enablerepeation);
                }
                catch (Exception e)
                {
                    // Pass the exception object so log4net records the full chain
                    // (message, stack trace, inner exceptions) in one entry.
                    log.Warn("task failed ," + task, e);
                }
            }
            log.Info("task execute complete ,program exit");
        }

        /// <summary>
        /// Runs a single crawl task: resolves its config and output paths by task
        /// name, starts a Robot from the task's seed URL, and always closes the
        /// robot afterwards, even if Start throws.
        /// </summary>
        /// <param name="task">The task to execute; its Name selects paths and its SeedUrl starts the crawl.</param>
        /// <param name="enablerepeation">Passed through to the Robot; presumably controls revisiting of already-crawled URLs — TODO confirm against Robot.</param>
        private void FetchSingleTask(CrawlTask task, bool enablerepeation)
        {
            log.Info("task start ," + task);
            String configpath = pathmgr.GetConfigPath(task.Name);
            String outputpath = pathmgr.GetOutputPath(task.Name);
            Robot robot = new Robot(configpath, outputpath, enablerepeation);
            try
            {
                robot.Start(task.SeedUrl);
            }
            finally
            {
                // Ensure the robot releases its resources even when Start fails;
                // previously an exception skipped Close() entirely.
                robot.Close();
            }
            log.Info("task finished ," + task);
        }
    }
}
