﻿using Crawler.Common.Net;
using Crawler.Core.CrawlerHandler.Analyzer;
using Crawler.Core.EventBus;
using Crawler.IRepositories;
using Crawler.IRepositories.SystemDefault;
using Crawler.Model.SystemDefault;
using System;
using System.Collections.Generic;
using System.Text;

namespace Crawler.Core.CrawlerHandler.Collector
{
    /// <summary>
    /// Handles a single collect (download) task: fetches the page at a queued URL,
    /// records the outcome, and publishes the follow-up event — a retry event on
    /// failure, or an analyze event on success.
    /// </summary>
    public class CollectTaskHandler
    {
        private readonly IHttpConfigRepository httpConfigRepository;
        private readonly ICollectorUrlRepository collectorUrlRepository;
        private readonly IRepository<PageInfoModel> pageInfoRepository;
        private readonly IEventBus eventBus;
        private readonly IRepository<CollectInfoConfigModel> collectInfoConfigRepository;

        public CollectTaskHandler(
            IHttpConfigRepository httpConfigRepository
            , ICollectorUrlRepository collectorUrlRepository
            , IRepository<PageInfoModel> pageInfoRepository
            , IEventBus eventBus
            , IRepository<CollectInfoConfigModel> collectInfoConfigRepository
            )
        {
            this.httpConfigRepository = httpConfigRepository;
            this.collectorUrlRepository = collectorUrlRepository;
            this.pageInfoRepository = pageInfoRepository;
            this.eventBus = eventBus;
            this.collectInfoConfigRepository = collectInfoConfigRepository;
        }

        /// <summary>
        /// Collects the page for <paramref name="urlModel"/> under the given config.
        /// Does nothing if the config has been deleted or is no longer running.
        /// </summary>
        /// <param name="terminalConfig">Terminal-level settings; only <c>MaxErrorTimes</c> is read here.</param>
        /// <param name="configModel">Collect config; re-fetched by ID so a stop request is honored.</param>
        /// <param name="urlModel">The URL record being collected.</param>
        public void Collect(TerminalConfigModel terminalConfig, CollectInfoConfigModel configModel, CollectorUrlModel urlModel)
        {
            // Re-read the config so we honor a stop request issued after this task was queued.
            configModel = collectInfoConfigRepository.Find(configModel.ID);
            if (configModel == null || configModel.WorkStatus != WorkStatus.Running)
            {
                return;
            }

            var html = DownloadHtml(urlModel, configModel);

            // Empty/unavailable HTML means the request threw; the literal "error" is
            // presumably HttpHelper.Get's failure sentinel — TODO confirm against HttpHelper.
            // string.Equals is null-safe, unlike the instance html.Equals call.
            if (!html.IsAvailable() || string.Equals(html, "error", StringComparison.Ordinal))
            {
                HandleCollectFailure(terminalConfig, urlModel);
                return;
            }

            HandleCollectSuccess(configModel, urlModel, html);
        }

        /// <summary>
        /// Downloads the page HTML with the configured user agent, encoding and proxy.
        /// Returns <see cref="string.Empty"/> when the request throws.
        /// </summary>
        private string DownloadHtml(CollectorUrlModel urlModel, CollectInfoConfigModel configModel)
        {
            try
            {
                Console.WriteLine("+++++Collector Collect:" + urlModel.Url);
                var agent = httpConfigRepository.GetUserAgent();
                string proxyip = httpConfigRepository.GetProxyIP();
                return HttpHelper.Get(urlModel.Url, agent, configModel.Encoding, proxyip);
            }
            catch (Exception ex)
            {
                // Best-effort: log and fall through to the retry path.
                Console.WriteLine($"{urlModel.Url}：{ex.Message}");
                return string.Empty;
            }
        }

        /// <summary>
        /// Records a failed attempt and re-publishes the URL for another try,
        /// giving up once the configured maximum number of errors is reached.
        /// </summary>
        private void HandleCollectFailure(TerminalConfigModel terminalConfig, CollectorUrlModel urlModel)
        {
            // Floor the limit locally instead of writing back to the caller's shared
            // config object (the original mutated terminalConfig.MaxErrorTimes as a side effect).
            var maxErrorTimes = Math.Max(1, terminalConfig.MaxErrorTimes);

            // Too many failures: stop retrying this URL.
            if (urlModel.ErrorTimes >= maxErrorTimes) return;

            urlModel.ErrorTimes++;
            collectorUrlRepository.Update(urlModel);

            // Re-queue the URL so the collector tries again.
            eventBus.Publish(new CollectorEventEntity { collectorUrlModel = urlModel });
        }

        /// <summary>
        /// Marks the URL as collected, persists the downloaded page, and publishes
        /// an event handing the page off to the analyzer pipeline.
        /// </summary>
        private void HandleCollectSuccess(CollectInfoConfigModel configModel, CollectorUrlModel urlModel, string html)
        {
            urlModel.CollectStatus = CollectStatus.Collected;
            collectorUrlRepository.Update(urlModel);

            var pageInfoModel = new PageInfoModel
            {
                CollectorConfigID = configModel.ID,
                CollectorUrlID = urlModel.ID,
                Url = urlModel.Url,
                Depth = urlModel.Depth,
                AnalyzeStatus = AnalyzeStatus.Default,
                Html = html
            };
            pageInfoRepository.Add(pageInfoModel);

            // Notify the analyzer that a new page is ready.
            eventBus.Publish(new AnalyzerEventEntity { pageInfoModel = pageInfoModel });
        }
    }
}
