package com.skytech.component.crawler.task;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.skytech.component.crawler.scratch.chrome.SingleChromeTask;
import com.skytech.component.crawler.scratch.chrome.TestChromeService;
import com.skytech.component.crawler.parse.ParserLoader;
import com.skytech.component.crawler.scratch.base.*;
import com.skytech.component.crawler.scratch.downloader.HtmlUnitDownloader;
import com.skytech.component.crawler.scratch.process.CreateHtmlPageProcessor;
import com.skytech.component.crawler.scratch.process.SimpleCommonProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.web.client.RestTemplateBuilder;
import org.springframework.data.domain.Example;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.AsyncResult;
import org.springframework.stereotype.Service;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Future;

@Service
public class CrawlerTask {

    private static final Logger log = LoggerFactory.getLogger(CrawlerTask.class);

    @Autowired
    private CrawlerUrlDAO crawlerUrlDAO;
    @Autowired
    private TargetRulesDAO targetRulesDAO;
    @Autowired
    private ParserLoader parserLoader;
    @Autowired
    private PageContentDAO pageContentDAO;
    @Autowired
    private RestTemplateBuilder restTemplateBuilder;
    @Autowired
    private TestChromeService testChromeService;
    @Autowired
    private SingleChromeTask singleChromeTask;

    /** Base URL of the Elasticsearch server used by {@link #backUpData()} (expected to end with '/'). */
    @Value("${elasticsearch.server}")
    private String server;

    /** Filesystem path of the Chrome WebDriver binary, handed to {@link TaskManager}. */
    @Value("${chrome.driver.path}")
    private String driverPath;

    /** Number of partitions used when splitting the rule list across chrome tasks. */
    private int taskCount = 5;

    /**
     * Last spider created by one of the task methods.
     * NOTE(review): public static mutable state — overwritten on every loop iteration
     * and not thread-safe; kept as-is because external code may reference it.
     */
    public static Spider spider;

    /** Start timestamp (epoch millis) of the last {@link #startTask(Boolean)} call; read by {@link #getTime()}. */
    public long startTime;

    /**
     * Runs the Chrome-driven crawl for every configured rule, then parses the results.
     */
    public void startChromeTask(){
        List<TargetRules> ruleList = targetRulesDAO.findAll();
        TaskManager taskManager = new TaskManager(ruleList,crawlerUrlDAO,driverPath);
        taskManager.start();
        //todo parse
        parserLoader.load();
        /*ruleList.sort(Comparator.comparing(TargetRules::getId));
        if(ruleList.size()>=taskCount){
            List<List<TargetRules>> divide = divide(ruleList, taskCount);
            for (List<TargetRules> s : divide) {
                singleChromeTask.start(s,crawlerUrlDAO);
            }

        }else{
            singleChromeTask.start(ruleList,crawlerUrlDAO);
        }*/

    }

    /**
     * Splits {@code all} into {@code taskCount} contiguous partitions; the last
     * partition absorbs the remainder when the size is not evenly divisible.
     * <p>
     * Precondition (enforced by the caller): {@code all.size() >= taskCount} and
     * {@code taskCount > 0}; the returned sublists are views backed by {@code all}.
     *
     * @param all       full list of rules to partition
     * @param taskCount number of partitions to produce
     * @return list of {@code taskCount} sublist views covering {@code all}
     */
    private List<List<TargetRules>> divide(List<TargetRules> all,int taskCount){
        List<List<TargetRules>> result = new ArrayList<>(taskCount);
        int count = all.size()/taskCount;
        for(int j = 0;j<taskCount;j++){
            if(j == taskCount-1){
                // Last slice takes everything remaining, including the division remainder.
                result.add(all.subList(j*count,all.size()));
            }else{
                result.add(all.subList(j*count,(j+1)*count));
            }
        }
        return result;
    }

    /**
     * Asynchronously runs the "simple" crawl over all rules of type {@code simple}.
     *
     * @param status initial-crawl flag forwarded to the page processors
     * @return completed future whose value signals completion
     */
    @Async
    public Future<String> startTask(Boolean status){
        startTime = System.currentTimeMillis();
        TargetRules targetRule = new TargetRules();
        targetRule.setType(CreatePageType.simple.toString());
        List<TargetRules> listSimple = targetRulesDAO.findAll(Example.of(targetRule));
        targetRule = new TargetRules();
        targetRule.setType(CreatePageType.create_html_page.toString());
        // Fix: apply the Example filter that was built above; previously findAll()
        // ignored it and returned every rule regardless of type.
        List<TargetRules> listCreateHtmlPage = targetRulesDAO.findAll(Example.of(targetRule));

        this.SimpleCommonProcessorTask(status,listSimple);
        /*this.createHtmlPageTask(status,listCreateHtmlPage);*/
        /*parserLoader.load();*/
        return new AsyncResult<>("完成");
    }

    /**
     * Asynchronously crawls the given rules, dispatching each to the processor
     * matching its {@link CreatePageType}.
     *
     * @param list rules to crawl (each run with initStatus {@code true})
     */
    @Async
    public void test(List<TargetRules> list){
        for (TargetRules targetRules : list) {
            if(CreatePageType.simple.toString().equals(targetRules.getType())){
                SimpleCommonProcessorTask(true,Arrays.asList(targetRules));
            }else{
                createHtmlPageTask(true,Arrays.asList(targetRules));
            }
        }
    }

    /** Runs the parser over previously crawled content. */
    public void parse(){
        parserLoader.load();
    }

    /**
     * Elapsed minutes since the last {@link #startTask(Boolean)} call.
     *
     * @return elapsed time in minutes, or {@code 0.0} if no task has started yet
     */
    public Double getTime(){
        if(startTime==0)return 0.0;
        return (System.currentTimeMillis()-startTime)/(1000*60.0);
    }

    /**
     * Crawls each rule with a {@link SimpleCommonProcessor}, running spiders
     * sequentially on the calling thread.
     *
     * @param initStatus whether this is an initial (full) crawl, forwarded to the processor
     * @param list       rules to crawl
     */
    public void SimpleCommonProcessorTask(boolean initStatus, List<TargetRules> list) {
        long time = System.currentTimeMillis();
        log.info("爬虫任务开始");
        for (TargetRules targetRules : list) {
            //todo find the most recent page for this rule (result currently unused)
            CrawlerUrl crawlerUrl = new CrawlerUrl();
            crawlerUrl.setRuleId(targetRules.getId());
            List<CrawlerUrl> crawlerUrls = crawlerUrlDAO.findAll(Example.of(crawlerUrl));
            crawlerUrls.sort(Comparator.comparing(CrawlerUrl::getOperateTime).reversed());
            SimpleCommonProcessor simpleCommonProcessor = new SimpleCommonProcessor(targetRules, Site.me().setRetryTimes(3).setSleepTime(1000), crawlerUrlDAO,testChromeService);
            simpleCommonProcessor.setInitStatus(initStatus);
            spider = Spider.create(simpleCommonProcessor)
                    .addUrl(targetRules.getSeedUrl());
            spider.run();
        }
        log.info("爬虫任务结束,耗费时间:{}min", (System.currentTimeMillis() - time) / (1000 * 60.0));
    }

    /**
     * Crawls each rule with a {@link CreateHtmlPageProcessor} backed by an
     * {@link HtmlUnitDownloader}, single-threaded, then logs aggregate counters.
     *
     * @param initStatus whether this is an initial (full) crawl, forwarded to the processor
     * @param list       rules to crawl
     */
    public void createHtmlPageTask(boolean initStatus, List<TargetRules> list) {
        long begin = System.currentTimeMillis();
        int count = list.size();
        for (TargetRules rule : list) {
            CreateHtmlPageProcessor createHtmlPageProcessor = new CreateHtmlPageProcessor(rule, crawlerUrlDAO);
            createHtmlPageProcessor.setInitStatus(initStatus);
            try {
                spider = Spider.create(createHtmlPageProcessor)
                        .setDownloader(new HtmlUnitDownloader())
                        .addUrl(rule.getSeedUrl())
                        .thread(1);
                spider.run();
            } catch (Exception e) {
                // Log with the cause instead of printStackTrace(); one failing rule
                // must not abort the remaining rules.
                log.error("发生错误: rule id={}", rule.getId(), e);
            }
        }
        log.info("总数：{} 爬到：{} 错误：{}", count, CountUtil.count, CountUtil.errorPage);
        log.info("{}", CountUtil.errorPageList);
        log.info("爬取过程耗费时间{}", (System.currentTimeMillis() - begin) / (1000 * 60.0));
    }

    /**
     * Pushes every stored page into Elasticsearch, keyed by the page's id.
     * The "id" field is stripped from the document body but kept in the URL path.
     */
    public void backUpData(){
        List<PageContent> list = pageContentDAO.findAll();
        for (PageContent pageContent : list) {
            JSONObject jsonObject = JSON.parseObject(JSON.toJSONString(pageContent));
            jsonObject.remove("id");
            restTemplateBuilder.build().put(server+"crawler_news/news/"+pageContent.getId(),jsonObject);
        }
    }
}
