package org.jeecg.crawler.task;


import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.jeecg.common.system.vo.DictModel;
import org.jeecg.crawler.content.ContentNewsCrawler;
import org.jeecg.crawler.special.FaZhiNewsCrawler;
import org.jeecg.crawler.special.FazhiDetailNewsCrawler;
import org.jeecg.crawler.special.NongMinDetailNewsCrawler;
import org.jeecg.crawler.special.NongMinNewsCrawler;
import org.jeecg.modules.crawlerpaper.entity.CrawlerInfo;
import org.jeecg.modules.crawlerpaper.service.ICrawlerInfoService;
import org.jeecg.modules.system.service.ISysDictService;
import org.quartz.JobExecutionException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.util.CollectionUtils;

import java.util.Iterator;
import java.util.List;
import java.util.Map;

@Configuration
@EnableScheduling
public class SpecialScheduleTask {
    @Autowired
    private ICrawlerInfoService crawlerInfoService;

    @Autowired
    private ISysDictService sysDictService;



    @SneakyThrows
    @Scheduled(cron = "0 25 9 * * ?")
    public void nanjingchenbao() throws JobExecutionException {
           this.getFaZHiCrewlerInfo();


    }
    @SneakyThrows
    @Scheduled(cron = "0 30 9 * * ?")
    public void nongminribao() throws JobExecutionException {
        this.getNongMinCrewlerInfo();


    }


    public void  getNongMinCrewlerInfo()  {
        try {

            ContentNewsCrawler.lock.lock();
            NongMinNewsCrawler crawler = new NongMinNewsCrawler("crawl", true);
            crawler.start(1);
            Map<String, String> urlMap = crawler.getUrlMap();

            NongMinDetailNewsCrawler crawlerDetail = new NongMinDetailNewsCrawler("crawl", true,urlMap);
            crawlerDetail.start(1);
            List<CrawlerInfo> crawlerInfoList = crawlerDetail.getCrawlerInfoList();
            if(!CollectionUtils.isEmpty(crawlerInfoList)){
                List<DictModel> keywords = sysDictService.queryDictItemsByCode("keywords");
                if(!CollectionUtils.isEmpty(keywords)){

                    Iterator<CrawlerInfo> iterator = crawlerInfoList.iterator();
                    while (iterator.hasNext()){
                        CrawlerInfo next = iterator.next();
                        Boolean removeFlag = true;
                        for (DictModel keyword : keywords) {
                            if(next.getArticleName().contains(keyword.getValue())||next.getArticleContent().toString().contains(keyword.getValue())){
                                removeFlag = false;
                                break;
                            }
                        }
                        if(removeFlag){
                            iterator.remove();
                        }

                    }
                }
                if(!CollectionUtils.isEmpty(crawlerInfoList)){
                    crawlerInfoService.saveOrUpdateBatch(crawlerInfoList);

                }

            }
        }catch (Exception e){
            e.printStackTrace();
        }finally {
            ContentNewsCrawler.lock.unlock();
        }
    }




    public void  getFaZHiCrewlerInfo()  {
        try {

            ContentNewsCrawler.lock.lock();
            FaZhiNewsCrawler crawler = new FaZhiNewsCrawler("crawl", true);
            crawler.start(1);
            Map<String, String> urlMap = crawler.getUrlMap();

            FazhiDetailNewsCrawler crawlerDetail = new FazhiDetailNewsCrawler("crawl", true,urlMap);
            crawlerDetail.start(1);
            List<CrawlerInfo> crawlerInfoList = crawlerDetail.getCrawlerInfoList();
            if(!CollectionUtils.isEmpty(crawlerInfoList)){
                List<DictModel> keywords = sysDictService.queryDictItemsByCode("keywords");
                if(!CollectionUtils.isEmpty(keywords)){

                    Iterator<CrawlerInfo> iterator = crawlerInfoList.iterator();
                    while (iterator.hasNext()){
                        CrawlerInfo next = iterator.next();
                        Boolean removeFlag = true;
                        for (DictModel keyword : keywords) {
                            if(next.getArticleName().contains(keyword.getValue())||next.getArticleContent().toString().contains(keyword.getValue())){
                                removeFlag = false;
                                break;
                            }
                        }
                        if(removeFlag){
                            iterator.remove();
                        }

                    }
                }
                if(!CollectionUtils.isEmpty(crawlerInfoList)){
                    crawlerInfoService.saveOrUpdateBatch(crawlerInfoList);

                }

            }
        }catch (Exception e){
            e.printStackTrace();
        }finally {
            ContentNewsCrawler.lock.unlock();
        }
    }
}
