package org.jeecg.crawler.node;


import org.jeecg.common.system.vo.DictModel;
import org.jeecg.common.util.DateTimeUtil;
import org.jeecg.common.util.DateUtils;
import org.jeecg.modules.crawlerpaper.entity.CrawlerInfo;

import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.jeecg.modules.crawlerpaper.service.ICrawlerInfoService;
import org.jeecg.modules.system.service.ISysDictService;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;

import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Scheduled (Quartz) crawler job: crawls a configured news source and persists
 * articles that match the configured keyword dictionary.
 * (原注释: 示例不带参定时任务 — "example scheduled task without parameters")
 *
 * @Author Scott
 */
@Slf4j
public class CrawlerJob implements Job {

	/**
	 * 若参数变量名修改 QuartzJobController中也需对应修改
	 */
	private String parameter;

	public void setParameter(String parameter) {
		this.parameter = parameter;
	}

	@Autowired
	private ICrawlerInfoService crawlerInfoService;

	@Autowired
	private ISysDictService sysDictService;

	//参数：baseurl,mainUrl,dataurl,page,scoureName,articleTime;

	@SneakyThrows
	@Override
	public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {

		if(parameter !=null){
			String[] params = parameter.split(",");
			if(params[2].equals("0")){
				params[2] = "/"+ DateTimeUtil.getTodayChar9();

			}
			if(params[5].equals("0")){
				params[5] = DateTimeUtil.getTodayChar8En();
			}
			 this.getCrewlerInfo(params[0],params[1],params[2],params[3],params[4],params[5]);

		}
	}


	public void  getCrewlerInfo(String baseUrl, String mainUrl,String dateUrl,String page,String sourceName,String articleTime) throws Exception {
		BaseNewsCrawler crawler = new BaseNewsCrawler("crawl", true,dateUrl,baseUrl,mainUrl,page);
		crawler.start(1);
		Map<String, String> urlMap = crawler.getUrlMap();

        DetailAutoNewsCrawler crawlerDetail = new DetailAutoNewsCrawler("crawl", true,urlMap,baseUrl,sourceName,articleTime);
		crawlerDetail.start(1);
		List<CrawlerInfo> crawlerInfoList = crawlerDetail.getCrawlerInfoList();
		if(!CollectionUtils.isEmpty(crawlerInfoList)){
			List<DictModel> keywords = sysDictService.queryDictItemsByCode("keywords");
			if(!CollectionUtils.isEmpty(keywords)){

				Iterator<CrawlerInfo> iterator = crawlerInfoList.iterator();
				while (iterator.hasNext()){
					CrawlerInfo next = iterator.next();
                    Boolean removeFlag = true;
					for (DictModel keyword : keywords) {
						if(next.getArticleName().contains(keyword.getValue())||next.getArticleContent().toString().contains(keyword.getValue())){
							removeFlag = false;
							break;
						}
					}
					if(removeFlag){
						iterator.remove();
					}

				}
			}

			crawlerInfoService.saveOrUpdateBatch(crawlerInfoList);

		}
	}
}
