package com.webull.information.center.carwler.job;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import javax.annotation.PostConstruct;

import org.apache.commons.lang3.ArrayUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.simple.SimpleJob;
import com.webull.information.center.carwler.common.model.NewsDetection;
import com.webull.information.center.carwler.common.util.news.detection.DetectionInvestingList;
import com.webull.information.center.carwler.common.util.news.detection.DetectionList;
import com.webull.information.center.carwler.dao.NewsDetectionDao;

/**
 * News-source update-frequency comparison job ("新闻源更新频度对比").
 * <p>
 * For each configured detection entry it looks up the matching {@link DetectionList}
 * parser by source key, scrapes the news list, and inserts any item whose
 * (tickerId, urlMd5) pair is not yet present in the detection result table.
 * <p>
 * Scheduled via Elastic-Job ({@link SimpleJob}); the {@code @Scheduled}-style
 * entry points {@link #process()} / {@link #process2()} are kept for manual or
 * legacy invocation.
 *
 * @author shimingjun
 * @date 2017年2月17日 下午4:48:52
 * @version 1.0
 * @since JDK 1.8
 */
@Component
public class News_Detection_Job implements SimpleJob {
	protected final Logger logger = LogManager.getLogger(getClass());

	private @Autowired NewsDetectionDao newsDetectionDao;
	private @Autowired DetectionInvestingList detectionInvestingList;

	/** Source key -> list parser; populated once at startup, read-only afterwards. */
	private final Map<String, DetectionList> detectionMapper = new HashMap<>();

	/**
	 * Registers the available source parsers. Currently only INVESTING is active;
	 * other sources (Reuters, Bloomberg, Yahoo, Google) have been retired.
	 */
	@PostConstruct
	public void initDetectionMapper() {
		detectionMapper.put(NewsDetection.Source.INVESTING, detectionInvestingList);
	}

	/**
	 * Manual entry point; historically ran once per hour. Delegates to
	 * {@link #execute(ShardingContext)} with no sharding context.
	 *
	 * @since 0.1.0
	 */
	public void process() {
		logger.info("News_Detection_Job start:------------------------------------->");
		execute(null);
	}

	/**
	 * Higher-frequency entry point for the Yahoo source (Yahoo only exposes the
	 * first 3 items, so it was polled more often). NOTE(review): YAHOO is no
	 * longer registered in {@code detectionMapper}, so this currently finds no
	 * parser and is a no-op per detection — confirm whether it can be removed.
	 *
	 * @since 0.1.0
	 */
	public void process2() {
		List<NewsDetection.Detection> shardDetection = newsDetectionDao.queryAllDetection(NewsDetection.Source.YAHOO);
		logger.info("News_Detection_Job process2 start:------------------------------------->");
		process_0(shardDetection);
	}

	/**
	 * Elastic-Job entry point. Loads all INVESTING detections and processes them.
	 *
	 * @param context sharding context; may be {@code null} when invoked directly
	 *                via {@link #process()}
	 */
	@Override
	public void execute(ShardingContext context) {
		// Tolerate a null context (direct invocation); -1 marks "no sharding info".
		int sharding = Optional.ofNullable(context).map(ShardingContext::getShardingItem).orElse(-1);
		int totalCount = Optional.ofNullable(context).map(ShardingContext::getShardingTotalCount).orElse(-1);

		logger.info("News_Detection_Job start:{},totalCount:{}", sharding, totalCount);
		List<NewsDetection.Detection> shardDetection = newsDetectionDao
				.queryAllDetection(NewsDetection.Source.INVESTING);
		if (shardDetection == null || shardDetection.isEmpty())
			return;
		// Placeholder count now matches the argument count (was "size{};exchanges:{};"
		// with a single argument, leaving a dangling placeholder in the output).
		logger.info("News_Detection_Job div size:{}", shardDetection.size());
		process_0(shardDetection);
	}

	/**
	 * Processes one batch of detections: for each entry, parses its news list via
	 * the registered source parser and inserts rows not already persisted.
	 * Failures are logged per detection / per item and never abort the batch.
	 *
	 * @param shardDetection detections to process; {@code null} or empty is a no-op
	 */
	public void process_0(List<NewsDetection.Detection> shardDetection) {
		logger.info("News_Detection_Job div shardExchanges size{}",
				shardDetection != null ? shardDetection.size() : 0);
		// Guard before iterating: the DAO may return null (process2 path had no check).
		if (shardDetection == null || shardDetection.isEmpty())
			return;
		for (NewsDetection.Detection detection : shardDetection) {
			try {
				// Unknown source key -> empty Optional -> detection is skipped silently.
				Optional.ofNullable(detectionMapper.get(detection.getsSource()))
						.map(v -> v.parseNewsList(detection))
						.ifPresent(v -> v.forEach(v0 -> {
							// Dedupe on (tickerId, urlMd5) before inserting.
							if (newsDetectionDao.countByTickerUrl(v0.getTickerId(), v0.getUrlMd5()) <= 0) {
								try {
									// Exception passed past the placeholders so Log4j2
									// logs the full stack trace (was consumed as a
									// message parameter before, losing the trace).
									newsDetectionDao.insertDetectionRes(v0);
								} catch (Exception e) {
									logger.error("News_Detection_Job shardExchanges collectJob error{}", detection, e);
								}
							}
						}));
			} catch (Exception e) {
				logger.error("News_Detection_Job shardExchanges collectJob div error{}", detection, e);
			}
		}
	}

}
