/*
 * TOP SECRET Copyright 2006-2015 Transsion.com All rights reserved. This software is the confidential and proprietary
 * information of Transsion.com ("Confidential Information"). You shall not disclose such Confidential Information and
 * shall use it only in accordance with the terms of the license agreement you entered into with Transsion.com.
 */
package com.yunji.framework_template.web.quartz.news;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;

import javax.annotation.Resource;

import org.apache.http.client.ClientProtocolException;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import com.yunji.framework_template.biz.cache.CacheService;
import com.yunji.framework_template.biz.crawler.NewsCrawler;
import com.yunji.framework_template.biz.service.AppsService;
import com.yunji.framework_template.common.util.StringUtil;
import com.yunji.framework_template.orm.datasource.HandleDataSource;
import lombok.extern.slf4j.Slf4j;

/**
 * NewsCrawlerTimer — quartz-scheduled job that groups the registered {@link NewsCrawler}s by
 * country, runs each country's crawlers on a dedicated thread against that country's data
 * source, and refreshes the latest-news cache afterwards. Also exposes a job that loads app
 * data from App Annie for every configured country.<br/>
 * Date: 2018-11-27 15:18:42 <br/>
 *
 * @author fenglibin1982@163.com
 * @Blog http://blog.csdn.net/fenglibing
 */
@Slf4j
@Service
public class NewsCrawlerTimer {

    private static final Logger logger    = Logger.getLogger(NewsCrawlerTimer.class);
    private AtomicBoolean       isRunning = new AtomicBoolean(false);
    @Resource
    private CacheService        cacheService;
    @Resource
    private AppsService         appsService;
    @Value("#{settings['countries']}")
    private String              countries;

    /**
     * 每分钟执行一次
     */
    public void execute() {
        try {
            logger.info("Begin to schedule crawle news");
            if (isRunning.get()) {
                logger.warn("The news crawler job is still running.");
                return;
            }
            isRunning.set(true);
            List<NewsCrawler> newsCrawlerList = NewsCrawler.getNewsCrawlerList();
            if (newsCrawlerList == null || newsCrawlerList.size() == 0) {
                logger.info("Cannot find any news crawler.");
                return;
            }
            // 将爬虫按国家分组，加快抓取的速度
            Map<String, List<NewsCrawler>> newsCrawlerListMap = new HashMap<String, List<NewsCrawler>>();
            newsCrawlerList.forEach(crawler -> {
                Set<String> countryCodeSet = crawler.getCountryCodeSet();
                countryCodeSet.forEach(countryCode->{
                    List<NewsCrawler> list = newsCrawlerListMap.get(countryCode);
                    if (list == null) {
                        list = new ArrayList<NewsCrawler>();
                        newsCrawlerListMap.put(countryCode, list);
                    }
                    list.add(crawler);
                });
                
            });

            newsCrawlerListMap.forEach((countryCode, crawlerList) -> {
                new Thread() {

                    public void run() {
                        crawlerList.forEach((crawler) -> {
                            try {
                                if (!StringUtil.isEmpty(cacheService.getCountryCache().getOnlyCountry())) {
                                    if (!countryCode.equals(cacheService.getCountryCache().getOnlyCountry())) {
                                        log.warn("当前爬虫只允许指定的国家:" + cacheService.getCountryCache().getOnlyCountry()
                                                 + "执行资源爬取操作。");
                                        return;
                                    }
                                }
                                if (cacheService.getCountryCache().getCountries().indexOf(countryCode) < 0) {
                                    log.warn("当前爬虫指定的国家:" + countryCode + "不在配置中，需要在配置文件中指定。");
                                    return;
                                }
                                /*
                                 * 发现通过多线程执行的时候，会存在一个国家的数据爬虫会将数据写到其它国家中 new Thread() { public void run() { //
                                 * 设置当前的数据源为该爬虫对应的国家所在数据源 HandleDataSource.putDataSource(countryCode);
                                 * c.saveNews(); logger.info("Finish schedule crawle news");
                                 * cacheService.initLatestNews(); } }.start();
                                 */
                                // 设置当前的数据源为该爬虫对应的国家所在数据源
                                logger.info("Begin to schedule crawle news, the country code is:" + countryCode);
                                HandleDataSource.putDataSource(countryCode);
                                crawler.saveNews();
                                logger.info("Finish schedule crawle news, the country code is:" + countryCode);
                                cacheService.initOneCountryLatestNews(countryCode);

                            } catch (Exception e) {
                                logger.error("Crawl content from " + crawler.getClass().getName() + " exception happened:"
                                             + e.getMessage(), e);
                            } finally {
                                HandleDataSource.putDataSource(null);
                            }
                        });
                    }
                }.start();
            });

        } catch (Exception e) {
            logger.error("The crawler timer execute error happened:" + e.getMessage(), e);
        } finally {
            isRunning.set(false);
        }
    }

    public void appAnnieExcecutor() throws ClientProtocolException, IOException {
        String[] countryArr = countries.split(",");
        for (String countryCode : countryArr) {
            HandleDataSource.putDataSource(countryCode);
            appsService.loadAppsFromAppAnnie(countryCode);
        }
    }
}
