package com.hao.job;

import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.simple.SimpleJob;
import com.hao.entity.CrawlerUrl;
import com.hao.service.CrawlerDomService;
import com.hao.service.CrawlerUrlService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;

/**
 * Crawler DOM job: for one shard, loads the crawler URL records assigned to
 * that shard, locks them, and runs the DOM crawler over each record.
 * Created by R.hao on 2017/7/16.
 */
public class CrawlerDomJob implements SimpleJob {

    // Fixed: was LoggerFactory.getLogger(CrawlerUrlJob.class) — a copy-paste
    // slip that attributed this job's log output to the sibling URL job.
    private static final Logger logger = LoggerFactory.getLogger(CrawlerDomJob.class);

    /** Looks up and locks the crawler URL records for a shard. */
    @Autowired
    private CrawlerUrlService urlService;

    /** Runs the DOM crawl for a single URL record. */
    @Autowired
    private CrawlerDomService domService;

    /**
     * Executes one shard of the DOM-crawl job.
     *
     * @param context sharding context supplying this instance's shard item
     *                index and the total shard count
     */
    @Override
    public void execute(ShardingContext context) {
        // Log prefix identifying this job run's shard; also passed to the
        // crawler so downstream log lines can be correlated with the shard.
        String topLog = "CrawlerDomJob-shardingItem-" + context.getShardingItem() + "-"
                + "totalItem-" + context.getShardingTotalCount() + "-";
        logger.info(topLog);
        List<CrawlerUrl> urlList = this.urlService.findCrawlerUrl(context.getShardingItem(),
                context.getShardingTotalCount());
        if (null == urlList || urlList.isEmpty()) {
            // Nothing assigned to this shard right now.
            logger.info("{}not find data", topLog);
            return;
        }
        // Mark the records as taken before processing — presumably a lock so
        // other shards/instances skip them (NOTE(review): "clockCrawlerUrl"
        // looks like a typo for "lockCrawlerUrl"; confirm against the service).
        this.urlService.clockCrawlerUrl(urlList);
        for (CrawlerUrl crawlerUrl : urlList) {
            this.domService.runCrawler(topLog, crawlerUrl);
        }
    }
}
