package cn.amazonspider.crawler;

import cn.amazonspider.model.AmazonItem;
import cn.amazonspider.model.CrawlerModel;
import cn.amazonspider.pipeline.AmazonPageModelPipeline;
import cn.amazonspider.utils.Constants;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import us.codecraft.webmagic.downloader.HttpClientDownloader;

import java.util.List;

@Component
public class AmazonCrawler {

    private final Logger logger = Logger.getLogger(getClass());

    // NOTE(review): public mutable field kept only for backward compatibility
    // with existing callers; prefer configuring via addCrawlerModel(...).
    public List<CrawlerModel> crawlerModelList;

    /** Pipeline that persists crawled items; reconfigured per model in init(...). */
    @Qualifier("amazonPageModelPipeline")
    @Autowired
    private AmazonPageModelPipeline amazonPageModelPipeline;

    /** Shared crawler runner that executes one WebMagic spider per model. */
    @Autowired(required = true)
    private BaseCrawler baseCrawler;

    /**
     * Runs one crawl per configured {@link CrawlerModel}: first points the shared
     * pipeline at the model's table and category labels, then delegates the actual
     * crawl to {@link BaseCrawler} with a fresh downloader, the model's seed URL,
     * the configured thread count, and {@link AmazonItem} as the page model.
     * Logs the total elapsed time when all models have been processed.
     */
    public void amazoncrawler() {
        logger.info("started to collect the data of www.amazon.cn");
        long startMili = System.currentTimeMillis();
        if (crawlerModelList == null) {
            // Fix: iterating a never-set list previously threw a NullPointerException.
            logger.warn("crawlerModelList is not configured; nothing to collect");
            return;
        }
        for (CrawlerModel crawlerModel : crawlerModelList) {
            init(crawlerModel.getTable(), crawlerModel.getFirst_sort(), crawlerModel.getSecond_sort());
            baseCrawler.crawler(
                    amazonPageModelPipeline,
                    new HttpClientDownloader(),
                    crawlerModel.getSeed(),
                    Constants.THREADNUM,
                    AmazonItem.class
            );
        }
        long endMili = System.currentTimeMillis();
        logger.info("elapsed time:" + (endMili - startMili) + "ms");
        logger.info("collect the data end");
    }

    /**
     * Prepares the shared pipeline for the next crawl run by setting the
     * destination table and the two category (sort) labels.
     *
     * @param table       destination table name for persisted items
     * @param first_sort  first-level category label
     * @param second_sort second-level category label
     */
    private void init(String table, String first_sort, String second_sort) {
        amazonPageModelPipeline.setTable(table);
        amazonPageModelPipeline.setFirst_sort(first_sort);
        amazonPageModelPipeline.setSecond_sort(second_sort);
    }

    /**
     * Sets the list of crawl configurations to run.
     *
     * @param list crawler models; the reference is stored as-is (not copied)
     * @return this crawler, for fluent chaining
     */
    public AmazonCrawler addCrawlerModel(List<CrawlerModel> list) {
        // Fix: the original wrapped this assignment in try/catch, but a plain
        // reference assignment cannot throw — the dead handler was removed.
        crawlerModelList = list;
        return this;
    }

}
