package com;

import com.dao.TArticleDao;
import com.dao.TAuthorDao;
import com.entity.TArticle;
import com.entity.TArticleExample;
import com.entity.TAuthor;
import com.entity.TAuthorExample;
import com.pipeline.MySQLPipeline;
import com.processor.ArticleProcessor;
import com.processor.AuthorProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.stereotype.Component;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.pipeline.ConsolePipeline;
import us.codecraft.webmagic.scheduler.QueueScheduler;
import us.codecraft.webmagic.scheduler.component.DuplicateRemover;
import us.codecraft.webmagic.scheduler.component.HashSetDuplicateRemover;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Created by del on 16-5-1.
 */
@Component
/**
 * Entry point that wires up and launches the two CSDN crawlers: one spider for
 * articles (seeded from the blog front page) and one for authors (seeded from
 * the most recently stored author's profile page). Results flow through
 * {@link MySQLPipeline} and are echoed to the console.
 */
public class CrawlerMain {

    // Loggers are assigned once and shared; declare final per SLF4J convention.
    private static final Logger LOG = LoggerFactory.getLogger(CrawlerMain.class);

    @Autowired
    private ArticleProcessor articleProcessor;

    @Autowired
    private AuthorProcessor authorProcessor;

    @Autowired
    private MySQLPipeline mySQLPipeline;

    // Renamed from the misspelled "tArcticleDao"; @Autowired injects by type,
    // so the field rename is invisible to the Spring context and callers.
    @Autowired
    private TArticleDao tArticleDao;

    @Autowired
    private TAuthorDao tAuthorDao;

    /**
     * Builds both spiders and runs each on its own executor thread.
     * Each spider itself crawls with 5 internal worker threads.
     */
    public void crawl(){
        Spider articleSpider = Spider.create(articleProcessor)
                .addUrl("http://blog.csdn.net/?ref=toolbar_logo")
                .addPipeline(mySQLPipeline)
                .addPipeline(new ConsolePipeline())
                .thread(5);
        Spider authorSpider = Spider.create(authorProcessor)
                .addUrl("http://my.csdn.net/" + tAuthorDao.selectNewest().getUid())
                .addPipeline(mySQLPipeline)
                .addPipeline(new ConsolePipeline())
                .thread(5);

//        // Seed the article spider with already-stored article urls (marks them crawled).
//        List<String> articleUrls = new ArrayList<>();
//        List<TArticle> articles = tArticleDao.selectExample(new TArticleExample());
//        for (TArticle article : articles){
//            articleUrls.add(article.getUrl());
//        }
//        init(articleSpider,articleUrls);

//        // Seed the author spider with already-stored author urls (marks them crawled).
//        List<String> authorUrls = new ArrayList<>();
//        List<TAuthor> authors = tAuthorDao.selectByExample(new TAuthorExample());
//        for (TAuthor author : authors){
//            authorUrls.add("http://my.csdn.net/" + author.getUid());
//        }
//        init(authorSpider,authorUrls);

        // Exactly two spiders are submitted, so a pool of 2 suffices (was 10,
        // which wasted 8 idle threads).
        ExecutorService executor = Executors.newFixedThreadPool(2);
        executor.execute(articleSpider);
        executor.execute(authorSpider);
        // Let the pool terminate once both spiders finish; without shutdown()
        // the non-daemon pool threads keep the JVM alive forever.
        executor.shutdown();
    }

    /**
     * Marks the given urls as already crawled so the spider will skip them.
     *
     * @param spider spider to pre-seed; its scheduler must be a {@link QueueScheduler},
     *               otherwise the cast below throws {@link ClassCastException}
     * @param urls   urls to register as duplicates
     */
    private void init(Spider spider, List<String> urls) {
        DuplicateRemover duplicateRemover = new HashSetDuplicateRemover();
        for (String url : urls){
            // isDuplicate() records the url as "seen" as a side effect; the
            // boolean result is intentionally ignored — we only want the record.
            duplicateRemover.isDuplicate(new Request(url),spider);
            LOG.info("初始化spider:{},url:{}",spider.getClass().getName(),url);
        }
        ((QueueScheduler)spider.getScheduler()).setDuplicateRemover(duplicateRemover);
    }

    public static void main(String [] args) {
        // Bootstraps the Spring context from the classpath XML config and kicks
        // off the crawl. The unused "throws IOException" clause was removed.
        ApplicationContext applicationContext = new ClassPathXmlApplicationContext("classpath:/platform-services-servlet.xml");
        CrawlerMain crawlerMain = applicationContext.getBean(CrawlerMain.class);
        crawlerMain.crawl();
    }
}
