package edu.ycu.aladdin.collector.service.weibo;

import edu.ycu.aladdin.collector.CrawlerConfigPool;
import edu.ycu.aladdin.collector.api.CrawlerService;
import edu.ycu.aladdin.collector.api.entity.weibo.BaseWeiBoCrawlerTaskQueue;
import edu.ycu.aladdin.common.interfaces.ProcessAble;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.processor.PageProcessor;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

import static edu.ycu.aladdin.collector.detail.RedisCrawlerConfigPool.DEFAULT_SITE;

/**
 * Webmagic-based skeleton implementation of the crawler service.
 *
 * <p>Manages a pool of spiders keyed by a generated crawler id. Subclasses supply the
 * page-processing, storage, error-handling and seed-URL logic via the abstract methods.
 *
 * @author <a href="mailto:wangruihuano@gmail.com">王瑞环</a>
 * @since 1.0
 */

public abstract class BaseCrawlerWebmagicService<TASK extends BaseWeiBoCrawlerTaskQueue> implements CrawlerService<TASK> {

    /** Supplies per-agent {@link Site} configurations; presumably backed by Redis — see DEFAULT_SITE import. */
    private final CrawlerConfigPool crawlerConfigPool;

    /**
     * Spider pool: crawler id → spider plus the task it was created for.
     * Concurrent map because crawlers may be created/destroyed from different threads.
     */
    protected final Map<String, SpiderWithTask> spiders = new ConcurrentHashMap<>();

    /**
     * @param crawlerConfigPool pool used to resolve {@link Site} configurations; must not be null
     */
    public BaseCrawlerWebmagicService(CrawlerConfigPool crawlerConfigPool) {
        this.crawlerConfigPool = Objects.requireNonNull(crawlerConfigPool, "crawlerConfigPool");
    }

    /**
     * Creates a single-threaded crawler for {@code task}.
     *
     * @param agentId id of the agent whose {@link Site} config to use; null picks a random one
     * @param task    the crawl task to execute
     * @return the generated crawler id, usable with {@link #startCrawler(String)} etc.
     */
    @Override
    public String createCrawler(String agentId, TASK task) {
        return createCrawler(agentId, task, 1);
    }

    /**
     * Creates a crawler for {@code task} running on {@code threadNum} threads.
     *
     * @param agentId   id of the agent whose {@link Site} config to use; null picks a random one
     * @param task      the crawl task to execute
     * @param threadNum number of spider threads
     * @return the generated crawler id, usable with {@link #startCrawler(String)} etc.
     */
    @Override
    public String createCrawler(String agentId, TASK task, int threadNum) {

        // Resolve the Site: explicit agent id → pool lookup, otherwise a random one;
        // fall back to the shared default when the pool yields nothing.
        Site agent = agentId != null
                ? crawlerConfigPool.getAgent(agentId)
                : crawlerConfigPool.generateRandomSite();
        if (agent == null) {
            agent = DEFAULT_SITE;
        }

        Spider spider = initSpider(task, agent);
        spider.thread(threadNum);
        // Dashless UUID keeps the id compact and URL/path friendly.
        String id = UUID.randomUUID().toString().replace("-", "");
        spiders.put(id, new SpiderWithTask(spider, task));
        return id;
    }

    /**
     * Builds a spider for {@code task} seeded with the URL from {@code initUrl(task)}.
     * (Renamed from the original typo {@code initSplider}; private, so no caller impact.)
     */
    private Spider initSpider(TASK task, Site agent) {
        PageProcessor pageProcessor = createPageProcessor(task, agent);
        return Spider.create(pageProcessor).addUrl(initUrl(task));
    }

    /**
     * Persists the data collected for {@code task}. Invoked after every successfully
     * processed page.
     *
     * @param task the task whose crawled data should be stored
     */
    protected abstract void storage(TASK task);

    /**
     * Handles an exception raised while processing or storing a page.
     *
     * @param task the task being processed when the exception occurred
     * @param e    the exception that was caught
     */
    protected abstract void processException(TASK task, Exception e);

    /**
     * Produces the seed URL for the given task.
     *
     * @param task the task to derive the start URL from
     * @return the URL the spider starts crawling at
     */
    protected abstract String initUrl(TASK task);

    /**
     * Creates a {@link PageProcessor} that delegates page handling to
     * {@link #doProcess(BaseWeiBoCrawlerTaskQueue)} and then stores the result,
     * routing any exception to {@link #processException(BaseWeiBoCrawlerTaskQueue, Exception)}.
     *
     * @param task  the task the processor works on
     * @param agent the {@link Site} configuration the spider should report
     * @return a page processor bound to {@code task} and {@code agent}
     */
    public final PageProcessor createPageProcessor(TASK task, Site agent) {

        // Typed (was raw) — doProcess declares ProcessAble<Page>.
        ProcessAble<Page> processAble = doProcess(task);
        return new PageProcessor() {
            @Override
            public void process(Page page) {
                try {
                    processAble.process(page);
                    storage(task);
                } catch (Exception e) {
                    // Delegate error handling to the subclass instead of letting
                    // the exception escape and kill the spider thread.
                    processException(task, e);
                }
            }

            @Override
            public Site getSite() {
                return agent;
            }
        };
    }

    /**
     * Supplies the page-processing logic for {@code task}.
     *
     * @param task the task to build the processor for
     * @return the processing callback applied to each fetched {@link Page}
     */
    protected abstract ProcessAble<Page> doProcess(TASK task);

    /**
     * Starts the crawler with the given id.
     * NOTE(review): {@code Spider.run()} blocks the calling thread until the crawl
     * finishes — confirm callers expect synchronous execution.
     *
     * @param id crawler id returned by {@code createCrawler}
     * @throws IllegalArgumentException if no crawler is registered under {@code id}
     */
    @Override
    public void startCrawler(String id) {
        requireSpider(id).spider.run();
    }

    /**
     * Stops the crawler with the given id.
     *
     * @param id crawler id returned by {@code createCrawler}
     * @throws IllegalArgumentException if no crawler is registered under {@code id}
     */
    @Override
    public void stopCrawler(String id) {
        requireSpider(id).spider.stop();
    }

    /**
     * Closes and removes the crawler with the given id.
     *
     * @param id crawler id returned by {@code createCrawler}
     * @return true if a crawler was found and destroyed, false for an unknown id
     *         (previously this NPE'd on unknown ids and always returned true)
     */
    @Override
    public boolean destroyCrawler(String id) {
        SpiderWithTask entry = spiders.remove(id);
        if (entry == null) {
            return false;
        }
        entry.spider.close();
        return true;
    }

    /** Looks up a registered crawler, failing fast with a descriptive message on unknown ids. */
    private SpiderWithTask requireSpider(String id) {
        SpiderWithTask entry = spiders.get(id);
        if (entry == null) {
            throw new IllegalArgumentException("No crawler registered for id: " + id);
        }
        return entry;
    }

    /**
     * Pairs a spider with the task it was created for.
     * Cannot be static: it references the enclosing type parameter {@code TASK}.
     */
    class SpiderWithTask {

        final Spider spider;
        final TASK task;

        SpiderWithTask(Spider spider, TASK task) {
            this.spider = spider;
            this.task = task;
        }
    }
}
