package top.maof.book.task;

import org.quartz.*;
import org.springframework.scheduling.quartz.QuartzJobBean;
import top.maof.book.webmagic.HttpClientDownloader;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.model.OOSpider;
import us.codecraft.webmagic.pipeline.PageModelPipeline;

import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;


/**
 * Quartz job that toggles a WebMagic crawler per job name.
 *
 * <p>The first firing for a given job name builds a spider from the job's
 * {@link JobDataMap} and runs it (blocking the scheduler thread until the
 * crawl finishes). A subsequent firing, while the spider is still registered,
 * stops and closes it and removes it from the registry.
 *
 * <p>Expected {@code JobDataMap} entries: {@code "charset"} (String),
 * {@code "pipline"} (a {@link PageModelPipeline}), {@code "clazz"} (the
 * page-model {@link Class}), and {@code "url"} (String).
 */
public class BookJob extends QuartzJobBean {

    private static final Logger LOG = Logger.getLogger(BookJob.class.getName());

    /** Active spiders keyed by Quartz job name; shared across all executions. */
    private static final Map<String, Spider> JOB_MAP = new ConcurrentHashMap<>();

    @Override
    protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
        String jobName = jobExecutionContext.getTrigger().getJobKey().getName();
        Spider spider = JOB_MAP.get(jobName);

        if (spider == null) {
            JobDataMap jobDataMap = jobExecutionContext.getJobDetail().getJobDataMap();

            // Fail fast with a named message instead of an anonymous NPE when a
            // required data-map entry is missing.
            String charset = Objects.requireNonNull(jobDataMap.get("charset"), "charset").toString();
            PageModelPipeline<?> pipeline =
                    (PageModelPipeline<?>) Objects.requireNonNull(jobDataMap.get("pipline"), "pipline");
            Class<?> clazz = (Class<?>) Objects.requireNonNull(jobDataMap.get("clazz"), "clazz");
            String url = Objects.requireNonNull(jobDataMap.get("url"), "url").toString();

            spider = OOSpider.create(Site.me().setCharset(charset), pipeline, clazz)
                    .setDownloader(new HttpClientDownloader())
                    .addUrl(url)
                    .thread(2);

            // putIfAbsent closes the original check-then-act race: if a
            // concurrent firing registered a spider first, do not start a
            // duplicate crawl for the same job name.
            if (JOB_MAP.putIfAbsent(jobName, spider) == null) {
                // Blocking call: occupies this scheduler thread until the crawl ends.
                spider.run();
            }
        } else {
            try {
                spider.stop();
                spider.close();
            } catch (Exception e) {
                // Route through the logging framework rather than printStackTrace()
                // so failures are visible in the application's configured logs.
                LOG.log(Level.WARNING, "Failed to stop/close spider for job " + jobName, e);
            } finally {
                // Always deregister so the next firing can start a fresh spider.
                JOB_MAP.remove(jobName);
            }
        }
    }
}
