package com.example.book.crawler.service;

import com.example.book.base.BaseService;
import com.example.book.common.utils.Constants;
import com.example.book.common.utils.RedisService;
import com.example.book.crawler.dao.CrawlerContentDao;
import com.example.book.crawler.entity.CrawlerCatalog;
import com.example.book.crawler.dao.CrawlerCatalogDao;

import com.example.book.crawler.entity.CrawlerContent;
import com.example.book.crawler.pageprocessor.CatalogPageProcessor;
import com.example.book.crawler.pageprocessor.ContentPageProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import us.codecraft.webmagic.ResultItems;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.Task;
import us.codecraft.webmagic.downloader.HttpClientDownloader;
import us.codecraft.webmagic.pipeline.Pipeline;
import us.codecraft.webmagic.proxy.Proxy;
import us.codecraft.webmagic.proxy.SimpleProxyProvider;
import us.codecraft.webmagic.scheduler.RedisScheduler;

import java.text.ParseException;
import java.util.*;
import java.util.concurrent.TimeUnit;

/**
 * @author hcj
 * @date 2019-11-01 22:25:02
 */
@Service
public class CrawlerCatalogService extends BaseService<CrawlerCatalog, CrawlerCatalogDao> {

    @Autowired
    private CrawlerContentDao crawlerContentDao;
    // NOTE(review): raw RedisTemplate kept as-is to avoid changing the injected bean type;
    // values popped from IP_QUEUE are cast unchecked below.
    @Autowired
    private RedisTemplate redisTemplate;

    /**
     * Returns a single catalog record selected by status, in ascending order
     * (delegates entirely to the DAO query of the same name).
     *
     * @return the matching catalog record, or {@code null} if none exists
     */
    public CrawlerCatalog getInfoOfByStatusAndAsc() {
        return dao.getInfoOfByStatusAndAsc();
    }

    /**
     * Initializes chapter entries for a book by crawling its catalog page.
     * After initialization the init button becomes unavailable and the task
     * can be started manually (tracked via {@code isCarriedOut}).
     * <p>
     * Borrows a proxy IP from the shared Redis queue (falls back to the local
     * IP when none is available within 2 seconds) and is guaranteed to return
     * the borrowed IP to the queue even if the crawl fails.
     *
     * @param crawlerCatalog the catalog task holding the URL to crawl and the book id
     */
    @Transactional(readOnly = false, rollbackFor = Exception.class)
    public void initCrawler(CrawlerCatalog crawlerCatalog) {
        List<CrawlerContent> result = new ArrayList<>();

        // Borrow a proxy IP descriptor from the shared queue; null means "use local IP".
        Map<String, Object> map = (Map<String, Object>) redisTemplate.opsForList()
                .rightPop(Constants.IP_QUEUE, 2, TimeUnit.SECONDS);

        Spider spider = buildSpider(map);
        spider.setUUID(UUID.randomUUID().toString());
        try {
            spider.addUrl(crawlerCatalog.getCrawlerUrl()).addPipeline(new Pipeline() {
                @Override
                public void process(ResultItems resultItems, Task task) {
                    List<CrawlerContent> list = (List<CrawlerContent>) resultItems.get("list");
                    if (list != null) {
                        for (CrawlerContent cc : list) {
                            cc.setBookInfoId(crawlerCatalog.getBookInfoId());
                            // Crawled hrefs are relative; prefix with the catalog base URL.
                            cc.setContentHref(crawlerCatalog.getCrawlerUrl() + cc.getContentHref());
                            cc.setIsCarriedOut("0");
                            result.add(cc);
                        }
                    }
                }
            }).thread(1).run();
        } finally {
            // Always return the borrowed proxy IP, even when the crawl throws;
            // the original only returned it on success, slowly draining the queue.
            if (map != null) {
                redisTemplate.opsForList().leftPush(Constants.IP_QUEUE, map);
            }
        }

        if (!result.isEmpty()) {
            crawlerContentDao.insertBatch(result);
            // Mark the catalog as initialized so the UI disables the init button.
            crawlerCatalog.setIsCarriedOut("1");
            dao.update(crawlerCatalog);
        }
    }

    /**
     * Builds a Spider for the catalog page, routed through the given proxy
     * when a complete {@code {ip, port}} descriptor is supplied; otherwise
     * (null map or missing keys) falls back to the local IP.
     *
     * @param proxyInfo proxy descriptor popped from the Redis IP queue, may be {@code null}
     * @return a configured (not yet started) Spider
     */
    private Spider buildSpider(Map<String, Object> proxyInfo) {
        // Guard against a malformed queue entry (missing ip/port), not just a null pop.
        if (proxyInfo == null || proxyInfo.get("ip") == null || proxyInfo.get("port") == null) {
            System.out.println("本次使用IP ----- >：本地");
            return new Spider(new CatalogPageProcessor());
        }
        System.out.println("本次使用IP ----- >：" + proxyInfo.get("ip").toString());
        HttpClientDownloader downloader = new HttpClientDownloader();
        downloader.setProxyProvider(SimpleProxyProvider.from(
                new Proxy(proxyInfo.get("ip").toString(), Integer.parseInt(proxyInfo.get("port").toString()))
        ));
        return new Spider(new CatalogPageProcessor()).setDownloader(downloader);
    }


}