package com.modouyu.modules.job.task;


import com.modouyu.common.utils.PinyinUtil;
import com.modouyu.common.utils.RedisKeys;
import com.modouyu.common.utils.RedisUtils;
import com.modouyu.modules.crawler.dao.*;
import com.modouyu.modules.crawler.entity.NovelArticleEntity;
import com.modouyu.modules.crawler.entity.NovelChapterEntity;
import com.modouyu.modules.crawler.entity.SpiderArticle;
import com.modouyu.modules.crawler.entity.SpiderChapter;
import com.modouyu.modules.crawler.spider.CrawlerLogProcessor;
import com.modouyu.modules.crawler.spider.listener.NovelSpiderListener;
import com.modouyu.modules.crawler.spider.pipeline.CrawlerLogPipeline;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.SpiderListener;

import java.util.*;

/**
 * Scheduled crawler tasks: runs the novel spider for a crawl source and
 * persists crawled articles/chapters from Redis into the database.
 *
 * @author yangcb
 * @email mudouyu@aliyun.com
 * @create 2018-01-10 16:31
 **/
@Component("crawlerLogTask")
public class CrawlerNovelTask {

    // FIX: was non-final and non-private; loggers should be private static final constants.
    private static final Logger LOGGER = LoggerFactory.getLogger(CrawlerNovelTask.class);

    /** Number of crawled objects inserted per database batch when draining Redis. */
    private static final int BATCH_SIZE = 100;

    @Autowired
    private SpiderChapterDao spiderChapterDao;
    @Autowired
    private SpiderArticleDao spiderArticleDao;

    @Autowired
    private NovelArticleDao novelArticleDao;
    @Autowired
    private NovelChapterDao novelChapterDao;

    @Autowired
    private RedisUtils redisUtils;

    /**
     * Starts a WebMagic spider for one crawl source, first re-queueing any URLs
     * that a previous run recorded in Redis but did not finish crawling.
     *
     * @param params comma-separated task parameters: "startUrl,siteKey"
     * @throws RuntimeException if {@code params} does not split into exactly two parts
     */
    public void spider(String params) {
        String[] array = params.split(",");
        // FIX: the original check ("2 < array.length") accepted a one-element array and
        // later crashed with ArrayIndexOutOfBoundsException on array[1]. Require exactly
        // two parts (split() never returns null, so the null check was dead code).
        if (array.length != 2) {
            throw new RuntimeException("定时任务传递参数错误");
        }
        String startUrl = array[0];
        String siteKey = array[1];
        Spider spider = Spider.create(new CrawlerLogProcessor(redisUtils, siteKey));
        // 将缓存没有爬取的地址，重新添加到爬取队列
        List<String> startUrls = new ArrayList<>();
        // 章节地址
        List<String> chapterUrls = redisUtils.hmKeys(RedisKeys.getCrawlerLogKey(String.format(CrawlerLogProcessor.CHAPTER_URL_KEY, siteKey)));
        // 目录列表 (hash value 0 marks an article page not yet processed)
        List<String> articleUrls = redisUtils.hmKeys(RedisKeys.getCrawlerLogKey(String.format(CrawlerLogProcessor.ARTICLE_URL_KEY, siteKey)));
        List<Object> articleUrlValues = redisUtils.hmValues(RedisKeys.getCrawlerLogKey(String.format(CrawlerLogProcessor.ARTICLE_URL_KEY, siteKey)));
        // 分页列表信息
        List<String> pageUrls = redisUtils.hmKeys(RedisKeys.getCrawlerLogKey(String.format(CrawlerLogProcessor.PAGE_URL_KEY, siteKey)));
        List<Object> pageValues = redisUtils.hmValues(RedisKeys.getCrawlerLogKey(String.format(CrawlerLogProcessor.PAGE_URL_KEY, siteKey)));
        // 1、将未抓取的章节放入爬虫队列
        if (chapterUrls != null && !chapterUrls.isEmpty()) {
            startUrls.addAll(chapterUrls);
        }
        // 2、将未获得章节的小说加入队列
        addUnprocessedUrls(startUrls, articleUrls, articleUrlValues);
        // 3、将未抓取的小说分页页面放入队列
        if (pageUrls != null && !pageUrls.isEmpty()) {
            LOGGER.info("未抓取的分页页面放入队列");
            addUnprocessedUrls(startUrls, pageUrls, pageValues);
        }
        // 没有待抓取队列：从起始地址重新开始
        if (startUrls.isEmpty()) {
            spider.addUrl(startUrl);
        } else {
            spider.startUrls(startUrls);
        }
        spider.addPipeline(new CrawlerLogPipeline(redisUtils, siteKey));
        spider.thread(100);
        List<SpiderListener> spiderListeners = new ArrayList<>();
        spiderListeners.add(new NovelSpiderListener(redisUtils, siteKey));
        spider.setSpiderListeners(spiderListeners);
        spider.run();
    }

    /**
     * Adds to {@code target} every URL whose parallel hash value equals 0
     * (i.e. recorded by a previous run but not yet crawled).
     */
    private void addUnprocessedUrls(List<String> target, List<String> urls, List<Object> values) {
        if (urls == null || urls.isEmpty()) {
            return;
        }
        int size = urls.size();
        for (int i = 0; i < size; i++) {
            if (Integer.parseInt(values.get(i).toString()) == 0) {
                target.add(urls.get(i));
            }
        }
    }


    /**
     * Drains crawled chapters from the Redis list and inserts them into the
     * database in batches of {@link #BATCH_SIZE}. A batch that fails to insert
     * is pushed back to Redis so it can be retried on the next run.
     */
    public void dbSpiderChapter() {
        // NOTE(review): the original mixed the literal "chapter:obj" with
        // CrawlerLogPipeline.CHAPTER_OBJ_KEY for the same list, so the constant is
        // assumed to equal "chapter:obj" — the constant is now used everywhere.
        String key = RedisKeys.getCrawlerLogKey(CrawlerLogPipeline.CHAPTER_OBJ_KEY);
        long size = redisUtils.lSize(key);
        if (size == 0) {
            return;
        }
        List<SpiderChapter> batch = new ArrayList<>(BATCH_SIZE);
        for (long i = 0; i < size; i++) {
            SpiderChapter spiderChapter = (SpiderChapter) redisUtils.lPop(key);
            if (spiderChapter == null) {
                // list drained concurrently by another worker
                continue;
            }
            // FIX: guard against a null chapter title before trimming (was an NPE risk)
            if (spiderChapter.getChapter() != null) {
                spiderChapter.setChapter(spiderChapter.getChapter().trim());
            }
            batch.add(spiderChapter);
            if (batch.size() == BATCH_SIZE) {
                flushChapterBatch(key, batch);
            }
        }
        flushChapterBatch(key, batch);
    }

    /** Inserts one batch of chapters; on failure pushes them back to Redis. Always empties the batch. */
    private void flushChapterBatch(String key, List<SpiderChapter> batch) {
        if (batch.isEmpty()) {
            return;
        }
        try {
            spiderChapterDao.insertBatch(batch);
        } catch (Exception e) {
            // FIX: log the full exception instead of only e.getMessage() (which may be null)
            LOGGER.error("批量插入章节数据异常", e);
            for (SpiderChapter sc : batch) {
                redisUtils.lPush(key, sc);
            }
        }
        batch.clear();
    }


    /**
     * Drains crawled articles from the Redis list and inserts them into the
     * database in batches of {@link #BATCH_SIZE}. A batch that fails to insert
     * is pushed back to Redis so it can be retried on the next run.
     */
    public void dbSpiderArticle() {
        // NOTE(review): the original mixed the literal "article:obj" with
        // CrawlerLogPipeline.ARTICLE_OBJ_KEY for the same list, so the constant is
        // assumed to equal "article:obj" — the constant is now used everywhere.
        String key = RedisKeys.getCrawlerLogKey(CrawlerLogPipeline.ARTICLE_OBJ_KEY);
        long size = redisUtils.lSize(key);
        if (size == 0) {
            return;
        }
        LOGGER.info("开始执行文章入库");
        long start = System.currentTimeMillis();
        List<SpiderArticle> batch = new ArrayList<>(BATCH_SIZE);
        for (long i = 0; i < size; i++) {
            SpiderArticle spiderArticle = (SpiderArticle) redisUtils.lPop(key);
            if (spiderArticle == null) {
                // list drained concurrently by another worker
                continue;
            }
            batch.add(spiderArticle);
            if (batch.size() == BATCH_SIZE) {
                flushArticleBatch(key, batch);
            }
        }
        flushArticleBatch(key, batch);
        // FIX: progress was logged at ERROR level with a contradictory "开始…结束" message
        LOGGER.info("文章入库结束，耗时:{}秒", (System.currentTimeMillis() - start) / 1000);
    }

    /** Inserts one batch of articles; on failure pushes them back to Redis. Always empties the batch. */
    private void flushArticleBatch(String key, List<SpiderArticle> batch) {
        if (batch.isEmpty()) {
            return;
        }
        try {
            spiderArticleDao.insertBatch(batch);
        } catch (Exception e) {
            // FIX: log the full exception instead of only e.getMessage()
            LOGGER.error("批量插入文章数据异常", e);
            for (SpiderArticle sa : batch) {
                redisUtils.lPush(key, sa);
            }
        }
        batch.clear();
    }


    /**
     * Maps a crawler type code to the site category (typeid / typename / typevalue).
     * <p>
     * FIX: an unknown code previously returned an empty map, which later caused
     * {@code Integer.parseInt(null)} to throw in {@link #novelArticleJob}; unknown
     * codes now fall back to the "其他" (other) category.
     */
    private Map<String, String> getTypeMap(String type) {
        Map<String, String> map = new HashMap<>(3);
        switch (type) {
            case "wuxia":
                map.put("typeid", "2");
                map.put("typename", "武侠修真");
                map.put("typevalue", "wuxia");
                break;
            case "xuanhuan":
                map.put("typeid", "1");
                map.put("typename", "玄幻奇幻");
                map.put("typevalue", "xuanhuan");
                break;
            case "dushi":
                map.put("typeid", "3");
                map.put("typename", "都市职场");
                map.put("typevalue", "dushi");
                break;
            case "yanqing":
                map.put("typeid", "8");
                map.put("typename", "言情校园");
                map.put("typevalue", "yanqing");
                break;
            // "chuanyue" (穿越) is deliberately merged into the 科幻穿越 category,
            // matching the original case bodies which were identical.
            case "chuanyue":
            case "kehuan":
                map.put("typeid", "6");
                map.put("typename", "科幻穿越");
                map.put("typevalue", "kehuan");
                break;
            case "wangyou":
                map.put("typeid", "5");
                map.put("typename", "游戏竞技");
                map.put("typevalue", "youxi");
                break;
            case "kongbu":
                map.put("typeid", "7");
                map.put("typename", "恐怖悬疑");
                map.put("typevalue", "kongbu");
                break;
            case "qita":
            default:
                map.put("typeid", "9");
                map.put("typename", "其他");
                map.put("typevalue", "qita");
                break;
        }
        return map;
    }

    /**
     * Synchronizes crawled articles (status == 0) and their chapters from the
     * spider staging tables into the novel tables, marking each staged row as
     * processed (status = 1) on success.
     *
     * @param code crawler type code; currently unused — the type filter is
     *             commented out, so all pending articles are synchronized.
     */
    public void novelArticleJob(String code) {

        LOGGER.info("--------------> start synchronization data ");

        Map<String, Object> paramMap = new HashMap<>();
        //paramMap.put("type", code);
        paramMap.put("status", 0);
        List<SpiderArticle> spiderArticleList = spiderArticleDao.selectByMap(paramMap);
        if (spiderArticleList == null || spiderArticleList.isEmpty()) {
            return;
        }
        for (SpiderArticle article : spiderArticleList) {
            try {
                Map<String, String> typeMap = getTypeMap(article.getType());

                NovelArticleEntity novelArticle = buildNovelArticle(article, typeMap);
                novelArticleDao.insert(novelArticle);

                article.setStatus(1);
                spiderArticleDao.updateSpiderArticle(article);

                syncChapters(article, novelArticle, typeMap);
            } catch (Exception e) {
                // FIX: keep the stack trace; e.getMessage() alone can be null/uninformative
                LOGGER.error("insert or update error", e);
            }
        }
        LOGGER.info("--------------> end synchronization data ");
    }

    /**
     * Builds the novel article row from a crawled article: counters zeroed,
     * dates set to now, category fields taken from {@code typeMap}.
     * (Duplicate setter calls from the original — setAuthor/setAllvisit/setAllvote
     * were each invoked twice — have been removed.)
     */
    private NovelArticleEntity buildNovelArticle(SpiderArticle article, Map<String, String> typeMap) {
        NovelArticleEntity novelArticle = new NovelArticleEntity();
        novelArticle.setArticletitle(article.getTitle());
        novelArticle.setAuthor(article.getAuthor());
        novelArticle.setAuthorid(0);
        novelArticle.setChaptercount(article.getChapternum());
        novelArticle.setCreatedate(new Date());
        novelArticle.setDayvisit(0);
        novelArticle.setDayvote(0);
        novelArticle.setAllvisit(0);
        novelArticle.setAllvote(0);
        novelArticle.setFullflag(true);
        novelArticle.setInitial(PinyinUtil.getPinYinHeadUperCharFirst(article.getTitle()));
        novelArticle.setIntro(article.getInfos());
        novelArticle.setKeywords(article.getTitle());
        novelArticle.setLastchapterid(0);
        novelArticle.setLastchaptertitle("");
        novelArticle.setLastupdate(new Date());
        novelArticle.setSize(0);
        novelArticle.setThumbnail(article.getImageurl());
        novelArticle.setWordcount(0);
        novelArticle.setTypeid(Integer.parseInt(typeMap.get("typeid")));
        novelArticle.setTypename(typeMap.get("typename"));
        novelArticle.setTypevalue(typeMap.get("typevalue"));
        return novelArticle;
    }

    /**
     * Inserts every pending chapter (status == 0) of {@code article} into the
     * novel chapter table, then updates the novel's aggregate fields
     * (total size, chapter count, last chapter id/title).
     */
    private void syncChapters(SpiderArticle article, NovelArticleEntity novelArticle, Map<String, String> typeMap) {
        SpiderChapter query = new SpiderChapter();
        query.setStatus(0);
        query.setArticleurl(article.getArticleurl());
        List<SpiderChapter> spiderChapterList = spiderChapterDao.selectSpiderChapter(query);
        int len = spiderChapterList.size();
        int articleSize = 0;
        NovelArticleEntity novel = new NovelArticleEntity();

        for (int i = 0; i < len; i++) {
            try {
                SpiderChapter chapter = spiderChapterList.get(i);
                int size = chapter.getContent().length();
                articleSize += size;
                NovelChapterEntity novelChapter = new NovelChapterEntity();
                novelChapter.setArticleid(novelArticle.getArticleid());
                novelChapter.setArticletitle(novelArticle.getArticletitle());
                novelChapter.setTypeid(Integer.parseInt(typeMap.get("typeid")));
                novelChapter.setTypename(typeMap.get("typename"));
                novelChapter.setSize(size);
                novelChapter.setChapterorder(chapter.getSort());
                novelChapter.setChaptertitle(chapter.getChapter());
                novelChapter.setContent(chapter.getContent());
                novelChapter.setDisplay(1);
                novelChapter.setPostdate(new Date());
                novelChapter.setPublishtime(new Date());

                novelChapterDao.insert(novelChapter);

                chapter.setStatus(1);
                spiderChapterDao.updateSpiderChapter(chapter);

                // remember the last chapter for the article summary update below
                if (i == len - 1) {
                    novel.setLastchapterid(novelChapter.getChapterid());
                    novel.setLastchaptertitle(novelChapter.getChaptertitle());
                }
            } catch (Exception e) {
                LOGGER.error("insert or update chapter error", e);
            }
        }
        novel.setArticleid(novelArticle.getArticleid());
        novel.setSize(articleSize);
        novel.setChaptercount(len);
        novelArticleDao.updateById(novel);
    }

}
