package com.modouyu.modules.crawler.spider;

import com.modouyu.common.utils.*;
import com.modouyu.modules.crawler.spider.pipeline.CrawlerLogPipeline;
import com.modouyu.modules.oss.cloud.CloudStorageConfig;
import com.qiniu.common.QiniuException;
import com.qiniu.common.Zone;
import com.qiniu.storage.BucketManager;
import com.qiniu.storage.Configuration;
import com.qiniu.util.Auth;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpStatus;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.processor.PageProcessor;

import java.util.Date;
import java.util.List;
import java.util.UUID;

/**
 * @author yangcb
 * @email mudouyu@aliyun.com
 * @create 2018-01-10 16:52
 **/
/**
 * WebMagic {@link PageProcessor} that crawls www.ybdu.com novel pages.
 *
 * <p>Three page types are handled, distinguished by URL pattern:
 * <ul>
 *   <li>list pages ({@link #URL_LIST}) — paginated book indexes; queues article pages</li>
 *   <li>article pages ({@link #URL_POST}) — a novel's table of contents; queues chapter pages</li>
 *   <li>chapter pages ({@link #URL_DETAIL}) — a single chapter's text</li>
 * </ul>
 * Redis hashes (keyed via {@code RedisKeys.getCrawlerLogKey}) record which URLs have
 * been crawled (value 1) or merely queued (value 0), so restarts do not re-crawl.
 * Extracted fields are consumed downstream by {@code CrawlerLogPipeline}.
 */
public class CrawlerLogProcessor implements PageProcessor {

    private static final Logger LOGGER = LoggerFactory.getLogger(CrawlerLogProcessor.class);

    /** Redis dedup store; keys are built from the templates below plus {@link #type}. */
    private final RedisUtils redisUtils;
    /** Crawl-run discriminator interpolated into every Redis key (may be empty). */
    private final String type;

    public CrawlerLogProcessor(RedisUtils redisUtils, String type) {
        this.redisUtils = redisUtils;
        this.type = type;
    }

    /** No-arg constructor for standalone runs (see {@link #main}); pulls RedisUtils from Spring. */
    public CrawlerLogProcessor() {
        this.redisUtils = (RedisUtils) SpringContextUtils.getBean("redisUtils");
        this.type = "";
    }

    private final Site site = Site
            .me()
            .setDomain("www.ybdu.com")
            .setSleepTime(3000)
            .setTimeOut(120000)
            .setUserAgent(
                    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.65 Safari/537.31");

    /** Pagination (book list) page URL pattern. */
    public static final String URL_LIST = "https://www\\.ybdu\\.com/book\\d+/0/\\d+";
    /** Article (table of contents) page URL pattern. */
    public static final String URL_POST = "https://www\\.ybdu\\.com/xiaoshuo/\\d+/\\d+/";
    /** Chapter detail page URL pattern. */
    public static final String URL_DETAIL = "https://www\\.ybdu\\.com/xiaoshuo/\\d+/\\d+/\\d+\\.html";

    /**
     * Pagination page key: hash of page URL -> 0 (queued) / 1 (crawled).
     */
    public static final String PAGE_URL_KEY = "page:url:%s";
    /**
     * Article (table of contents) page key: hash of article URL -> 0 / 1.
     */
    public static final String ARTICLE_URL_KEY = "article:url:%s";
    /**
     * Chapter key: hash of chapter URL -> its 1-based order within the article.
     */
    public static final String CHAPTER_URL_KEY = "chapter:url:%s";
    /**
     * Author key: hash of article URL -> author name scraped from the list page.
     */
    public static final String ARTICLE_AUTHOR_KEY = "article:author:%s";

    /**
     * Dispatches the page to the matching handler by URL pattern.
     * Any failure is logged and swallowed so one bad page does not stop the crawl.
     */
    @Override
    public void process(Page page) {
        try {
            if (HttpStatus.SC_OK != page.getStatusCode()) {
                LOGGER.info("response error code={},-->{}", page.getStatusCode(), page.toString());
                return;
            }
            if (page.getUrl().regex(URL_LIST).match()) {
                processListPage(page);
            } else if (page.getUrl().regex(URL_DETAIL).match()) {
                processChapterPage(page);
            } else if (page.getUrl().regex(URL_POST).match()) {
                processArticlePage(page);
            }
        } catch (Exception e) {
            // Pass the throwable so the stack trace is kept (the old code logged
            // only e.getMessage(), which loses the cause and location entirely).
            LOGGER.error("*****************download page error*************", e);
        }
    }

    /**
     * Handles a pagination page: marks it crawled in Redis, queues every
     * not-yet-seen article URL on it, and records each article's author.
     */
    private void processListPage(Page page) {
        // Skip pages already marked crawled (value 1); otherwise (re)mark as crawled.
        Object val = redisUtils.hmGet(RedisKeys.getCrawlerLogKey(String.format(PAGE_URL_KEY, type)), page.getUrl().get());
        if (null != val && 1 == Integer.parseInt(val.toString())) {
            return;
        }
        redisUtils.hmSet(RedisKeys.getCrawlerLogKey(String.format(PAGE_URL_KEY, type)), page.getUrl().get(), 1);

        // Article links and their authors come from parallel columns of the list table.
        List<String> articleList = page.getHtml().xpath("//li[@class=\"two\"]").links().regex(URL_POST).all();
        List<String> authorList = page.getHtml().xpath("//div[@class=\"rec_rullist\"]/ul/li[@class=\"four\"]/text()").all();
        if (articleList == null) {
            return;
        }
        int articleListSize = articleList.size();
        for (int i = 0; i < articleListSize; i++) {
            String articleUrl = articleList.get(i);
            if (redisUtils.hmGet(RedisKeys.getCrawlerLogKey(String.format(ARTICLE_URL_KEY, type)), articleUrl) == null) {
                // Unseen article: mark queued (0) and schedule it.
                redisUtils.hmSet(RedisKeys.getCrawlerLogKey(String.format(ARTICLE_URL_KEY, type)), articleUrl, 0);
                page.addTargetRequest(articleUrl);
            }
            // FIX: the old code indexed authorList unconditionally; when the two xpath
            // result lists differ in size it threw IndexOutOfBoundsException and the
            // outer catch silently aborted the rest of this page. Missing authors now
            // simply fall back to the "佚名" default applied in processArticlePage.
            if (authorList != null && i < authorList.size()) {
                redisUtils.hmSet(RedisKeys.getCrawlerLogKey(String.format(ARTICLE_AUTHOR_KEY, type)), articleUrl, authorList.get(i));
            }
        }
    }

    /**
     * Handles a chapter page: extracts the title and the cleaned chapter body
     * and publishes them as result fields for the pipeline.
     */
    private void processChapterPage(Page page) {
        page.putField("chapter", page.getHtml().xpath("//div[@class='h1title']/h1/text()").get());
        String content = page.getHtml().xpath("//div[@id='htmlContent']").get();
        Document document = Jsoup.parse(content);
        Element element = document.getElementsByTag("div").get(0);
        // Strip nested <div> blocks (ads/navigation) so only the chapter text remains.
        element.select("div").remove();
        page.putField("content", element.html());
        // The enclosing article URL is the chapter URL truncated after its last '/'.
        String articleUrl = page.getUrl().get();
        articleUrl = articleUrl.substring(0, articleUrl.lastIndexOf("/") + 1);
        page.putField("articleurl", articleUrl);
        page.putField("chapterurl", page.getUrl().toString());
        // Chapter order was stored when the article's table of contents was processed.
        page.putField("sort", redisUtils.hmGet(RedisKeys.getCrawlerLogKey(String.format(CHAPTER_URL_KEY, type)), page.getUrl().toString()));
        page.putField("ischapter", true);
    }

    /**
     * Handles an article (table of contents) page: dedups it, records each
     * chapter's order, queues the chapter pages, uploads the cover image and
     * publishes the article metadata fields.
     */
    private void processArticlePage(Page page) {
        String articleUrl = page.getUrl().get();
        Object obj = redisUtils.hmGet(RedisKeys.getCrawlerLogKey(String.format(ARTICLE_URL_KEY, type)), articleUrl);
        if (obj != null && 1 == Integer.parseInt(obj.toString())) {
            LOGGER.info("重复爬取页面信息");
            return;
        }
        redisUtils.hmSet(RedisKeys.getCrawlerLogKey(String.format(ARTICLE_URL_KEY, type)), articleUrl, 1);

        // Chapter links appear in reading order; remember each one's 1-based position.
        List<String> urls = page.getHtml().xpath("//ul[@class=\"mulu_list\"]").links().regex(URL_DETAIL).all();
        int num = urls.size();
        for (int i = 0; i < num; i++) {
            redisUtils.hmSet(RedisKeys.getCrawlerLogKey(String.format(CHAPTER_URL_KEY, type)), urls.get(i), i + 1);
        }
        page.addTargetRequests(urls);

        // Derive the cover image URL from the article URL, e.g.
        // .../xiaoshuo/1/123/  ->  .../files/article/image/1/123/123s.jpg
        String[] array = articleUrl.split("xiaoshuo");
        String imgurl = array[0] + "files/article/image" + array[1] + array[1].split("/")[2] + "s.jpg";
        page.putField("articleurl", articleUrl);

        Object objAuthor = redisUtils.hmGet(RedisKeys.getCrawlerLogKey(String.format(ARTICLE_AUTHOR_KEY, type)), articleUrl);
        page.putField("author", objAuthor == null ? "佚名" : objAuthor.toString());
        page.putField("chapternum", num);
        String thumbnail = uploadImg(imgurl);
        page.putField("imageurl", StringUtils.isEmpty(thumbnail) ? "" : "http://" + thumbnail);
        page.putField("infos", page.getHtml().xpath("//div[@class=\"mu_contain\"]/p/text()").get());
        page.putField("title", page.getHtml().xpath("//div[@class=\"mu_h1\"]/h1/text()").get());
        page.putField("ischapter", false);
    }

    /** Standalone entry point: crawls the first book list page through the log pipeline. */
    public static void main(String[] args) {
        Spider.create(new CrawlerLogProcessor()).addUrl("https://www.ybdu.com/book1/0/1/").addPipeline(new CrawlerLogPipeline()).run();
    }

    @Override
    public Site getSite() {
        return site;
    }

    /**
     * Mirrors a remote image into the configured cloud storage bucket.
     * Only Qiniu is implemented; other providers currently return "".
     *
     * @param remoteSrcUrl source image URL to fetch server-side
     * @return {@code "<domain>/<key>"} on success, empty string on failure or
     *         for unimplemented providers
     */
    private String uploadImg(String remoteSrcUrl) {
        CloudStorageConfig config = Constant.CLOUD_STORAGE_CONFIG;
        try {
            if (Constant.CloudService.QINIU.getValue() == config.getType()) {
                String key = path(config.getQiniuPrefix(), ".jpg");
                Configuration cfg = new Configuration(Zone.zone0());
                Auth auth = Auth.create(config.getQiniuAccessKey(), config.getQiniuSecretKey());
                BucketManager bucketManager = new BucketManager(auth, cfg);
                // Server-side fetch: Qiniu itself downloads remoteSrcUrl into the bucket.
                bucketManager.fetch(remoteSrcUrl, config.getQiniuBucketName(), key);
                return config.getQiniuDomain() + "/" + key;
            }
            // TODO: ALIYUN / QCLOUD uploads are not implemented yet.
        } catch (QiniuException ex) {
            // FIX: pass ex.response as a parameter (null-safe, unlike the old
            // ex.response.toString() concat) and log the exception for its trace.
            LOGGER.error("*******************upload image error*******************：{}", ex.response, ex);
            return "";
        }
        return "";
    }

    /**
     * Builds a unique object key: {@code [prefix/]yyyyMMdd/<uuid-without-dashes><suffix>}.
     *
     * @param prefix optional bucket prefix; skipped when blank
     * @param suffix file extension including the dot, e.g. {@code ".jpg"}
     */
    private String path(String prefix, String suffix) {
        String uuid = UUID.randomUUID().toString().replaceAll("-", "");
        String path = DateUtils.format(new Date(), "yyyyMMdd") + "/" + uuid;
        if (StringUtils.isNotBlank(prefix)) {
            path = prefix + "/" + path;
        }
        return path + suffix;
    }

}
