package com.dmcb.trade.business.crawlers.author;

import com.dmcb.common.business.utils.DateUtil;
import com.dmcb.common.business.utils.JsonUtil;
import com.dmcb.common.business.utils.WebUtil;
import com.dmcb.common.web.conversion.JsonResult;
import com.dmcb.common.business.utils.WebClientUtil;
import com.dmcb.trade.business.constants.CrawlConstant;
import com.dmcb.trade.business.constants.TradeConstant;
import com.dmcb.trade.business.crawlers.article.BaseParser;
import com.dmcb.trade.business.entities.Article;
import com.dmcb.trade.business.entities.Author;
import com.dmcb.trade.business.mappers.ArticleCrawlMapper;
import com.dmcb.trade.business.services.ArticleCrawlerService;
import com.dmcb.trade.business.utils.CrawlerUtil;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
import org.apache.commons.lang3.StringUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.Date;
import java.util.Map;


/**
 * Crawler that fetches a Zhihu (知乎) author's answers and column posts and stores them as articles.
 * Created by Administrator on 2016/11/7.
 */
@Service
public class ZhiHuCrawler extends BaseCrawler {

    private static final Logger logger = LoggerFactory.getLogger(ZhiHuCrawler.class);

    @Autowired
    private WebClientUtil webClientUtil;
    @Autowired
    private ArticleCrawlMapper articleCrawlMapper;
    @Autowired
    ArticleCrawlerService articleCrawlerService;

    /** Entry type: an answer (回答) from the author's answer list page. */
    private static final int ANSWER = 1;
    /** Entry type: a standalone column post (文章). */
    private static final int POST = 2;

    /** Zhihu shows at most this many entries per profile list page; a shorter page is the last one. */
    private static final int PAGE_SIZE = 20;

    /** Path segment preceding the user id in a Zhihu profile URL. */
    private static final String PEOPLE_SEGMENT = "/people/";

    /**
     * Crawls all answers and column posts of one Zhihu author.
     *
     * @param id     author id in our database
     * @param link   the author's profile link, e.g. https://www.zhihu.com/people/Kbanquan/answers
     * @param author author entity
     * @param doc    pre-fetched profile document (unused here; list pages are fetched per type)
     * @param all    flag inherited from the BaseCrawler contract (unused here)
     * @return success with the total number of newly saved articles, or the first error encountered
     */
    @Override
    public JsonResult crawl(int id, String link, Author author, Document doc, boolean all) {
        // Answer list:  https://www.zhihu.com/people/<userId>/answers
        // Post list:    https://www.zhihu.com/people/<userId>/pins/posts
        // BUGFIX: was link.indexOf("/prople/") — a typo that always returned -1, so the
        // user id was sliced from the wrong offset and every crawl used a garbage id.
        int start = link.indexOf(PEOPLE_SEGMENT);
        if (start < 0) {
            logger.info("作者链接格式错误，未找到 /people/ 段: " + link);
            return error("解析文章失败");
        }
        int begin = start + PEOPLE_SEGMENT.length();
        int end = link.lastIndexOf('/');
        // Tolerate links without a trailing section, e.g. ".../people/<userId>".
        String userId = end > begin ? link.substring(begin, end) : link.substring(begin);

        JsonResult jsonResult = crawlType("https://www.zhihu.com/people/" + userId + "/answers",
                id, author, "div#zh-profile-answer-list>div", ANSWER);
        if (jsonResult.getStatus() == JsonResult.STATUS_ERROR) {
            return jsonResult;
        }
        int sum = (Integer) jsonResult.getData();

        jsonResult = crawlType("https://www.zhihu.com/people/" + userId + "/pins/posts",
                id, author, "div#zh-profile-post-list>div", POST);
        if (jsonResult.getStatus() == JsonResult.STATUS_ERROR) {
            return jsonResult;
        }
        sum += (Integer) jsonResult.getData();
        return success("更新结束" + sum);
    }


    /**
     * Pages through one list type (answers or posts) of the author and saves each new article.
     *
     * @param link       list url without the page parameter
     * @param id         author id
     * @param author     author entity
     * @param listSelect CSS selector for the per-entry container elements
     * @param type       {@link #ANSWER} or {@link #POST}
     * @return success with the count of newly saved articles, or an error if a page could not be parsed
     */
    public JsonResult crawlType(String link, int id, Author author, String listSelect, int type) {
        int sum = 0;
        for (int page = 1; true; page++) {
            String pagedLink = link + "?page=" + page;
            HtmlPage htmlPage = webClientUtil.getPage(pagedLink);
            // BUGFIX: the original null-checked the Jsoup document, which Jsoup.parse never
            // returns as null; the realistic failure is getPage itself, which would have
            // NPE'd on htmlPage.asXml() before the check.
            // NOTE(review): assumes getPage returns null on fetch failure — confirm in WebClientUtil.
            if (htmlPage == null) {
                logger.info("解析文章失败，没有获取到文章内容，请核实解析失败原因: " + pagedLink);
                return error("解析文章失败");
            }
            Document doc = Jsoup.parse(htmlPage.asXml());
            Elements elements = doc.select(listSelect);
            if (elements.isEmpty()) {
                logger.info("解析文章失败，没有获取到文章列表，请核实解析失败原因: " + pagedLink);
                return error("解析文章失败");
            }
            int savedOnPage = 0;
            for (Element element : elements) {
                String src = element.select("h2>a").attr("href");
                if (StringUtils.isBlank(src)) {
                    continue;
                }
                // Zhihu article bodies cannot be parsed straight from the list markup:
                // answers are scraped from the detail page, posts come from the JSON API.
                JsonResult jsonResult =
                        type == ANSWER ? answerCrawler(src, id, author) : postCrawler(src, id, author);
                if (jsonResult.getStatus() == JsonResult.STATUS_SUCCESS) {
                    sum++;
                    savedOnPage++;
                }
            }
            // Stop when nothing new was saved on this page (incremental crawl reached
            // already-known articles) or the page was not full (last page).
            if (savedOnPage == 0 || elements.size() < PAGE_SIZE) {
                break;
            }
        }
        return success(sum);
    }

    /**
     * Returns the text of the first element matching {@code select}, or {@code null} when absent.
     */
    public String getTxt(Document doc, String select) {
        Element txtElement = doc.select(select).first();
        return txtElement != null ? txtElement.text() : null;
    }

    /**
     * Parses one answer detail page and saves it as an article.
     *
     * @param link   relative answer link, e.g. /question/49279638/answer/115556765
     * @param id     author id
     * @param author author entity
     * @return save result, or an error when the page could not be parsed or the article already exists
     */
    public JsonResult answerCrawler(String link, Integer id, Author author) {
        String src = "https://www.zhihu.com" + link;
        HtmlPage htmlPage = webClientUtil.getPage(src);
        if (htmlPage == null) {
            return error("获取评论内容失败，请核实");
        }
        Document doc = Jsoup.parse(htmlPage.asXml());
        String content = getTxt(doc,
                "div#zh-question-answer-wrap>div>div.js-collapse-body>div.zm-editable-content");
        // Answers shorter than 220 characters are deliberately skipped as not worth archiving.
        if (StringUtils.isBlank(content) || content.length() < 220) {
            return error("获取评论内容失败，请核实");
        }
        String title = getTxt(doc, "div#zh-question-title>h2>a");
        if (StringUtils.isBlank(title)) {
            return error("获取原文标题失败，请核实");
        }
        // The date text looks like "发布于 ..." or "编辑于 ..."; fall back to now when absent.
        // BUGFIX: the original dereferenced first() without a null check → NPE when missing.
        Date time = new Date();
        Element dateLink = doc.select("a.answer-date-link").first();
        if (dateLink != null) {
            String publishedTime = dateLink.text();
            if (StringUtils.isNotBlank(publishedTime)) {
                time = DateUtil.parseDate(
                        publishedTime.replace("发布于", "").replace("编辑于", "").trim());
            }
        }
        Article article = getArticle(id, title, content, src, time);
        return saveIfNew(author, article);
    }

    /**
     * Parses one column post via Zhihu's JSON API and saves it as an article.
     *
     * <p>Link example: //zhuanlan.zhihu.com/p/23463653 is rewritten to the API endpoint
     * https://zhuanlan.zhihu.com/api/posts/23463653.</p>
     *
     * @param link   protocol-relative post link
     * @param id     author id
     * @param author author entity
     * @return save result, or an error when the JSON could not be fetched/decoded or the article exists
     */
    public JsonResult postCrawler(String link, Integer id, Author author) {
        // head = "//zhuanlan.zhihu" (up to the last dot), body = "/23463653" (from the last slash).
        String head = link.substring(link.indexOf("/"), link.lastIndexOf("."));
        String body = link.substring(link.lastIndexOf("/"));
        String src = "https:" + head + ".com/api/posts" + body;
        Map<String, Object> map = getResult(src);
        if (map == null) {
            return error("单篇文章解析失败，请核实json解析方式");
        }
        String content = (String) map.get("content");
        if (StringUtils.isNotBlank(content)) {
            // The API returns rendered HTML: strip named entities, then tags.
            content = content.replaceAll("\\&[a-zA-Z]{0,9};", "").replaceAll("<[^>]*>", "");
        }
        String title = (String) map.get("title");
        if (StringUtils.isNotBlank(title)) {
            title = WebUtil.URLDecode(title);
        }
        String publishedTime = (String) map.get("publishedTime");
        Date time = StringUtils.isNotBlank(publishedTime) ? DateUtil.parseDate(publishedTime) : new Date();
        Article article = getArticle(id, title, content, src, time);
        return saveIfNew(author, article);
    }

    /**
     * Saves the article through the Zhihu parser unless it already exists for the author
     * (an id > 0 means {@link #getArticle} found it in the database).
     */
    private JsonResult saveIfNew(Author author, Article article) {
        if (article.getId() > 0) {
            logger.info(TradeConstant.MSG_ERROR_TITLE_EXIST);
            return error(TradeConstant.MSG_ERROR_TITLE_EXIST);
        }
        int tag = CrawlerUtil.tag("https://www.zhihu.com/");
        BaseParser baseParser = articleCrawlerService.parserMap.get(tag);
        return success(baseParser.save(author, article, article.getPlatform()));
    }

    /**
     * Looks up the article by author id and title; when absent, builds a fresh,
     * not-yet-persisted Article carrying the crawled fields.
     *
     * @param id      author id
     * @param title   article title
     * @param content plain-text body
     * @param src     source url
     * @param time    publish time
     * @return the existing article or a newly populated one
     */
    public Article getArticle(int id, String title, String content, String src, Date time) {
        Article article = articleCrawlMapper.queryByAuthorTitle(id, title);
        if (article == null) {
            article = new Article();
            article.setAuthorId(id);
            article.setTitle(title);
            article.setBody(content);
            article.setLink(src);
            article.setCreateTime(new Date());
            article.setPostTime(time);
            article.setPlatform("知乎");
        }
        return article;
    }

    /**
     * Fetches {@code src} and decodes the JSON body into a map.
     *
     * @param src request url
     * @return the decoded map, or {@code null} when the request failed or the body was empty
     */
    public Map<String, Object> getResult(String src) {
        String json = WebUtil.buildRequest(src, false);
        if (json == null) {
            return null;
        }
        Map<String, Object> map = JsonUtil.getResult(json);
        return (map == null || map.isEmpty()) ? null : map;
    }


    /** Base profile url for Zhihu. */
    @Override
    protected String url() {
        return CrawlConstant.ZHIHU_URL;
    }

    /** Platform tag used to pick the Zhihu parser. */
    @Override
    protected int tag() {
        return CrawlConstant.ZHIHU_TAG;
    }

    /** CSS selector for the author's avatar image. */
    @Override
    protected String avatarSelect() {
        return "div.user-image>img";
    }

    @Override
    protected String channelNameSelect() {
        return "h1.user-name";
    }

    @Override
    protected String listSelect() {
        return "div.wwgs1>div";
    }

    @Override
    protected String titleSelect() {
        return "h2.article-title>a";
    }

    @Override
    protected String authorSelect() {
        return "h1.user-name";
    }

    @Override
    protected String authorLink() {
        return "div.PostIndex-author>a";
    }

    // NOTE: the redundant iterate() override (which only delegated to super) was removed;
    // the inherited BaseCrawler implementation is used directly.

    /** Per-article parsing is handled by answerCrawler/postCrawler, so this hook is inert. */
    @Override
    protected JsonResult parse(int authorId, String link) throws Exception {
        return new JsonResult();
    }

    /** Body extraction is done in the type-specific crawlers; nothing to do here. */
    @Override
    protected String getBody(Document doc) {
        return "";
    }
}
