package com.ftf.tp.blog.infrastructure.repository;

import co.elastic.clients.elasticsearch._types.query_dsl.Query;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.core.toolkit.ObjectUtils;
import com.ftf.tp.blog.domain.article.aggregate.ArticleAgg;
import com.ftf.tp.blog.domain.article.dos.*;
import com.ftf.tp.blog.domain.article.repository.ArticleRepository;
import com.ftf.tp.blog.domain.configuration.dos.ColumnDO;
import com.ftf.tp.blog.domain.configuration.dos.TagDO;
import com.ftf.tp.blog.domain.configuration.repository.ColumnRepository;
import com.ftf.tp.blog.domain.configuration.repository.TagRepository;
import com.ftf.tp.blog.infrastructure.config.EsQueryWrapper;
import com.ftf.tp.blog.infrastructure.converter.ArticleConverter;
import com.ftf.tp.blog.infrastructure.converter.ColumnConverter;
import com.ftf.tp.blog.infrastructure.converter.TagConverter;
import com.ftf.tp.blog.infrastructure.mapper.*;
import com.ftf.tp.blog.infrastructure.mapper.pos.*;
import com.ftf.tp.blog.infrastructure.util.ElasticsearchUtil;
import com.ftf.tp.common.constants.RedisConstant;
import com.ftf.tp.common.core.constants.BasePage;
import com.ftf.tp.common.core.constants.CommonConstant;
import com.ftf.tp.common.core.redis.annotation.DistributedLocks;
import com.ftf.tp.common.core.redis.constans.SceneEnum;
import com.ftf.tp.common.core.redis.utils.RedissonHelper;
import com.ftf.tp.common.core.utils.*;
import lombok.extern.slf4j.Slf4j;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.redisson.Redisson;
import org.redisson.api.RLock;
import org.redisson.client.protocol.ScoredEntry;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.Resource;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * @author tanghao
 * @date 2022/7/28
 */

@Slf4j
@Repository
public class ArticleRepositoryImpl implements ArticleRepository {

    @Resource
    private RedissonHelper redissonHelper;

    @Resource
    private ArticleDataMapper articleDataMapper;

    @Resource
    private ArticleSupportMapper articleSupportMapper;

    @Resource
    private ArticleCommentMapper articleCommentMapper;

    @Resource
    private ArticleMapper articleMapper;

    @Resource
    private Redisson redissonClient;

    @Resource
    private ColumnRepository columnRepository;

    @Resource
    private TagRepository tagRepository;

    @Resource
    private BrowseArticleMapper browseArticleMapper;

    @Resource
    private ElasticsearchUtil elasticsearchUtil;

    @Resource
    private TagMapper tagMapper;

    /**
     * Loads a single article by primary key.
     *
     * @param articleId article id
     * @return the article domain object, or null if no row exists
     */
    @Override
    public ArticleDO byId(Long articleId) {
        return ArticleConverter.INSTANCE.poToDo(articleMapper.selectById(articleId));
    }

    /**
     * Persists a newly created article together with its initial data row.
     *
     * @param articleAgg article aggregate to persist
     */
    @Override
    public void initialSave(ArticleAgg articleAgg) {
        // NOTE(review): a previous version read SecurityContextHolder's principal here but
        // never used it; the dead call was removed.
        ArticlePO articlePo = ArticleConverter.INSTANCE.aggToPo(articleAgg);
        articleMapper.insert(articlePo);
        articleDataMapper.insert(ArticleConverter.INSTANCE.doToPo(articleAgg.getArticleDataDO()));
    }


    /**
     * Fetches article index (home page) information for the given ids from Elasticsearch
     * and enriches each entry with its like count from Redis.
     *
     * @param idList article id list
     * @return index DTOs in ES result order
     */
    @Override
    public List<ArticleIndexDO> queryArticleMainInfo(List<Long> idList) {
        EsQueryWrapper esQueryWrapper = new EsQueryWrapper();
        // The filter is expressed through EsQueryWrapper below; a previously discarded
        // Query.of(...) construction was removed as dead code.
        PageResult<ArticleEs> pageResult = elasticsearchUtil.page(ArticleEs.class,
                new BasePage(1, idList.size()),
                esQueryWrapper
                        .in("_id", idList.stream().map(String::valueOf).collect(Collectors.toList()))
                        // the heavy content fields are not needed on index pages
                        .excludes("article_content_markdown", "article_content_html")
        );
        List<ArticleIndexDO> articleIndexDOList = ArticleConverter.INSTANCE.esToDoList(pageResult.getRecords());
        List<String> keySupportList = new ArrayList<>();
        for (ArticleIndexDO articleIndexDO : articleIndexDOList) {
            Long id = articleIndexDO.getId();
            String keySupport = RedisConstant.REDIS_KEY_ARTICLE_SUPPORT_SUM + id;
            keySupportList.add(keySupport);
            //todo fetch collection count
            articleIndexDO.setCollectionAmount(0L);
            //todo fetch comment count
            articleIndexDO.setCommentAmount(0L);
        }
        if (CollectionUtils.isNotEmpty(articleIndexDOList)) {
            // fetch like counts in one batch
            Map<String, Object> blogSupportList = getBlogSupportSumList(keySupportList);
            articleIndexDOList.forEach(articleIndexDO -> {
                Object support = blogSupportList.get(
                        RedisConstant.REDIS_KEY_ARTICLE_SUPPORT_SUM + articleIndexDO.getId());
                // guard against a missing entry: Long.valueOf("null") would throw
                articleIndexDO.setSupportAmount(
                        support == null ? 0L : Long.parseLong(String.valueOf(support)));
            });
        }
        return articleIndexDOList;
    }

    /**
     * Synchronizes the article content to Elasticsearch after creation (read/write split:
     * all other columns are replicated through canal). Retries up to two extra times on failure.
     *
     * @param articleAgg article aggregate holding the markdown content
     */
    @Override
    public void syncEsArticle(ArticleAgg articleAgg) {
        ArticleEs articleEs = new ArticleEs();
        // NOTE(review): only the content fields are populated here; presumably
        // elasticsearchUtil.update resolves the document id elsewhere — confirm,
        // otherwise the update may target no document.
        articleEs.setArticleContentMarkdown(articleAgg.getArticleContentMarkdown());
        articleEs.setArticleContentHtml(MarkdownUtils.markdownToHtml(articleAgg.getArticleContentMarkdown()));
        // cap the number of sync attempts
        int times = 0;
        boolean flag = true;
        do {
            try {
                elasticsearchUtil.update(articleEs);
                log.info("文章内容 es数据同步成功 {} ", articleAgg.getId());
                flag = false;
            } catch (Exception e) {
                // retry twice at most
                if (++times > 2) {
                    // stop looping
                    flag = false;
                    //todo record the failed ES sync for later replay
                }
                log.error("文章内容 es数据同步失败 id:{} {}次", articleAgg.getId(), times, e);
            }
        } while (flag);
    }


    /**
     * Batch-resolves article like counts by Redis key, falling back to the database
     * for keys that are missing/expired in the cache and re-warming the cache.
     *
     * @param keyList Redis keys of shape {@code REDIS_KEY_ARTICLE_SUPPORT_SUM + articleId}
     * @return key -> like count map (entries absent in both cache and DB stay null)
     */
    @Override
    public Map<String, Object> getBlogSupportSumList(List<String> keyList) {
        Map<String, Object> valueBatch = redissonHelper.getValueBatch(keyList);
        List<Long> emptyList = new ArrayList<>();
        // collect ids whose counters are missing or expired in the cache
        keyList.forEach(key -> {
            if (ObjectUtils.isEmpty(valueBatch.get(key))) {
                emptyList.add(Long.valueOf(key.split(":")[1]));
            }
        });
        // batch-load the missing counters from the database
        if (CollectionUtils.isNotEmpty(emptyList)) {
            // each map in the list maps column name -> value
            List<Map<String, Long>> supportMap = articleDataMapper.getSupportByIdList(emptyList);
            if (CollectionUtils.isNotEmpty(supportMap)) {
                for (Map<String, Long> map : supportMap) {
                    Long key = null;
                    Long value = null;
                    for (Map.Entry<String, Long> entry : map.entrySet()) {
                        if ("article_id".equals(entry.getKey())) {
                            key = entry.getValue();
                        }
                        if ("support_amount".equals(entry.getKey())) {
                            value = entry.getValue();
                        }
                    }
                    // NOTE(review): value may be null if support_amount is NULL in the DB —
                    // confirm redissonHelper.setValueBatch tolerates null values.
                    valueBatch.put(RedisConstant.REDIS_KEY_ARTICLE_SUPPORT_SUM + key, value);
                }
                redissonHelper.setValueBatch(valueBatch);
            }
        }
        return valueBatch;
    }


    /**
     * Returns the like count of an article, warming the Redis counter from the DB.
     * Cache breakdown is prevented by the distributed lock annotation.
     */
    @Override
    @DistributedLocks(prefix = RedisConstant.REDIS_KEY_ARTICLE_SUPPORT_SUM_LOCK, Key = "{#articleId}",
            scene = SceneEnum.BREAKDOWN, dataPrefix = RedisConstant.REDIS_KEY_ARTICLE_SUPPORT_SUM)
    public Long getSupportByArticleId(Long articleId) {
        String sumKey = RedisConstant.REDIS_KEY_ARTICLE_SUPPORT_SUM + articleId;
        Long supportCount = articleSupportMapper.getSupportCount(articleId);
        // 7-day TTL, mirrors the other counter caches in this class
        return redissonHelper.addAndGet(sumKey, supportCount, 7L);
    }

    /**
     * Returns the hotness value of an article, warming the Redis counter from the DB.
     */
    @Override
    @DistributedLocks(prefix = RedisConstant.REDIS_KEY_BLOG_HOT_VALUE_LOCK, Key = "{#articleId}",
            scene = SceneEnum.BREAKDOWN, dataPrefix = RedisConstant.REDIS_KEY_BLOG_HOT_VALUE)
    public Long getHotValueByArticleId(Long articleId) {
        String sumKey = RedisConstant.REDIS_KEY_BLOG_HOT_VALUE + articleId;
        Long supportCount = articleDataMapper.getBlogHot(articleId);
        return redissonHelper.addAndGet(sumKey, supportCount, 7L);
    }

    /**
     * Returns the view count of an article, warming the Redis counter from the DB.
     */
    @Override
    @DistributedLocks(prefix = RedisConstant.REDIS_KEY_ARTICLE_BROWSE_COUNT_LOCK, Key = "{#articleId}",
            scene = SceneEnum.BREAKDOWN, dataPrefix = RedisConstant.REDIS_KEY_ARTICLE_BROWSE_COUNT)
    public Long getLookByArticleId(Long articleId) {
        String sumKey = RedisConstant.REDIS_KEY_ARTICLE_BROWSE_COUNT + articleId;
        Long blogLookAmount = articleDataMapper.getBlogLookAmount(articleId);
        return redissonHelper.addAndGet(sumKey, blogLookAmount, 7L);
    }


    /**
     * Loads an article's basic configuration (tags and columns) and caches it for 7 days.
     */
    @Override
    @DistributedLocks(prefix = RedisConstant.REDIS_KEY_ARTICLE_CONFIGURE_LOCK, Key = "{#articleId}",
            scene = SceneEnum.BREAKDOWN, dataPrefix = RedisConstant.REDIS_KEY_ARTICLE_CONFIGURE)
    public ArticleConfigureDO getArticleConfigureById(Long articleId) {
        String sumKey = RedisConstant.REDIS_KEY_ARTICLE_CONFIGURE + articleId;
        List<Long> tagIdList = articleMapper.getTagId(articleId);
        List<Long> configureList = articleMapper.getColumnId(articleId);
        ArticleConfigureDO blogConfigure = getBlogConfigure(tagIdList, configureList);
        redissonHelper.setValueAndTimeUnit(sumKey, blogConfigure, 7, TimeUnit.DAYS);
        return blogConfigure;
    }

    /**
     * Assembles an article configuration from tag and column id lists.
     *
     * @param tagIdList    tag ids (passed straight to the tag repository)
     * @param columnIdList column ids; an empty list yields an empty column list
     */
    public ArticleConfigureDO getBlogConfigure(List<Long> tagIdList, List<Long> columnIdList) {
        List<TagDO> tagDOList = tagRepository.getTagByIdList(tagIdList);
        List<ColumnDO> columnDOList = new ArrayList<>();
        if (CollectionUtils.isNotEmpty(columnIdList)) {
            columnDOList = columnRepository.getColumnByIdList(columnIdList);
        }
        return ArticleConfigureDO.builder().tagDOList(tagDOList)
                .columnDOList(columnDOList)
                .build();
    }

    /**
     * Increments the article view counter by one; on a cold cache, initializes the
     * counter from the DB under a distributed lock to prevent cache breakdown.
     *
     * @param sumKey Redis counter key
     * @param blogId article id
     */
    @Override
    public void browseBlog(String sumKey, Long blogId) {
        String lockKey = RedisConstant.REDIS_KEY_ARTICLE_BROWSE_COUNT + "-" + blogId;
        if (ObjectUtils.isEmpty(redissonHelper.getValue(sumKey))) {
            // distributed lock prevents cache breakdown
            RLock lock = redissonHelper.getLock(lockKey);
            try {
                if (lock.tryLock(100, 10000, TimeUnit.MILLISECONDS)) {
                    try {
                        ArticleDataPO data = articleDataMapper.selectOne(
                                new QueryWrapper<ArticleDataPO>().eq("article_id", blogId));
                        // guard against a missing data row
                        long sum = data == null ? 0L : data.getLookAmount();
                        // seed with the DB value plus this view
                        redissonHelper.addAndGet(sumKey, sum + 1, 7L);
                    } finally {
                        lock.unlock();
                    }
                } else {
                    // lock holder is warming the cache — retry
                    browseBlog(sumKey, blogId);
                }
            } catch (InterruptedException e) {
                // restore the interrupt flag for upstream handlers
                Thread.currentThread().interrupt();
                log.error("文章{} 浏览缓存操作失败", blogId, e);
            }
        } else {
            // view count + 1
            redissonHelper.getIncrement(sumKey, 7L);
        }
    }

    /**
     * Increments the article like counter by one; on a cold cache, initializes the
     * counter from the DB under a distributed lock to prevent cache breakdown.
     *
     * @param sumKey   Redis counter key
     * @param targetId article id
     */
    @Override
    public void articleSupportSum(String sumKey, Long targetId) {
        String lockKey = RedisConstant.REDIS_KEY_ARTICLE_SUPPORT_SUM + "-" + targetId;
        if (ObjectUtils.isEmpty(redissonHelper.getValue(sumKey))) {
            // distributed lock prevents cache breakdown
            RLock lock = redissonHelper.getLock(lockKey);
            try {
                if (lock.tryLock(100, 10000, TimeUnit.MILLISECONDS)) {
                    try {
                        Long sum = articleSupportMapper.getSupportCount(targetId);
                        // seed with the DB value plus this like
                        redissonHelper.addAndGet(sumKey, sum + 1, 7L);
                    } finally {
                        lock.unlock();
                    }
                } else {
                    // lock holder is warming the cache — retry
                    articleSupportSum(sumKey, targetId);
                }
            } catch (InterruptedException e) {
                // restore the interrupt flag for upstream handlers
                Thread.currentThread().interrupt();
                log.error("文章:{} 点赞数增加失败", targetId, e);
            }
        } else {
            // like count + 1
            redissonHelper.getIncrement(sumKey, 7L);
        }
    }


    /**
     * Increments the article comment counter by one; on a cold cache, initializes the
     * counter from the DB under a distributed lock to prevent cache breakdown.
     *
     * @param sumKey   Redis counter key
     * @param targetId article id
     */
    @Override
    public void commentBlog(String sumKey, Long targetId) {
        String lockKey = RedisConstant.REDIS_KEY_ARTICLE_COMMENT_SUM + "-" + targetId;
        if (ObjectUtils.isEmpty(redissonHelper.getValue(sumKey))) {
            // distributed lock prevents cache breakdown
            RLock lock = redissonHelper.getLock(lockKey);
            try {
                if (lock.tryLock(100, 10000, TimeUnit.MILLISECONDS)) {
                    try {
                        // increment by one for this comment, then fold in the DB total
                        redissonHelper.getIncrement(sumKey, 7L);
                        Long sum = articleCommentMapper.getCommentCount(targetId);
                        redissonHelper.addAndGet(sumKey, sum, 7L);
                    } finally {
                        lock.unlock();
                    }
                } else {
                    // lock holder is warming the cache — retry
                    commentBlog(sumKey, targetId);
                }
            } catch (InterruptedException e) {
                // restore the interrupt flag for upstream handlers
                Thread.currentThread().interrupt();
                log.error("文章{} 评论操作失败", targetId, e);
            }
        } else {
            // comment count + 1
            redissonHelper.getIncrement(sumKey, 7L);
        }
    }

    /**
     * Returns the like count of an article straight from the database.
     */
    @Override
    public Long getSupportCount(Long targetId) {
        return articleSupportMapper.getSupportCount(targetId);
    }

    /**
     * Loads article basic information from Elasticsearch by id.
     *
     * @param articleId article id
     * @return article information DTO
     */
    @Override
    public ArticleInfoDO getArticleInfoById(Long articleId) {
        ArticleEs articleEs = elasticsearchUtil.byId(String.valueOf(articleId), ArticleEs.class);
        return ArticleConverter.INSTANCE.esToInfoDO(articleEs);
    }

    /**
     * Returns a random list of article ids (delegates to the mapper's random query).
     */
    @Override
    public List<Long> getIdListRand() {
        return articleMapper.getIdListRand();
    }

    /**
     * Batch-inserts articles into Elasticsearch (used by the crawler).
     */
    @Override
    public void insertEsArticleBatch(List<ArticleEsDO> articleEsDOList) {
        List<ArticleEs> articleEsList = ArticleConverter.INSTANCE.doToPoList(articleEsDOList);
        elasticsearchUtil.updateDataList(articleEsList, false);
    }

    /**
     * Associates tags with an article.
     *
     * @param id        article id
     * @param tagDOList tags to attach
     */
    @Override
    public void articleTagAdd(Long id, List<TagDO> tagDOList) {
        List<TagPO> tagPOS = TagConverter.INSTANCE.doToPOList(tagDOList);
        articleMapper.articleTagAdd(id, tagPOS);
    }

    /**
     * Associates columns with an article.
     */
    @Override
    public void articleColumnAdd(ArticleDO articleDO, List<ColumnDO> columnDOList) {
        ArticlePO articlePo = ArticleConverter.INSTANCE.doToPo(articleDO);
        List<ColumnPO> columnPoList = ColumnConverter.INSTANCE.doToPoList(columnDOList);
        articleMapper.articleColumnAdd(articlePo, columnPoList);
    }

    /**
     * Checks whether a user has already liked an article, based on the Redis
     * sorted-set like record.
     *
     * @param userId    user id
     * @param articleId article id
     * @return true if a like record exists in the cache
     */
    @Override
    public Boolean isSupport(Long userId, Long articleId) {
        String supportRecordKey = RedisConstant.REDIS_KEY_ARTICLE_SUPPORT + articleId;
        Double score = redissonHelper.getScore(supportRecordKey, userId + "");
        // a non-null score means the user appears in the like record zset
        return Objects.nonNull(score);
    }

    /**
     * Persists a like record.
     *
     * @param supportDO like entity (user id, target article id, like time)
     */
    @Override
    public void support(SupportDO supportDO) {
        articleSupportMapper.insert(ArticleSupportPO.builder()
                .userId(supportDO.getUserId())
                .articleId(supportDO.getTargetId())
                .createdTime(supportDO.getSupportTime())
                .build());
    }


    /**
     * Removes a like record.
     *
     * @param userId   user id
     * @param targetId article id
     */
    @Override
    public void cancelSupport(Long userId, Long targetId) {
        articleSupportMapper.deleteById(ArticleSupportPO.builder()
                .userId(userId)
                .articleId(targetId)
                .build());
    }


    /**
     * Scheduled job body: drains browse-record zsets from Redis into the DB,
     * de-duplicates browse records, and flushes cached view counters back to the
     * article data table.
     */
    @Override
    public void browseRecordWrite() {
        List<BrowseArticlePO> browseArticlePoList = new ArrayList<>();
        List<ArticleDataPO> articleDataPoList = new ArrayList<>();
        // scan all per-user browse record keys by prefix
        for (String key : redissonHelper.scanGet(RedisConstant.MAP_KEY_USER_BROWSE_RECORD)) {
            // total number of browse records for this user
            int sum = redissonHelper.getZsetCount(key);
            // page through the zset 1000 entries at a time
            int pageSize = 1000;
            if (sum > 0) {
                for (int i = 0; i <= sum / pageSize; i++) {
                    Collection<ScoredEntry<Object>> scanZset = redissonHelper.getScanZset(key, i * pageSize, (i + 1) * pageSize);
                    scanZset.forEach(browse -> {
                        // the zset score encodes the browse timestamp
                        Long score = browse.getScore().longValue();
                        String userId = key.split(":")[1];
                        BrowseArticlePO browseArticlePO = BrowseArticlePO.builder()
                                .articleId(Long.valueOf((String) browse.getValue()))
                                .createdTime(DateUtils.parse(DateUtils.convertTimeToString(score), DateUtils.DATE_TIME_PATTERN))
                                .userId(Long.valueOf(userId))
                                .build();
                        browseArticlePoList.add(browseArticlePO);
                    });
                }
            }
        }
        // collect cached view counters to write back
        redissonHelper.scanGetStream(RedisConstant.REDIS_KEY_ARTICLE_BROWSE_COUNT).forEach(key -> {
            Long blogId = Long.valueOf(key.split(":")[1]);
            Long value = redissonHelper.getValueAto(key);
            ArticleDataPO articleDataPO = ArticleDataPO.builder()
                    .articleId(blogId)
                    .lookAmount(value)
                    .build();
            articleDataPoList.add(articleDataPO);
        });
        // batch-insert browse records
        if (CollectionUtils.isNotEmpty(browseArticlePoList)) {
            browseArticleMapper.batchInsert(browseArticlePoList);
        }
        // find duplicated browse records and delete all but the newest
        List<Long> idList = browseArticleMapper.selectBrowseRepeat();
        if (CollectionUtils.isNotEmpty(idList)) {
            browseArticleMapper.deleteBatchIds(idList);
        }
        // batch-update view counts
        if (CollectionUtils.isNotEmpty(articleDataPoList)) {
            articleDataMapper.updateBatchByBlogId(articleDataPoList);
        }
        log.info("浏览记录入库定时任务结束,本次共新增浏览记录{}条,删除重复的{}条", browseArticlePoList.size(), idList.size());
    }

    /**
     * Rebuilds the hot-rank list in Redis from the DB ranking query.
     */
    @Override
    public void hotRank() {
        log.info("开始更新热榜");
        List<Long> idList = articleDataMapper.getHotRank();
        redissonHelper.listClear(RedisConstant.REDIS_KEY_ARTICLE_HOT_RANK);
        redissonHelper.listAddAllLong(RedisConstant.REDIS_KEY_ARTICLE_HOT_RANK, idList);
        log.info("更新热榜成功");
    }


    /**
     * Crawls all articles of a CSDN user, page by page, and stores them
     * (DB + Elasticsearch). Stops at the first page without an article list.
     *
     * @param userName   CSDN user name (URL segment)
     * @param userId     local author id to assign
     * @param authorName local author display name
     * @param bigTag     parent tag id for newly created tags
     */
    @Override
    public void climbBlog(String userName, Long userId, String authorName, Long bigTag) {
        log.info("》》》》》》》爬虫开始《《《《《《《");
        String baseUrl = "https://blog.csdn.net/" + userName + "/";
        String secondUrl = baseUrl + "article/list/";
        for (int i = 1; ; i++) {
            // crawl starting from page 1
            String startUrl = secondUrl + i;
            Document doc = null;
            try {
                doc = Jsoup.connect(startUrl).get();
            } catch (IOException e) {
                log.warn("jsoup获取url失败", e);
            }
            // a failed fetch would NPE below — stop crawling instead
            if (doc == null) {
                break;
            }
            Element element = doc.body();
            // locate div class='article-list'
            element = element.select("div.article-list").first();
            if (element == null) {
                break;
            }
            Elements elements = element.children();
            List<ArticleEsDO> blogEsPOList = new ArrayList<>();
            for (Element e : elements) {
                // article id is carried in the data-articleid attribute
                String articleId = e.attr("data-articleid");
                // crawl the single article
                ArticleEsDO blogEsPO = null;
                try {
                    blogEsPO = climbDetailById(baseUrl, articleId, userId, userName, authorName, bigTag);
                } catch (Exception ex) {
                    log.error("异常", ex);
                }
                if (ObjectUtils.isNotEmpty(blogEsPO)) {
                    blogEsPOList.add(blogEsPO);
                }
            }
            if (CollectionUtils.isNotEmpty(blogEsPOList)) {
                insertEsArticleBatch(blogEsPOList);
            }
        }
        log.info("》》》》》》》爬虫结束《《《《《《《");
    }

    /**
     * Crawls a single article page, inserts the article, its data row and tags,
     * and returns the ES document to index.
     *
     * <p>NOTE(review): this method is invoked via self-invocation from
     * {@link #climbBlog}, so the {@code @Transactional} proxy does not apply — confirm
     * whether transactional behavior is actually required here.</p>
     *
     * @return the ES DTO, or null when the page cannot be parsed into a usable article
     */
    @Transactional(rollbackFor = Exception.class)
    public ArticleEsDO climbDetailById(String baseUrl, String articleId, Long userId, String userName, String authorName, Long bigTag) {
        ArticlePO articlePO = new ArticlePO();
        String startUrl = baseUrl + "article/details/" + articleId;
        Document doc = null;
        try {
            doc = Jsoup.connect(startUrl).get();
        } catch (IOException e) {
            log.warn("jsoup获取url失败", e);
        }
        // a failed fetch would NPE below — treat as unparseable
        if (doc == null) {
            return null;
        }
        Element element = doc.body();
        Element htmlElement = element.select("div#content_views").first();
        Element titleElement = element.selectFirst(".title-article");
        // give up on pages missing the expected structure instead of throwing NPE
        if (htmlElement == null || titleElement == null) {
            return null;
        }
        String fileName = titleElement.text();

        articlePO.setArticleTitle(fileName);
        Elements elements = element.select("div.tags-box");
        List<TagPO> tagsBoxValue = getTagsBoxValue(elements, bigTag);

        // extract the publish time
        Element timeElement = element.selectFirst("span.time");
        if (timeElement == null) {
            return null;
        }
        String time = timeElement.text().substring(2, 22).trim();
        articlePO.setCreatedTime(DateUtils.parse(time, DateUtils.DATE_TIME_PATTERN));
        articlePO.setUpdatedTime(DateUtils.parse(time, DateUtils.DATE_TIME_PATTERN));
        articlePO.setId(SnowIdUtils.uniqueLong());
        String md;
        try {
            md = Html2Md.getMarkDownText(htmlElement);
        } catch (Exception e) {
            // conversion failed — skip this article
            return null;
        }
        String toHtml = MarkdownUtils.markdownToHtml(md);
        if ("".equals(md) || "".equals(toHtml)) {
            return null;
        }
        // the abstract is the first 800 chars of the rendered HTML
        if (toHtml.length() > 800) {
            articlePO.setArticleAbstract(toHtml.substring(0, 800));
        } else {
            articlePO.setArticleAbstract(toHtml);
        }
        if ("".equals(articlePO.getArticleAbstract())) {
            return null;
        }
        articlePO.setArticleAuthorId(userId);
        articlePO.setArticleAuthorName(authorName);
        articlePO.setArticlePublishStatus(1);
        articlePO.setArticleType(0);
        articlePO.setArticleStatus(40);
        articlePO.setCommentStatus(0);
        articleMapper.insert(articlePO);
        // initialize the ES document with the article content
        ArticleEsDO blogEsPO = new ArticleEsDO();
        ConverterUtil.copyPropertiesIgnoreNull(articlePO, blogEsPO);
        blogEsPO.setArticleContentMarkdown(md);
        blogEsPO.setArticleContentHtml(toHtml);

        // initialize the article data row
        articleDataMapper.insert(ArticleDataPO.builder().articleId(articlePO.getId()).collectionAmount(0L).lookAmount(0L).supportAmount(0L).commentAmount(0L)
                // initial hotness is the publish timestamp in hours
                .hotValue(articlePO.getCreatedTime().getTime() / CommonConstant.HOT_VALUE_UNIT).build());
        if (CollectionUtils.isNotEmpty(tagsBoxValue)) {
            articleMapper.articleTagAdd(articlePO.getId(), tagsBoxValue);
        }
        return blogEsPO;
    }

    /**
     * Parses tag links from a crawled page's tags box, creating any tags that do not
     * yet exist (insert failures are logged and skipped, e.g. name duplicates).
     *
     * @param elements the "div.tags-box" elements (first one is used)
     * @param bigTag   parent tag id for newly created tags
     * @return all tags referenced by the page, existing and newly created
     */
    private List<TagPO> getTagsBoxValue(Elements elements, Long bigTag) {
        Elements categories = elements.get(0).select("a.tag-link");
        List<TagPO> tagPOList = new ArrayList<>();
        List<TagPO> tagPOInsertList = new ArrayList<>();
        for (Element e : categories) {
            String temp = e.text().replace("\t", "").replace("\n", "").replace("\r", "");
            TagPO tagPO = articleMapper.selectByTagName(temp);
            if (ObjectUtils.isEmpty(tagPO)) {
                tagPO = new TagPO();
                tagPO.setId(SnowIdUtils.uniqueLong());
                tagPO.setTagName(temp);
                tagPO.setTagStatus(0);
                tagPO.setTagParentId(bigTag);
                tagPO.setTagType(1);
                tagPO.setCreatedBy(0L);
                tagPO.setFollowNumber(0);
                tagPO.setCreatedTime(new Date());
                tagPOInsertList.add(tagPO);
            }
            tagPOList.add(tagPO);
        }
        if (CollectionUtils.isNotEmpty(tagPOInsertList)) {
            // insert one by one; duplicate names are tolerated via the catch
            tagPOInsertList.forEach(tagPO -> {
                try {
                    tagMapper.insert(tagPO);
                } catch (Exception e) {
                    log.error("异常", e);
                }
            });
        }
        return tagPOList;
    }

    /**
     * Loads hot-rank article information from Elasticsearch.
     *
     * @param idList hot-rank article ids
     * @return index DTOs (without the heavy content fields)
     */
    @Override
    public List<ArticleIndexDO> hotRankInfo(List<Long> idList) {
        PageResult<ArticleEs> hotRankList = elasticsearchUtil.page(
                ArticleEs.class,
                new BasePage(1, idList.size()),
                new EsQueryWrapper()
                        .in("_id", idList.stream().map(String::valueOf).collect(Collectors.toList()))
                        .excludes("article_content_markdown", "article_content_html"));
        List<ArticleEs> recordList = hotRankList.getRecords();
        return ArticleConverter.INSTANCE.esToDoList(recordList);
    }

    /**
     * Creates the article index in Elasticsearch with its settings and mappings.
     */
    @Override
    public void createIndex() throws Exception {
        elasticsearchUtil.createIndexSettingsMappings(ArticleEs.class);
    }


    /**
     * Placeholder required by the interface; intentionally a no-op.
     */
    @Override
    public void get() {
        // intentionally empty — experimental aggregation code was removed
    }
}
