package com.heima.recommend.service.impl;


import com.alibaba.fastjson.JSON;
import com.heima.common.constants.ArticleConstants;
import com.heima.model.article.entity.ApArticle;
import com.heima.model.article.vo.HotArticleVo;
import com.heima.recommend.dao.ApArticleDao;
import com.heima.recommend.service.HotArticleService;
import com.heima.utils.common.DateUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.data.redis.core.DefaultTypedTuple;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.core.ZSetOperations;
import org.springframework.data.redis.support.collections.DefaultRedisSet;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

@Service
@Slf4j
public class HotArticleServiceImpl implements HotArticleService {

    @Resource
    private ApArticleDao apArticleDao;

    @Resource
    private StringRedisTemplate stringRedisTemplate;

    @Resource
    private ThreadPoolTaskExecutor threadPoolTaskExecutor;

    /** How many days back to look for candidate hot articles. */
    private static final int LOOKBACK_DAYS = 5;

    /** Page size used while scanning recent articles from the DAO. */
    private static final int PAGE_SIZE = 5;

    /**
     * Recomputes hot-article rankings.
     *
     * <p>Deletes all previously cached score keys, then pages through every article
     * published within the last {@value #LOOKBACK_DAYS} days and hands each page to the
     * shared task executor, which scores the articles and writes per-channel sorted sets,
     * an aggregated "recommend" sorted set, and a hash of article detail JSON into Redis.
     */
    @Override
    public void computeHotArticle() {

        // Clear previously computed score data so stale entries don't linger.
        // NOTE(review): KEYS is O(N) and blocks the Redis server; consider SCAN
        // for large production keyspaces.
        Set<String> keys = stringRedisTemplate.keys(ArticleConstants.HOT_ARTICLE_PREFIX + "*");
        if (CollectionUtils.isNotEmpty(keys)) {
            stringRedisTemplate.delete(keys);
        }

        // 1. Page through all articles published within the last LOOKBACK_DAYS days,
        //    using the publish time of the last row as the cursor for the next page.
        Date begin = DateUtils.addDay(new Date(), -LOOKBACK_DAYS);
        while (true) {
            List<ApArticle> apArticleList = apArticleDao.queryAfterByLimit(begin, PAGE_SIZE);
            // An empty page means every recent article has been dispatched.
            if (CollectionUtils.isEmpty(apArticleList)) {
                return;
            }

            // NOTE(review): if more than PAGE_SIZE articles share the same publishTime,
            // this cursor can skip rows or loop depending on whether the DAO compares
            // with > or >= — verify queryAfterByLimit's semantics.
            begin = apArticleList.get(apArticleList.size() - 1).getPublishTime();

            // Score and cache this page asynchronously on the shared task executor.
            threadPoolTaskExecutor.execute(asyncComputeHotArticle(apArticleList));
        }
    }

    /**
     * Builds a task that scores the given articles and caches the results in Redis.
     *
     * <p>For each article the task computes a weighted score, then per channel writes:
     * a channel-scoped sorted set of (articleId, score), the same tuples into the
     * aggregated "recommend" sorted set, and the article JSON into a detail hash keyed
     * by article id.
     *
     * @param apArticleList one page of recently published articles; must not be null
     * @return a runnable suitable for submission to a task executor
     */
    public Runnable asyncComputeHotArticle(List<ApArticle> apArticleList) {
        return () -> {
            log.info("异步线程任务开启,线程名称{}", Thread.currentThread().getName());

            // 2. Compute each article's score and copy its fields into a view object.
            List<HotArticleVo> hotArticleVoList = apArticleList.stream().map(apArticle -> {
                HotArticleVo hotArticleVo = new HotArticleVo();
                BeanUtils.copyProperties(apArticle, hotArticleVo);
                hotArticleVo.setScore(getScore(apArticle));
                return hotArticleVo;
            }).collect(Collectors.toList());

            // 3. Cache the scored articles, grouped by channel.
            Map<Long, List<HotArticleVo>> byChannel =
                    hotArticleVoList.stream().collect(Collectors.groupingBy(HotArticleVo::getChannelId));
            byChannel.forEach((channelId, hotArticleVos) -> {

                // Build the batch of (articleId, score) tuples for one ZADD per channel.
                Set<ZSetOperations.TypedTuple<String>> tuples = hotArticleVos.stream()
                        .map(vo -> new DefaultTypedTuple<String>(vo.getId() + "", vo.getScore().doubleValue()))
                        .collect(Collectors.toSet());

                // 3.1 Per-channel ranking.
                stringRedisTemplate.opsForZSet().add(ArticleConstants.HOT_ARTICLE_PREFIX + channelId, tuples);

                // 3.2 "Recommend" channel aggregates every channel's tuples.
                stringRedisTemplate.opsForZSet().add(
                        ArticleConstants.HOT_ARTICLE_PREFIX + ArticleConstants.DEFAULT_TAG, tuples);

                // 3.3 Article detail cache: hash field = article id, value = VO as JSON.
                //     (toMap without a merge function: duplicate ids within one page
                //     would throw, same as the original behavior.)
                Map<String, String> detailById = hotArticleVos.stream()
                        .collect(Collectors.toMap(vo -> vo.getId() + "", JSON::toJSONString));
                stringRedisTemplate.opsForHash().putAll(ArticleConstants.HOT_ARTICLE_HASH, detailById);
            });
        };
    }

    /**
     * Computes an article's heat score as the weighted sum of its engagement counters.
     *
     * <p>Each counter (likes, views, comments, collections) contributes
     * {@code count * weight}; null counters contribute nothing.
     *
     * @param apArticle the article whose counters are read; must not be null
     * @return the weighted score (0 when every counter is null)
     */
    private int getScore(ApArticle apArticle) {
        int score = 0;
        if (apArticle.getLikes() != null) {
            score += apArticle.getLikes() * ArticleConstants.HOT_ARTICLE_LIKE_WEIGHT;
        }

        if (apArticle.getViews() != null) {
            score += apArticle.getViews() * ArticleConstants.HOT_ARTICLE_VIEW_WEIGHT;
        }

        if (apArticle.getComment() != null) {
            score += apArticle.getComment() * ArticleConstants.HOT_ARTICLE_COMMENT_WEIGHT;
        }

        if (apArticle.getCollection() != null) {
            score += apArticle.getCollection() * ArticleConstants.HOT_ARTICLE_COLLECT_WEIGHT;
        }
        return score;
    }
}
