package com.heima.recommend.service.impl;

import com.alibaba.fastjson.JSON;
import com.heima.common.constants.ArticleContants;
import com.heima.model.article.entity.ApArticle;
import com.heima.model.article.vo.HotArticleVo;
import com.heima.recommend.service.HotArticleService;
import com.heima.recommend.dao.ApArticleDao;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.time.DateUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.data.redis.core.DefaultTypedTuple;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.core.ZSetOperations;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

import javax.annotation.Resource;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * @author Administrator
 */
@Service
@Slf4j
public class HotArticleServiceImpl implements HotArticleService {

    /** How many days back to scan for candidate articles. */
    private static final int SCAN_DAYS = 5;

    /** Page size used when scanning the article table in batches. */
    private static final int QUERY_BATCH_SIZE = 5;

    @Resource
    private ApArticleDao apArticleDao;

    @Resource
    private StringRedisTemplate stringRedisTemplate;

    /**
     * Computes a hotness score for every article published within the last
     * {@value #SCAN_DAYS} days and caches the results in Redis.
     *
     * <p>The article table is scanned in pages of {@value #QUERY_BATCH_SIZE},
     * using the publish time of the last row of each page as the cursor for
     * the next query. A defensive guard stops the loop if the cursor ever
     * fails to advance (possible if the DAO query is inclusive and a full
     * page shares a single publish time), which would otherwise loop forever.
     */
    @Override
    public void computeHotArticle() {
        // Start the scan SCAN_DAYS before now.
        Date begin = DateUtils.addDays(new Date(), -SCAN_DAYS);
        while (true) {
            log.info("热门文章计算开始执行，查询时间：{}", begin);
            // Load the next page of articles published after `begin`.
            List<ApArticle> apArticleList = apArticleDao.queryAfterByLimit(begin, QUERY_BATCH_SIZE);
            log.info("加载文章数据完毕，数据列表：{}", apArticleList);
            // An empty page means the scan reached the end of the data.
            if (CollectionUtils.isEmpty(apArticleList)) {
                return;
            }

            // Score this batch and persist the scores to the Redis cache
            // before advancing the cursor, so the final batch is never lost.
            List<HotArticleVo> hotArticleVoList = computeArticleScore(apArticleList);
            saveHotArticleToCache(hotArticleVoList);

            // Advance the cursor to the last row's publish time.
            // NOTE(review): assumes queryAfterByLimit compares with a strict '>'
            // and returns rows ordered by publish time — confirm in the DAO.
            Date nextBegin = apArticleList.get(apArticleList.size() - 1).getPublishTime();
            if (nextBegin == null || !nextBegin.after(begin)) {
                // Cursor did not move forward; bail out rather than loop forever.
                log.warn("热门文章计算终止：分页游标未推进，begin={}", begin);
                return;
            }
            begin = nextBegin;
        }
    }

    /**
     * Caches the scored articles in Redis, grouped by channel.
     *
     * <p>For each channel this writes three structures:
     * <ul>
     *   <li>a ZSet per channel ({@code HOT_ARTICLE_PREFIX + channelId}) holding
     *       article id → score;</li>
     *   <li>the shared "recommend" ZSet ({@code HOT_ARTICLE_PREFIX + DEFAULT_TAG}),
     *       accumulating entries from every channel;</li>
     *   <li>a hash ({@code HOT_ARTICLE_HASH}) of article id → article JSON.</li>
     * </ul>
     *
     * @param hotArticleVoList scored articles to cache; each element must carry
     *                         a non-null id, channelId and score
     */
    private void saveHotArticleToCache(List<HotArticleVo> hotArticleVoList) {
        Map<Long, List<HotArticleVo>> byChannel = hotArticleVoList.stream()
                .collect(Collectors.groupingBy(HotArticleVo::getChannelId));
        byChannel.forEach((channelId, hotArticleVos) -> {

            // Redis ZSets take (member, score) pairs as TypedTuple values;
            // wrap each article id with its score.
            Set<ZSetOperations.TypedTuple<String>> tupleSet = hotArticleVos.stream()
                    .map(vo -> new DefaultTypedTuple<String>(vo.getId() + "", vo.getScore().doubleValue()))
                    .collect(Collectors.toSet());

            // Per-channel score ZSet.
            stringRedisTemplate.opsForZSet().add(ArticleContants.HOT_ARTICLE_PREFIX + channelId, tupleSet);

            // Recommend-channel ZSet: every channel's entries accumulate here.
            stringRedisTemplate.opsForZSet().add(ArticleContants.HOT_ARTICLE_PREFIX + ArticleContants.DEFAULT_TAG, tupleSet);

            // Article detail hash: article id -> article JSON.
            Map<String, String> articleJsonById = hotArticleVos.stream()
                    .collect(Collectors.toMap(vo -> vo.getId().toString(), JSON::toJSONString));

            stringRedisTemplate.opsForHash().putAll(ArticleContants.HOT_ARTICLE_HASH, articleJsonById);
        });
    }

    /**
     * Converts each article into a {@link HotArticleVo} carrying its
     * computed hotness score.
     *
     * @param apArticleList articles to score
     * @return one scored VO per input article, in the same order
     */
    private List<HotArticleVo> computeArticleScore(List<ApArticle> apArticleList) {
        return apArticleList.stream().map(apArticle -> {
            HotArticleVo hotArticleVo = new HotArticleVo();
            BeanUtils.copyProperties(apArticle, hotArticleVo);
            hotArticleVo.setScore(computeScore(apArticle));
            return hotArticleVo;
        }).collect(Collectors.toList());
    }

    /**
     * Computes an article's hotness score as the weighted sum of its
     * engagement counters. Null counters contribute nothing.
     *
     * @param apArticle the article whose counters are summed
     * @return weighted score (likes, views, comments and collections
     *         multiplied by their respective weight constants)
     */
    private int computeScore(ApArticle apArticle) {
        int score = 0;

        if (apArticle.getLikes() != null) {
            score += apArticle.getLikes() * ArticleContants.HOT_ARTICLE_LIKE_WEIGHT;
        }

        if (apArticle.getViews() != null) {
            score += apArticle.getViews() * ArticleContants.HOT_ARTICLE_VIEW_WEIGHT;
        }

        if (apArticle.getComment() != null) {
            score += apArticle.getComment() * ArticleContants.HOT_ARTICLE_COMMENT_WEIGHT;
        }

        if (apArticle.getCollection() != null) {
            score += apArticle.getCollection() * ArticleContants.HOT_ARTICLE_COLLECT_WEIGHT;
        }

        return score;
    }
}
