package com.heima.recommend.service.impl;

import cn.hutool.core.collection.CollectionUtil;
import com.alibaba.fastjson.JSON;
import com.heima.model.article.entity.ApArticle;
import com.heima.model.article.vo.HotArticleVo;
import com.heima.model.constants.ArticleContants;
import com.heima.recommend.service.HotArticleService;
import com.heima.recommend.mapper.ApArticleMapper;
import com.heima.utils.common.DateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.data.redis.core.DefaultTypedTuple;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.core.ZSetOperations;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.*;
import java.util.stream.Collectors;

@Service
@Slf4j
public class HotArticleServiceImpl implements HotArticleService {

    @Resource
    private ApArticleMapper apArticleMapper;
    @Resource
    private StringRedisTemplate stringRedisTemplate;

    @Resource
    private ThreadPoolTaskExecutor threadPoolTaskExecutor;

    /** Page size used when scanning recently published articles. */
    private static final int PAGE_SIZE = 5;

    /**
     * Computes hot-article scores for every article published within the last
     * 5 days and caches the results in Redis.
     *
     * <p>Articles are pulled page by page (ordered by publish time); each page
     * is scored and cached asynchronously on the shared task executor.
     */
    @Override
    public void computeHotArticle() {
        // Start of the scan window: 5 days before now.
        Date begin = DateUtils.addDay(new Date(), -5);

        while (true) {
            // Fetch the next page of articles published after `begin`.
            List<ApArticle> apArticleList = apArticleMapper.selectListLimit(begin, PAGE_SIZE);

            log.info("加载文章数据完毕,数据列表:{}", apArticleList);
            if (CollectionUtil.isEmpty(apArticleList)) {
                // No more articles to process; computation is finished.
                return;
            }

            // Advance the cursor: the last article's publish time becomes the
            // lower bound of the next page.
            // NOTE(review): if more than PAGE_SIZE articles share the same
            // publish time this pagination could stall — confirm the mapper
            // query uses a strict "greater than" consistently with this cursor.
            begin = apArticleList.get(apArticleList.size() - 1).getPublishTime();

            // Score and cache this page off the calling thread.
            threadPoolTaskExecutor.execute(() -> computeAndCacheHotArticleScore(apArticleList));
        }
    }

    /**
     * Scores a batch of articles and writes the results to the Redis cache.
     *
     * @param apArticleList articles to score; must not be empty
     */
    private void computeAndCacheHotArticleScore(List<ApArticle> apArticleList) {
        List<HotArticleVo> articleVoList = computerArticleScore(apArticleList);
        saveHotArticleToCache(articleVoList);
    }

    /**
     * Caches scored articles in Redis:
     * <ul>
     *   <li>a ZSet per channel (member = article id, score = hot score),</li>
     *   <li>an aggregate ZSet for the recommendation ("default") channel,</li>
     *   <li>a hash of article id → article JSON for detail lookups.</li>
     * </ul>
     *
     * @param hotArticleVoList scored articles to cache
     */
    private void saveHotArticleToCache(List<HotArticleVo> hotArticleVoList) {

        // Group articles by channel so each channel's ZSet is written in one batch.
        Map<Long, List<HotArticleVo>> map = hotArticleVoList.stream()
                .collect(Collectors.groupingBy(HotArticleVo::getChannelId));

        map.forEach((channelId, hotArticleVos) -> {
            // Build (member, score) tuples for a single batched ZSet insert.
            Set<ZSetOperations.TypedTuple<String>> tupleSet = hotArticleVos.stream()
                    .map(hotArticleVo -> new DefaultTypedTuple<String>(
                            hotArticleVo.getId().toString(),
                            hotArticleVo.getScore().doubleValue()))
                    .collect(Collectors.toSet());

            // 3.1 Cache score data for this channel.
            stringRedisTemplate.opsForZSet().add(ArticleContants.HOT_ARTICLE_PREFIX + channelId, tupleSet);

            // 3.2 Cache score data for the recommendation channel: the union of
            // every channel's articles, accumulated across iterations.
            stringRedisTemplate.opsForZSet().add(ArticleContants.HOT_ARTICLE_PREFIX + ArticleContants.DEFAULT_TAG, tupleSet);

            // 3.3 Cache the article payloads: key = article id, value = article JSON.
            Map<String, String> collect = hotArticleVos.stream()
                    .collect(Collectors.toMap(
                            hotArticleVo -> hotArticleVo.getId().toString(),
                            JSON::toJSONString));

            stringRedisTemplate.opsForHash().putAll(ArticleContants.HOT_ARTICLE_HASH, collect);
        });
    }

    /**
     * Converts each article into a {@link HotArticleVo} carrying its hot score.
     *
     * @param apArticleList articles to score
     * @return one scored VO per input article, in input order
     */
    private List<HotArticleVo> computerArticleScore(List<ApArticle> apArticleList) {

        List<HotArticleVo> hotArticleVoList = new ArrayList<>(apArticleList.size());
        for (ApArticle apArticle : apArticleList) {
            HotArticleVo hotArticleVo = new HotArticleVo();
            BeanUtils.copyProperties(apArticle, hotArticleVo);
            hotArticleVo.setScore(computeScore(apArticle));
            hotArticleVoList.add(hotArticleVo);
        }
        return hotArticleVoList;
    }

    /**
     * Computes an article's hot score as the weighted sum of its engagement
     * metrics (likes, views, comments, collections); null metrics contribute 0.
     *
     * <p>Bug fix: the previous implementation used {@code =} instead of
     * {@code +=}, so each non-null metric overwrote the preceding ones and
     * only the last non-null metric determined the score.
     *
     * @param apArticle article whose metrics are scored
     * @return the accumulated weighted score
     */
    private int computeScore(ApArticle apArticle) {

        int score = 0;
        if (apArticle.getLikes() != null) {
            score += apArticle.getLikes() * ArticleContants.HOT_ARTICLE_LIKE_WEIGHT;
        }
        if (apArticle.getViews() != null) {
            score += apArticle.getViews() * ArticleContants.HOT_ARTICLE_VIEW_WEIGHT;
        }
        if (apArticle.getComment() != null) {
            score += apArticle.getComment() * ArticleContants.HOT_ARTICLE_COMMENT_WEIGHT;
        }
        if (apArticle.getCollection() != null) {
            score += apArticle.getCollection() * ArticleContants.HOT_ARTICLE_COLLECT_WEIGHT;
        }
        return score;
    }

}
