package com.heima.article.service.impl;

import com.alibaba.fastjson.JSON;
import com.heima.article.mapper.ApArticleMapper;
import com.heima.article.service.HotArticleService;
import com.heima.common.constants.ArticleConstants;
import com.heima.common.redis.CacheService;
import com.heima.feign.wemedia.IWemediaClient;
import com.heima.model.article.pojos.ApArticle;
import com.heima.model.article.vos.HotArticleVo;
import lombok.extern.slf4j.Slf4j;
import org.joda.time.DateTime;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;

/**
 * @Author: Tangguo
 * @Date: 2023-06-24  15:20
 */
@Service
@Transactional
@Slf4j
public class HotArticleServiceImpl implements HotArticleService {

    /** Number of top-scored articles cached per channel (and for the default tag). */
    private static final int HOT_ARTICLE_CACHE_SIZE = 30;

    @Autowired
    private ApArticleMapper apArticleMapper;

    @Autowired
    private IWemediaClient wemediaClient;

    @Autowired
    private CacheService cacheService;

    /**
     * Computes hot articles: loads articles published within the last 5 days,
     * scores each one via {@link #computeScore(ApArticle)}, then caches the
     * top 30 per channel — plus an overall top-30 list under the default
     * ("recommended") tag — in Redis as JSON.
     */
    @Override
    public void computeHotArticle() {
        // 1. Query articles from the last 5 days
        Date dateParam = DateTime.now().minusDays(5).toDate();
        List<ApArticle> apArticleList = apArticleMapper.findArticleListByLast5days(dateParam);

        // 2. Compute a hotness score for every article
        List<HotArticleVo> hotArticleVoList = apArticleList.stream().map(apArticle -> {
            HotArticleVo hotArticleVo = new HotArticleVo();
            BeanUtils.copyProperties(apArticle, hotArticleVo);
            hotArticleVo.setScore(computeScore(apArticle));
            return hotArticleVo;
        }).collect(Collectors.toList());

        // 3. Cache the highest-scored articles for each channel
        // 3.1 Collect the distinct channel ids present in the result set.
        //     Null channel ids are skipped: they cannot form a cache key and
        //     would NPE the per-channel filter below.
        List<Integer> channelIds = apArticleList.stream()
                .map(ApArticle::getChannelId)
                .filter(id -> id != null)
                .distinct()
                .collect(Collectors.toList());
        for (Integer channelId : channelIds) {
            // 3.2 Filter this channel's articles, sort by score descending,
            //     keep the top 30, and store them in Redis.
            //     Integer.compare avoids the overflow risk of subtracting ints.
            List<HotArticleVo> hotArticleVos = hotArticleVoList.stream()
                    .filter(article -> channelId.equals(article.getChannelId()))
                    .sorted((o1, o2) -> Integer.compare(o2.getScore(), o1.getScore()))
                    .limit(HOT_ARTICLE_CACHE_SIZE)
                    .collect(Collectors.toList());
            cacheService.set(ArticleConstants.HOT_ARTICLE_FIRST_PAGE + channelId, JSON.toJSONString(hotArticleVos));
        }

        // 3.3 The "recommended" (default) channel gets the overall top 30
        //     across all channels, cached under the default tag.
        List<HotArticleVo> hotArticleVos = hotArticleVoList.stream()
                .sorted((o1, o2) -> Integer.compare(o2.getScore(), o1.getScore()))
                .limit(HOT_ARTICLE_CACHE_SIZE)
                .collect(Collectors.toList());
        // Store in Redis cache
        cacheService.set(ArticleConstants.HOT_ARTICLE_FIRST_PAGE + ArticleConstants.DEFAULT_TAG, JSON.toJSONString(hotArticleVos));
    }

    /**
     * Computes the weighted hotness score of a single article.
     * Views contribute with weight 1; likes, comments and collections are
     * multiplied by their respective weights from {@link ArticleConstants}.
     * A null metric field contributes 0.
     *
     * @param apArticle the article whose engagement metrics are scored
     * @return the aggregated score, never null
     */
    private Integer computeScore(ApArticle apArticle) {
        // Use a primitive accumulator to avoid repeated Integer autoboxing.
        int score = 0;
        if (apArticle.getLikes() != null) {
            score += apArticle.getLikes() * ArticleConstants.HOT_ARTICLE_LIKE_WEIGHT;
        }
        if (apArticle.getViews() != null) {
            score += apArticle.getViews();
        }
        if (apArticle.getComment() != null) {
            score += apArticle.getComment() * ArticleConstants.HOT_ARTICLE_COMMENT_WEIGHT;
        }
        if (apArticle.getCollection() != null) {
            score += apArticle.getCollection() * ArticleConstants.HOT_ARTICLE_COLLECTION_WEIGHT;
        }

        return score;
    }
}
