package com.heima.article.service.Impl;

import com.alibaba.fastjson.JSON;
import com.heima.article.mapper.ApArticleMapper;
import com.heima.article.service.HotArticleService;
import com.heima.common.constants.ArticleConstants;
import com.heima.common.redis.CacheService;
import com.heima.feign.wemedia.WemediaFeignClient;
import com.heima.model.article.pojos.ApArticle;
import com.heima.model.article.pojos.ApArticleContent;
import com.heima.model.article.vos.HotArticleVo;
import com.heima.model.common.dtos.ResponseResult;
import com.heima.model.wemedia.pojos.WmChannel;
import lombok.extern.slf4j.Slf4j;
import okhttp3.Cache;
import org.joda.time.DateTime;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

/**
 * @Author ItZheng
 * @Date 2023/6/24 15:24
 * @Version 1.5
 */
@Service
@Slf4j
public class HotArticleServiceImpl implements HotArticleService {

    /** How many articles are cached per channel / for the recommend page. */
    private static final int TOP_N = 5;

    @Autowired
    private ApArticleMapper apArticleMapper;

    @Autowired
    private WemediaFeignClient wemediaFeignClient;

    @Autowired
    private CacheService cacheService;

    /**
     * Computes hot articles: loads every article published in the last five
     * days, assigns each one a weighted score, and caches the top {@value #TOP_N}
     * per channel — plus an overall top-{@value #TOP_N} list for the recommend
     * page — in Redis as JSON arrays of {@link HotArticleVo}.
     */
    @Override
    public void computeHotArticle() {
        // 1. Load all articles from the last five days.
        Date selectDate = DateTime.now().minusDays(5).toDate();
        List<ApArticle> apArticleList = apArticleMapper.findArticleListByLast5days(selectDate);

        // 2. Score each article and wrap it in a vo carrying the score.
        List<HotArticleVo> hotArticleVoList = apArticleList.stream().map(apArticle -> {
            HotArticleVo hotArticleVo = new HotArticleVo();
            BeanUtils.copyProperties(apArticle, hotArticleVo);
            hotArticleVo.setScore(computeScore(apArticle));
            return hotArticleVo;
        }).collect(Collectors.toList());

        // 3. Cache the top articles for every channel. The channel ids are
        //    taken from the loaded articles themselves, so no remote call to
        //    the wemedia service is needed here.
        List<Integer> channelIds = apArticleList.stream()
                .map(ApArticle::getChannelId).distinct()
                .collect(Collectors.toList());

        for (Integer channelId : channelIds) {
            // Objects.equals is null-safe: an article with a null channel id
            // must not blow up the whole job.
            List<HotArticleVo> channelArticles = hotArticleVoList.stream()
                    .filter(vo -> Objects.equals(channelId, vo.getChannelId()))
                    .collect(Collectors.toList());
            cacheTopArticles(channelArticles,
                    ArticleConstants.HOT_ARTICLE_FIRST_PAGE + channelId);
        }

        // 4. Cache the overall top articles for the recommend (default) page.
        cacheTopArticles(hotArticleVoList,
                ArticleConstants.HOT_ARTICLE_FIRST_PAGE + ArticleConstants.DEFAULT_TAG);
    }

    /**
     * Sorts the given articles by score (highest first), keeps the top
     * {@value #TOP_N}, and stores them in Redis under {@code key} as JSON.
     *
     * @param articles candidate articles (may be empty — an empty list is cached)
     * @param key      Redis key to write the JSON array to
     */
    private void cacheTopArticles(List<HotArticleVo> articles, String key) {
        // Comparator.comparing(...).reversed() instead of subtracting the two
        // scores: subtraction overflows for large values and boxes needlessly.
        List<HotArticleVo> topArticles = articles.stream()
                .sorted(Comparator.comparing(HotArticleVo::getScore).reversed())
                .limit(TOP_N)
                .collect(Collectors.toList());
        cacheService.set(key, JSON.toJSONString(topArticles));
    }

    /**
     * Computes the weighted hotness score of one article from its likes,
     * views, comments and collections. A {@code null} counter contributes 0.
     *
     * @param apArticle article whose behaviour counters are scored
     * @return weighted sum of the article's counters
     */
    private Integer computeScore(ApArticle apArticle) {
        int score = 0;
        if (apArticle.getLikes() != null) {
            score += apArticle.getLikes() * ArticleConstants.HOT_ARTICLE_LIKE_WEIGHT;
        }
        if (apArticle.getViews() != null) {
            // Views are weighted 1:1.
            score += apArticle.getViews();
        }
        if (apArticle.getComment() != null) {
            score += apArticle.getComment() * ArticleConstants.HOT_ARTICLE_COMMENT_WEIGHT;
        }
        if (apArticle.getCollection() != null) {
            score += apArticle.getCollection() * ArticleConstants.HOT_ARTICLE_COLLECTION_WEIGHT;
        }
        return score;
    }

}
