package com.heima.article.service.impl;

import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.heima.apis.wemedia.IWemediaClient;
import com.heima.article.mapper.ApArticleMapper;
import com.heima.article.service.HotArticleService;
import com.heima.common.redis.CacheService;
import com.heima.model.article.pojos.ApArticle;
import com.heima.model.common.constants.ArticleConstants;
import com.heima.model.common.constants.BeHaviorConstants;
import com.heima.model.common.dtos.app.LikesBehaviorDto;
import com.heima.model.common.dtos.common.ResponseResult;
import com.heima.model.common.vo.HotApArticleVo;
import com.heima.model.user.pojos.ApUser;
import com.heima.model.wemedia.pojos.WmChannel;
import com.heima.utils.thread.AppThreadLocalUtil;
import org.joda.time.DateTime;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import javax.xml.crypto.Data;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.stream.Collectors;

@Service
public class HotArticleServiceImpl implements HotArticleService {

    /** 热点文章统计的时间窗口（天）。NOTE(review): mapper 方法名写的是 "Last5Days"，实际窗口是 15 天。 */
    private static final int HOT_ARTICLE_DAYS = 15;
    /** 每个频道最多缓存的热点文章数量。 */
    private static final int MAX_CACHED_ARTICLES = 30;

    @Autowired
    private CacheService cacheService;
    @Autowired
    private ApArticleMapper articleMapper;
    @Autowired
    private IWemediaClient iWemediaClient;

    /**
     * 计算热点文章：
     * 1. 查询最近 {@value #HOT_ARTICLE_DAYS} 天内发布的所有文章；
     * 2. 按 浏览/点赞/评论/收藏 的加权和为每篇文章打分；
     * 3. 每个频道取分值最高的 {@value #MAX_CACHED_ARTICLES} 篇存入 Redis，
     *    并额外缓存一份跨频道的推荐列表。
     * 若查询频道的远程调用失败（code != 200）则直接放弃本次计算。
     */
    @Override
    public void computeHotArticle() {
        // 统计窗口的起始时间（当前时间往前推 HOT_ARTICLE_DAYS 天）
        Date since = Date.from(Instant.now().minus(HOT_ARTICLE_DAYS, ChronoUnit.DAYS));
        // 调用 mapper 获取窗口内所有的文章信息
        List<ApArticle> recentArticles = articleMapper.findArticleListByLast5Days(since);

        // 计算每篇文章的分值
        List<HotApArticleVo> hotArticleVoList = new ArrayList<>(recentArticles.size());
        for (ApArticle apArticle : recentArticles) {
            HotApArticleVo hotArticleVo = new HotApArticleVo();
            // 属性拷贝：文章基础字段 -> VO
            BeanUtils.copyProperties(apArticle, hotArticleVo);
            hotArticleVo.setScore(computeScore(apArticle));
            hotArticleVoList.add(hotArticleVo);
        }

        // 获取自媒体端所有的频道信息；远程调用失败则放弃本次计算
        ResponseResult channels = iWemediaClient.channels();
        if (channels.getCode() != 200) {
            return;
        }
        String jsonString = JSON.toJSONString(channels.getData());
        List<WmChannel> wmChannelList = JSON.parseArray(jsonString, WmChannel.class);

        // 为每个频道选出热点较高的文章, 放入 redis 中，根据频道分类
        for (WmChannel wmChannel : wmChannelList) {
            List<HotApArticleVo> channelArticles = hotArticleVoList.stream()
                    // Objects.equals 避免 channelId 为 null 的文章导致 NPE
                    .filter(vo -> Objects.equals(vo.getChannelId(), wmChannel.getId()))
                    .collect(Collectors.toList());
            sortAndCache(ArticleConstants.HOT_ARTICLE_FIRST_PAGE + wmChannel.getId(), channelArticles);
        }
        // 设置跨频道的推荐文章列表
        sortAndCache(ArticleConstants.HOT_ARTICLE_FIRST_PAGE + ArticleConstants.DEFAULT_TAG, hotArticleVoList);
    }

    /**
     * 计算单篇文章的热度分值：
     * 浏览 *1 + 点赞 *LIKE_WEIGHT + 评论 *COMMENT_WEIGHT + 收藏 *COLLECTION_WEIGHT。
     * 计数为 null 或非正数时不计分。使用 long 累加，避免装箱及 int 溢出。
     *
     * @param apArticle 待打分的文章
     * @return 该文章的热度分值
     */
    private long computeScore(ApArticle apArticle) {
        long score = 0;
        // 浏览一次加 1 分
        if (apArticle.getViews() != null && apArticle.getViews() > 0) {
            score += apArticle.getViews();
        }
        // 点赞按权重加分
        if (apArticle.getLikes() != null && apArticle.getLikes() > 0) {
            score += (long) apArticle.getLikes() * ArticleConstants.HOT_ARTICLE_LIKE_WEIGHT;
        }
        // 评论按权重加分
        if (apArticle.getComment() != null && apArticle.getComment() > 0) {
            score += (long) apArticle.getComment() * ArticleConstants.HOT_ARTICLE_COMMENT_WEIGHT;
        }
        // 收藏按权重加分
        if (apArticle.getCollection() != null && apArticle.getCollection() > 0) {
            score += (long) apArticle.getCollection() * ArticleConstants.HOT_ARTICLE_COLLECTION_WEIGHT;
        }
        return score;
    }

    /**
     * 按分值降序排序，截取前 {@value #MAX_CACHED_ARTICLES} 篇，序列化为 JSON 存入 redis。
     *
     * @param key      redis 缓存键（首页前缀 + 频道标识）
     * @param articles 该频道下待排序的文章集合
     */
    private void sortAndCache(String key, List<HotApArticleVo> articles) {
        List<HotApArticleVo> top = articles.stream()
                .sorted(Comparator.comparing(HotApArticleVo::getScore).reversed())
                .limit(MAX_CACHED_ARTICLES)
                .collect(Collectors.toList());
        cacheService.set(key, JSON.toJSONString(top));
    }
}
