package com.heima.article.service.impl;

import com.alibaba.fastjson.JSON;
import com.heima.apis.wemedia.IWemediaClient;
import com.heima.article.mapper.ApArticleMapper;
import com.heima.article.service.HotArticleService;
import com.heima.common.constants.ArticleConstants;
import com.heima.common.redis.CacheService;
import com.heima.model.article.pojos.ApArticle;
import com.heima.model.article.vos.HotArticleVo;
import com.heima.model.common.dtos.ResponseResult;
import com.heima.model.wemedia.pojos.WmChannel;
import lombok.extern.slf4j.Slf4j;
import org.joda.time.DateTime;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;

@Service
@Slf4j
@Transactional
public class HotArticleServiceImpl implements HotArticleService {
    // Overall flow:
    // 1. Query all channels; for every channel, cache its 30 highest-scoring
    //    articles in Redis as that channel's hot content.
    // 2. Sort ALL articles by score (regardless of channel), keep the top 30,
    //    and cache them in Redis as the recommendation-page ("default tag") data.

    @Autowired
    private ApArticleMapper apArticleMapper;

    @Autowired
    private IWemediaClient wemediaClient;

    @Autowired
    private CacheService cacheService;

    /**
     * Computes hot articles and refreshes the per-channel Redis caches.
     * Invoked by the scheduled task annotated with
     * {@code @XxlJob("computeHotArticleJob")} in job.ComputeHotArticleJob.
     */
    @Override
    public void computeHotArticle() {
        // 1. Query articles published within the last 5 days.
        // BUG FIX: was minusDays(300), contradicting both the surrounding comments
        // and the mapper method name findArticleListByLast5days. NOTE(review): if
        // 300 was a deliberate widening for sparse test data, keep that override in
        // a test profile, not here — confirm with the original author.
        Date dateParam = DateTime.now().minusDays(5).toDate();
        List<ApArticle> apArticleList = apArticleMapper.findArticleListByLast5days(dateParam);

        // 2. Compute a score for every article. ApArticle has no score field, so
        //    each article is copied into a HotArticleVo that carries one.
        List<HotArticleVo> hotArticleVoList = computeArticleScores(apArticleList);

        // 3. For every channel ("tag"), cache its 30 highest-scoring articles.
        cacheTagToRedis(hotArticleVoList);
    }

    /**
     * Caches the 30 highest-scoring articles of every channel into Redis, plus a
     * channel-independent top-30 list for the recommendation page.
     *
     * Relies on the remote wemedia endpoint GET /api/v1/channel/list exposed via
     * the Feign client IWemediaClient (requires
     * {@code @EnableFeignClients("com.heima.apis")} on this service's application
     * class and {@code @FeignClient("leadnews-wemedia")} on the API interface).
     *
     * @param hotArticleVoList all scored articles
     */
    private void cacheTagToRedis(List<HotArticleVo> hotArticleVoList) {
        log.info("远程接口查询所有频道");
        ResponseResult responseResult = wemediaClient.getChannels();
        log.info("远程接口查询所有频道返回结果：{}", responseResult.getData());
        if (responseResult.getCode().equals(200)) { // remote call succeeded
            // The payload arrives as a generic object; round-trip through JSON to
            // obtain typed WmChannel instances.
            String channelJson = JSON.toJSONString(responseResult.getData());
            List<WmChannel> wmChannels = JSON.parseArray(channelJson, WmChannel.class);
            if (wmChannels != null && wmChannels.size() > 0) {
                for (WmChannel wmChannel : wmChannels) {
                    log.info("查询频道{}下的所有带分值的文章", wmChannel.getName());
                    // Keep only this channel's articles. Null-safe: articles with a
                    // null channelId are skipped instead of throwing an NPE (the
                    // original null guard was commented out; a debug peek/println
                    // leftover was removed).
                    List<HotArticleVo> hotArticleVos = hotArticleVoList.stream()
                            .filter(x -> x.getChannelId() != null && x.getChannelId().equals(wmChannel.getId()))
                            .collect(Collectors.toList());
                    // Cache this channel's top 30. key: HOT_ARTICLE_FIRST_PAGE + channelId
                    sortAndCache(hotArticleVos, ArticleConstants.HOT_ARTICLE_FIRST_PAGE + wmChannel.getId());
                }
            }
        }
        log.info("推荐文章(不分频道)");
        // Recommendation page: top 30 across ALL channels, cached under the default tag.
        sortAndCache(hotArticleVoList, ArticleConstants.HOT_ARTICLE_FIRST_PAGE + ArticleConstants.DEFAULT_TAG);
    }

    /**
     * Sorts articles by score (descending), keeps at most the top 30, and stores
     * them in Redis as a JSON string under the given key.
     *
     * @param hotArticleVos scored articles to rank
     * @param key           Redis key to store the JSON list under
     */
    private void sortAndCache(List<HotArticleVo> hotArticleVos, String key) {
        // limit(30) replaces the original sort-then-subList two-step (the original
        // comment itself questioned why limit(30) was not used).
        List<HotArticleVo> topArticles = hotArticleVos.stream()
                .sorted(Comparator.comparing(HotArticleVo::getScore).reversed())
                .limit(30)
                .collect(Collectors.toList());
        cacheService.set(key, JSON.toJSONString(topArticles));
    }

    /**
     * Builds a scored view of every article.
     *
     * Renamed from computeHotArticle(List) to avoid confusingly overloading the
     * public no-arg entry point of the same name.
     *
     * @param apArticleList raw articles (may be null or empty)
     * @return one HotArticleVo per article, each carrying its computed score;
     *         an empty list when the input is null or empty
     */
    private List<HotArticleVo> computeArticleScores(List<ApArticle> apArticleList) {
        List<HotArticleVo> hotArticleVoList = new ArrayList<>();
        if (apArticleList != null && apArticleList.size() > 0) {
            for (ApArticle apArticle : apArticleList) {
                HotArticleVo hot = new HotArticleVo();
                BeanUtils.copyProperties(apArticle, hot);
                hot.setScore(computeScore(apArticle)); // attach the total score
                hotArticleVoList.add(hot);
            }
        }
        return hotArticleVoList;
    }

    /**
     * Computes a single article's heat score as a weighted sum of its interaction
     * counters; null counters contribute nothing.
     *
     * @param apArticle the article to score
     * @return the weighted score (0 when every counter is null)
     */
    private Integer computeScore(ApArticle apArticle) {
        int score = 0; // primitive accumulator avoids repeated autoboxing on +=
        if (apArticle.getLikes() != null) {
            score += apArticle.getLikes() * ArticleConstants.HOT_ARTICLE_LIKE_WEIGHT;
        }
        if (apArticle.getViews() != null) {
            score += apArticle.getViews(); // view weight is 1, so no multiplier
        }
        if (apArticle.getComment() != null) {
            score += apArticle.getComment() * ArticleConstants.HOT_ARTICLE_COMMENT_WEIGHT;
        }
        if (apArticle.getCollection() != null) {
            score += apArticle.getCollection() * ArticleConstants.HOT_ARTICLE_COLLECTION_WEIGHT;
        }
        return score;
    }
}