package com.heima.article.service.impl;

import com.alibaba.druid.sql.visitor.functions.Now;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.heima.article.dto.ArticleStreamMessage;
import com.heima.article.entity.ApArticle;
import com.heima.article.entity.ArticleCache;
import com.heima.article.service.IApArticleService;
import com.heima.article.service.IComputeService;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Service;

import java.time.LocalDate;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeUnit;

@Service
public class ComputeServiceImpl implements IComputeService {

    /** Zset key holding hot articles of all channels combined (channel id 0 = "all"). */
    private static final String ALL_CHANNELS_KEY = "hot_article_0";

    /** Prefix for per-channel hot-article zset keys; the channel id is appended. */
    private static final String CHANNEL_KEY_PREFIX = "hot_article_";

    /**
     * Cache TTL in minutes: 23h58m, so entries written by today's run expire just
     * before tomorrow's recomputation refreshes them.
     * BUGFIX: compute() previously used 23 * 50 * 58 (~46 days) while update()
     * used 23 * 60 + 58 — the two write paths now share this constant.
     */
    private static final long HOT_ARTICLE_TTL_MINUTES = 23 * 60 + 58;

    /** Same-day behaviour counts triple when aggregating stream increments. */
    private static final int TODAY_WEIGHT = 3;

    @Autowired
    private IApArticleService articleService;

    @Autowired
    private StringRedisTemplate redisTemplate;

    /**
     * Recomputes the hot score of every live article published in the last five
     * full days and caches the results in Redis sorted sets — one set per
     * channel plus the overall set.
     */
    @Override
    public void compute() {
        System.out.println("热点文章分值开始计算......");

        // Window: [midnight five days ago, midnight today). java.time replaces the
        // deprecated Date(year, month, date) constructor; minusDays is DST-safe.
        ZonedDateTime todayStart = LocalDate.now().atStartOfDay(ZoneId.systemDefault());
        Date end = Date.from(todayStart.toInstant());
        Date start = Date.from(todayStart.minusDays(5).toInstant());

        LambdaQueryWrapper<ApArticle> query = new LambdaQueryWrapper<>();
        query.eq(ApArticle::getIsDelete, false);
        query.eq(ApArticle::getIsDown, false);
        query.lt(ApArticle::getPublishTime, end);
        query.gt(ApArticle::getPublishTime, start);

        for (ApArticle article : articleService.list(query)) {
            double score = computeScore(article);
            String value = toCacheJson(article);

            // Write the score into both the overall set and the article's channel set.
            cacheScore(ALL_CHANNELS_KEY, value, score);
            cacheScore(CHANNEL_KEY_PREFIX + article.getChannelId(), value, score);
        }
    }

    /**
     * Applies an aggregated behaviour message (views/likes/comments/collects) to
     * one article: updates its cached score in Redis and its counters in the DB.
     *
     * @param message aggregated per-article increments for the current window
     */
    @Override
    public void update(ArticleStreamMessage message) {
        // The article may have been deleted between aggregation and delivery;
        // previously this caused an NPE on getChannelId().
        ApArticle article = articleService.getById(message.getArticleId());
        if (article == null) {
            System.out.println("文章不存在, 跳过分值更新, articleId: " + message.getArticleId());
            return;
        }

        // Score contributed by this batch of behaviour events.
        double scorePlus = computeScorePlus(message);

        String channelKey = CHANNEL_KEY_PREFIX + article.getChannelId();
        String value = toCacheJson(article);

        Double score = redisTemplate.opsForZSet().score(channelKey, value);
        if (score == null) {
            // Not cached yet: seed both sets with historical score + this increment.
            double totalScore = computeScore(article) + scorePlus;
            cacheScore(channelKey, value, totalScore);
            cacheScore(ALL_CHANNELS_KEY, value, totalScore);
            System.out.println("在redis中新增文章数据: 文章内容: " + value + " ,总分值:  " + totalScore);
        } else {
            // BUGFIX: the per-channel set was previously left stale — only the
            // overall set received the increment. Increment both sets.
            redisTemplate.opsForZSet().incrementScore(channelKey, value, scorePlus);
            redisTemplate.opsForZSet().incrementScore(ALL_CHANNELS_KEY, value, scorePlus);
            System.out.println("文章已经存在与redis中,添加增量分值: " + scorePlus);
        }

        // Persist the raw counters. setSql concatenates the increments into SQL;
        // NOTE(review): safe only if the message getters are numeric — TODO confirm.
        LambdaUpdateWrapper<ApArticle> updateWrapper = new LambdaUpdateWrapper<>();
        updateWrapper.eq(ApArticle::getId, message.getArticleId());
        updateWrapper.setSql("views = views + " + message.getView());
        updateWrapper.setSql("likes = likes + " + message.getLike());
        updateWrapper.setSql("comment = comment + " + message.getComment());
        updateWrapper.setSql("collection = collection + " + message.getCollect());
        articleService.update(updateWrapper);
        System.out.println("更新文章表数据: 阅读量: " + message.getView() + ",点赞量: " + message.getLike() + ",评论量: " + message.getComment()
                + " ,收藏量: " + message.getCollect());
    }

    /** Serialises the cache projection of an article to the JSON stored as the zset member. */
    private String toCacheJson(ApArticle article) {
        ArticleCache articleCache = new ArticleCache();
        BeanUtils.copyProperties(article, articleCache);
        return JSON.toJSONString(articleCache);
    }

    /** Adds (or overwrites) a member in a hot-article zset and refreshes the set's TTL. */
    private void cacheScore(String key, String value, double score) {
        redisTemplate.opsForZSet().add(key, value, score);
        redisTemplate.expire(key, HOT_ARTICLE_TTL_MINUTES, TimeUnit.MINUTES);
    }

    /**
     * Score contributed by today's aggregated increments: base weights
     * (view 1, like 3, comment 5, collect 8) multiplied by TODAY_WEIGHT.
     *
     * @param message aggregated per-article increments
     * @return weighted incremental score
     */
    private double computeScorePlus(ArticleStreamMessage message) {
        double score = 0;
        score += message.getView() * 1 * TODAY_WEIGHT;
        score += message.getLike() * 3 * TODAY_WEIGHT;
        score += message.getComment() * 5 * TODAY_WEIGHT;
        score += message.getCollect() * 8 * TODAY_WEIGHT;
        return score;
    }

    /**
     * Historical (unweighted) score of an article:
     * views*1 + likes*3 + comments*5 + collections*8, null counters count as 0.
     *
     * @param article article with persisted behaviour counters
     * @return accumulated historical score
     */
    private double computeScore(ApArticle article) {
        double score = 0;
        if (article.getViews() != null) {
            score += article.getViews() * 1;
        }
        if (article.getLikes() != null) {
            score += article.getLikes() * 3;
        }
        if (article.getComment() != null) {
            score += article.getComment() * 5;
        }
        if (article.getCollection() != null) {
            score += article.getCollection() * 8;
        }
        return score;
    }
}
