package com.heima.article.service.impl;

import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.heima.article.mapper.ApArticleConfigMapper;
import com.heima.article.mapper.ApArticleContentMapper;
import com.heima.article.mapper.ApArticleMapper;
import com.heima.article.service.ApArticleService;
import com.heima.common.constants.ArticleConstants;
import com.heima.common.exception.CustomException;
import com.heima.common.redis.CacheService;
import com.heima.model.article.dtos.ArticleDto;
import com.heima.model.article.dtos.ArticleHomeDto;
import com.heima.model.article.pojos.ApArticle;
import com.heima.model.article.pojos.ApArticleConfig;
import com.heima.model.article.pojos.ApArticleContent;
import com.heima.model.article.vos.HotArticleVo;
import com.heima.model.common.dtos.ResponseResult;
import com.heima.model.common.enums.AppHttpCodeEnum;
import com.heima.model.mess.ArticleVisitStreamMess;
import com.heima.model.search.vos.SearchArticleVo;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;

@Service
@Slf4j
public class ApArticleServiceImpl extends ServiceImpl<ApArticleMapper,ApArticle> implements ApArticleService {

    /** Default page size used when the client supplies none. */
    private static final int DEFAULT_PAGE_SIZE = 10;

    /** Hard cap on the page size a client may request. */
    private static final int MAX_PAGE_SIZE = 30;

    /** Maximum number of entries kept in each cached hot-article list. */
    private static final int MAX_CACHED_ARTICLES = 30;

    @Autowired
    private ApArticleMapper apArticleMapper;

    @Autowired
    private ApArticleConfigMapper configMapper;

    @Autowired
    private ApArticleContentMapper contentMapper;

    @Autowired
    private KafkaTemplate<String,String> kafkaTemplate;

    @Autowired
    private CacheService cacheService;

    /**
     * Loads a page of articles for the app feed, normalizing the paging
     * parameters before delegating to the mapper.
     *
     * @param loadtype load direction flag (1 or 2 — the original comments
     *                 disagree on which means "newest" vs "more"; defaults
     *                 to 1 when null)
     * @param dto      paging parameters: size, channel tag, behot time window
     * @return the matching articles wrapped in a {@link ResponseResult}
     */
    @Override
    public ResponseResult load(Short loadtype, ArticleHomeDto dto) {
        //1. Normalize the request parameters.
        if(null == loadtype){
            loadtype = 1;
        }

        if(null == dto.getSize() || dto.getSize() == 0){
            dto.setSize(DEFAULT_PAGE_SIZE);
        }

        // Cap the page size so a client cannot request an unbounded result set.
        dto.setSize(Math.min(dto.getSize(), MAX_PAGE_SIZE));

        if(StringUtils.isEmpty(dto.getTag())){
            dto.setTag("__all__");
        }

        // When neither bound is given, open the window to "everything up to now".
        // NOTE(review): if only ONE bound is null it stays null — confirm the
        // mapper tolerates that, or normalize each bound independently.
        if(null == dto.getMinBehotTime() && dto.getMaxBehotTime() == null){
            dto.setMinBehotTime(new Date(0));
            dto.setMaxBehotTime(new Date());
        }

        //2. Query the database through the mapper.
        List<ApArticle> list = apArticleMapper.load(loadtype, dto);

        return ResponseResult.okResult(list);
    }

    /**
     * Loads the article list, serving the hot-article Redis cache for
     * first-page requests and falling back to the database otherwise.
     *
     * @param dto       paging parameters (the tag selects the cached channel)
     * @param type      load direction flag, passed through to {@link #load}
     * @param firstPage true when the request is for the first page
     * @return cached hot articles on a first-page cache hit, otherwise the
     *         database-backed result of {@link #load}
     */
    @Override
    public ResponseResult load2(ArticleHomeDto dto, Short type, boolean firstPage) {
        if(firstPage){
            String json = cacheService.get(ArticleConstants.HOT_ARTICLE_FIRST_PAGE + dto.getTag());
            if(StringUtils.isNotEmpty(json)){
                List<HotArticleVo> voList = JSON.parseArray(json, HotArticleVo.class);
                return ResponseResult.okResult(voList);
            }
        }
        // Cache miss (or not the first page): hit the database.
        return load(type, dto);
    }

    /**
     * Saves an app-side article: inserts article + config + content rows for a
     * new article, or updates the article and its content for an existing one,
     * then publishes the article to Kafka for ES synchronization.
     *
     * @param dto the article payload; must be non-null with non-empty content
     * @return the saved article's id wrapped in a {@link ResponseResult}
     * @throws CustomException with {@code PARAM_REQUIRE} when dto or its
     *                         content is missing
     */
    @Override
    public ResponseResult saveArticle(ArticleDto dto) {
        //1. Validate the parameters.
        if(null == dto || StringUtils.isEmpty(dto.getContent())){
            throw new CustomException(AppHttpCodeEnum.PARAM_REQUIRE);
        }

        //2. Insert when the article id is absent, otherwise update.
        ApArticle apArticle = new ApArticle();
        BeanUtils.copyProperties(dto, apArticle);

        if(null == dto.getId()){
            // Insert the article row; MyBatis-Plus back-fills the generated id.
            save(apArticle);

            // Default config for a freshly published article: visible,
            // commentable, forwardable, not deleted.
            ApArticleConfig config = new ApArticleConfig();
            config.setArticleId(apArticle.getId());
            config.setIsDown(false);
            config.setIsComment(true);
            config.setIsDelete(false);
            config.setIsForward(true);

            configMapper.insert(config);

            // Content lives in its own table keyed by article id.
            ApArticleContent articleContent = new ApArticleContent();
            articleContent.setContent(dto.getContent());
            articleContent.setArticleId(apArticle.getId());

            contentMapper.insert(articleContent);

        }else{
            // Update the article row itself.
            updateById(apArticle);

            ApArticleContent articleContent = contentMapper.selectOne(
                    Wrappers.<ApArticleContent>lambdaQuery()
                            .eq(ApArticleContent::getArticleId, dto.getId()));
            if (articleContent == null) {
                // Defensive: the content row is missing for an existing
                // article — recreate it instead of throwing an NPE.
                articleContent = new ApArticleContent();
                articleContent.setArticleId(dto.getId());
                articleContent.setContent(dto.getContent());
                contentMapper.insert(articleContent);
            } else {
                articleContent.setContent(dto.getContent());
                contentMapper.updateById(articleContent);
            }
        }

        // Publish to Kafka so the search service can index the article.
        transportMsg(apArticle, dto.getContent());

        //3. Return the article id to the caller.
        return ResponseResult.okResult(apArticle.getId());
    }

    /**
     * Updates an article's hot score from a visit-stream aggregation message
     * and refreshes the cached hot-article lists (the article's own channel
     * and the default/recommendation channel).
     *
     * @param mess aggregated visit counts (likes, views, comments, collects)
     */
    @Override
    public void updateScore(ArticleVisitStreamMess mess) {
        //1. Persist the accumulated counters.
        ApArticle apArticle = updateArticle(mess);
        //2. Recompute the score; same-day activity is weighted x3.
        Integer score = compute(apArticle);
        score *= 3;

        //3. Refresh the cache for the article's own channel.
        replaceDataToRedis(apArticle, score,
                ArticleConstants.HOT_ARTICLE_FIRST_PAGE + apArticle.getChannelId());

        //4. Refresh the cache for the default (recommendation) channel.
        replaceDataToRedis(apArticle, score,ArticleConstants.HOT_ARTICLE_FIRST_PAGE
                + ArticleConstants.DEFAULT_TAG);
    }

    /**
     * Inserts or updates the article's entry in the cached hot-article list
     * stored under {@code key}, keeping the list sorted by score descending
     * and capped at {@link #MAX_CACHED_ARTICLES} entries.
     */
    private void replaceDataToRedis(ApArticle apArticle, Integer score, String key) {
        String json = cacheService.get(key);
        List<HotArticleVo> channelData;

        if(StringUtils.isNotEmpty(json)){
            // The channel already has cached data.
            channelData = JSON.parseArray(json, HotArticleVo.class);

            // If the article is already cached, just refresh its score.
            for (HotArticleVo vo : channelData) {
                if(vo.getId().equals(apArticle.getId())){
                    vo.setScore(score);
                    sortAndCache(channelData, key);
                    return;
                }
            }

            // Not cached yet: append it and let the sort/limit below decide
            // whether it displaces a lower-scored entry.
            channelData.add(toHotArticleVo(apArticle, score));
        }else{
            // No cached data for this channel yet — start a fresh list.
            channelData = new ArrayList<>();
            // BUG FIX: the original built the vo but never added it, so an
            // EMPTY list was cached and the article was silently dropped.
            channelData.add(toHotArticleVo(apArticle, score));
        }

        sortAndCache(channelData, key);
    }

    /** Builds a {@link HotArticleVo} view of the article with the given score. */
    private HotArticleVo toHotArticleVo(ApArticle apArticle, Integer score) {
        HotArticleVo vo = new HotArticleVo();
        BeanUtils.copyProperties(apArticle, vo);
        vo.setScore(score);
        return vo;
    }

    /** Sorts by score descending, trims to the cache cap, and writes back to Redis. */
    private void sortAndCache(List<HotArticleVo> channelData, String key) {
        List<HotArticleVo> sorted = channelData.stream()
                .sorted(Comparator.comparing(HotArticleVo::getScore).reversed())
                .limit(MAX_CACHED_ARTICLES)
                .collect(Collectors.toList());
        cacheService.set(key, JSON.toJSONString(sorted));
    }

    /**
     * Computes the article's hot score as a weighted sum of its counters:
     * views count 1:1, likes/comments/collections use the weights declared
     * in {@link ArticleConstants}. Null counters contribute 0.
     */
    private Integer compute(ApArticle apArticle) {
        Integer score = 0;

        if(apArticle.getLikes() != null){
            score += apArticle.getLikes()* ArticleConstants.HOT_ARTICLE_LIKE_WEIGHT;
        }

        if(apArticle.getViews() != null){
            score += apArticle.getViews();
        }

        if(apArticle.getComment() != null){
            score += apArticle.getComment()* ArticleConstants.HOT_ARTICLE_COMMENT_WEIGHT;
        }

        if(apArticle.getCollection() != null){
            score += apArticle.getCollection()* ArticleConstants.HOT_ARTICLE_COLLECTION_WEIGHT;
        }

        return score;
    }

    /**
     * Adds the message's aggregated counters onto the article's stored
     * counters (treating null as 0) and persists the result.
     *
     * <p>NOTE(review): assumes the article id in the message exists —
     * {@code selectById} returning null would NPE here; confirm upstream
     * guarantees or add a guard.
     */
    private ApArticle updateArticle(ArticleVisitStreamMess mess) {
        ApArticle apArticle = apArticleMapper.selectById(mess.getArticleId());

        // Accumulate the counters from this aggregation window.
        apArticle.setCollection((apArticle.getCollection() == null? 0:apArticle.getCollection())
                + mess.getCollect());
        apArticle.setComment((apArticle.getComment() == null ? 0:apArticle.getComment())
                + mess.getComment());
        apArticle.setLikes((apArticle.getLikes()==null?0:apArticle.getLikes())+mess.getLike());
        apArticle.setViews((apArticle.getViews()==null?0:apArticle.getViews())+mess.getView());

        apArticleMapper.updateById(apArticle);

        return apArticle;
    }

    /**
     * Publishes the article (with its content and static page URL) to the
     * ES-sync Kafka topic so the search service can index it.
     */
    private void transportMsg(ApArticle apArticle, String content) {
        SearchArticleVo vo = new SearchArticleVo();
        BeanUtils.copyProperties(apArticle, vo);
        vo.setContent(content);
        // TODO(review): placeholder URL — the real static page address should
        // be produced by the page-generation pipeline and set here.
        vo.setStaticUrl("http://www.baidu.com");

        kafkaTemplate.send(ArticleConstants.ARTICLE_ES_SYNC_TOPIC, JSON.toJSONString(vo));
    }
}
