package com.heima.article.service.impl;

import com.alibaba.cloud.commons.lang.StringUtils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.heima.article.mapper.ApArticleConfigMapper;
import com.heima.article.mapper.ApArticleContentMapper;
import com.heima.article.mapper.ApArticleMapper;
import com.heima.article.service.ApArticleService;
import com.heima.common.constants.ArticleConstants;
import com.heima.common.redis.CacheService;
import com.heima.model.article.dtos.ArticleDto;
import com.heima.model.article.dtos.ArticleHomeDto;
import com.heima.model.article.pojos.ApArticle;
import com.heima.model.article.pojos.ApArticleConfig;
import com.heima.model.article.pojos.ApArticleContent;
import com.heima.model.article.vo.HotArticleVo;
import com.heima.model.common.dtos.ResponseResult;
import com.heima.model.common.enums.AppHttpCodeEnum;
import com.heima.model.mess.ArticleVisitStreamMess;
import com.heima.model.search.pojo.SearchArticleVo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;

@Service
@Slf4j
public class ApArticleServiceImpl extends ServiceImpl<ApArticleMapper, ApArticle> implements ApArticleService {

    @Autowired
    private ApArticleMapper apArticleMapper;

    @Autowired
    private ApArticleContentMapper contentMapper;

    @Autowired
    private ApArticleConfigMapper configMapper;

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Autowired
    private CacheService cacheService;

    // Maximum number of articles that may be loaded in one page request.
    private static final short MAX_PAGE_SIZE = 50;

    // Maximum number of entries kept in a hot-article list cached in redis.
    private static final int HOT_ARTICLE_LIST_SIZE = 30;

    /**
     * Loads a page of articles for the home feed directly from the database.
     *
     * @param dto  paging parameters: page size, channel tag, and the behot-time
     *             cursors used by the mapper for "load more" / "load newer"
     * @param type load direction constant (passed through to the mapper query)
     * @return ok result wrapping the matching {@code List<ApArticle>}
     */
    @Override
    public ResponseResult load(ArticleHomeDto dto, Short type) {
        // 1. Normalize the page size: default to 10, reject non-positive values
        //    (the original only caught 0, letting negatives through to SQL),
        //    and cap at MAX_PAGE_SIZE to protect the database.
        Integer size = dto.getSize();
        if (size == null || size <= 0) {
            size = 10;
        }
        size = Math.min(size, MAX_PAGE_SIZE);
        dto.setSize(size);

        // 2. Fall back to the default channel when none was supplied.
        if (StringUtils.isEmpty(dto.getTag())) {
            dto.setTag(ArticleConstants.DEFAULT_TAG);
        }

        // 3. Default both time cursors to "now" so the mapper's range
        //    conditions always have a value to compare against.
        if (dto.getMaxBehotTime() == null) {
            dto.setMaxBehotTime(new Date());
        }
        if (dto.getMinBehotTime() == null) {
            dto.setMinBehotTime(new Date());
        }

        List<ApArticle> list = apArticleMapper.list(dto, type);
        return ResponseResult.okResult(list);
    }

    /**
     * Loads a page of articles, preferring the cached hot-article first page.
     *
     * @param dto       paging parameters (tag selects the cache key)
     * @param type      load direction constant, used on the database fallback
     * @param firstPage true when the client requests the first page, which is
     *                  the only page served from the hot-article cache
     * @return cached {@code List<HotArticleVo>} on a cache hit, otherwise the
     *         result of {@link #load(ArticleHomeDto, Short)}
     */
    @Override
    public ResponseResult load2(ArticleHomeDto dto, Short type, boolean firstPage) {
        if (firstPage) {
            String json = cacheService.get(ArticleConstants.HOT_ARTICLE_FIRST_PAGE + dto.getTag());
            // "[]" means the cache was warmed but holds no hot articles for
            // this channel — fall through to the database in that case too.
            if (StringUtils.isNotEmpty(json) && !"[]".equals(json)) {
                List<HotArticleVo> voList = JSONArray.parseArray(json, HotArticleVo.class);
                return ResponseResult.okResult(voList);
            }
        }
        return load(dto, type);
    }

    /**
     * Saves or updates an article published from the self-media (WeMedia) side.
     * Without an id the article, its config row, and its content row are all
     * inserted; with an id the article and content rows are updated. In both
     * cases a sync message is published to Kafka for the search index.
     *
     * @param articleDto article payload; must be non-null with non-empty content
     * @return ok result wrapping the article id, or an error result on invalid
     *         parameters / missing content row
     */
    @Override
    @Transactional
    public ResponseResult saveOrUpdateArticle(ArticleDto articleDto) {
        // 1. Validate parameters (consistent StringUtils — the original mixed
        //    the alibaba and apache-commons variants).
        if (articleDto == null || StringUtils.isEmpty(articleDto.getContent())) {
            return ResponseResult.errorResult(AppHttpCodeEnum.PARAM_INVALID);
        }

        // 2. No id -> insert article + config + content; id present -> update.
        ApArticle apArticle = new ApArticle();
        BeanUtils.copyProperties(articleDto, apArticle);

        if (articleDto.getId() == null) {
            // Insert the article first so MyBatis-Plus back-fills its id,
            // which the config and content rows reference.
            apArticleMapper.insert(apArticle);

            ApArticleConfig config = new ApArticleConfig();
            config.setArticleId(apArticle.getId());
            config.setIsComment(Boolean.TRUE);
            config.setIsDown(Boolean.FALSE);
            config.setIsDelete(Boolean.FALSE);
            config.setIsForward(Boolean.TRUE);
            configMapper.insert(config);

            ApArticleContent content = new ApArticleContent();
            content.setArticleId(apArticle.getId());
            content.setContent(articleDto.getContent());
            contentMapper.insert(content);
        } else {
            // Update the base article row.
            apArticleMapper.updateById(apArticle);

            ApArticleContent content = contentMapper.selectOne(Wrappers.<ApArticleContent>lambdaQuery()
                    .eq(ApArticleContent::getArticleId, apArticle.getId()));
            if (content == null) {
                // Guard against an orphaned article with no content row —
                // the original code threw an NPE here.
                return ResponseResult.errorResult(AppHttpCodeEnum.DATA_NOT_EXIST);
            }
            content.setContent(articleDto.getContent());
            contentMapper.updateById(content);
        }

        // 3. Notify the search service via Kafka so the ES index stays in sync.
        //    NOTE(review): the static page URL is a hard-coded placeholder —
        //    replace it once static-page generation is wired up.
        createIndex(apArticle, articleDto.getContent(), "http://www.baidu.com");

        return ResponseResult.okResult(apArticle.getId());
    }

    /**
     * Applies a real-time visit-stream aggregate to an article: updates the
     * database counters, recomputes the hot score (today's traffic weighted
     * x3), and refreshes both the channel and the recommendation redis lists.
     *
     * @param mess aggregated visit deltas (views/likes/comments/collects)
     */
    @Override
    public void updateArticle(ArticleVisitStreamMess mess) {
        // 1. Update the database counters; bail out if the article is gone
        //    (the original code threw an NPE on a stale/unknown article id).
        ApArticle apArticle = updateDB(mess);
        if (apArticle == null) {
            log.warn("visit stream for unknown article, articleId={}", mess.getArticleId());
            return;
        }

        // 2. Recompute the score; same-day traffic is weighted by 3.
        Integer score = computeHotArticleScore(apArticle) * 3;

        // 3. Refresh the article's own channel list.
        replaceDataToRedis(apArticle, score, ArticleConstants.HOT_ARTICLE_FIRST_PAGE + apArticle.getChannelId());

        // 4. Refresh the recommendation (default tag) list.
        replaceDataToRedis(apArticle, score, ArticleConstants.HOT_ARTICLE_FIRST_PAGE + ArticleConstants.DEFAULT_TAG);
    }

    /**
     * Merges an article's new score into a cached hot-article list.
     * If the article is already cached its score is updated in place;
     * otherwise it is inserted, evicting the lowest-scoring entry when the
     * list is full and the new score beats it. The list is re-sorted by
     * score (descending) and written back.
     *
     * @param apArticle article whose score changed
     * @param score     freshly computed hot score
     * @param key       redis key of the cached list
     */
    private void replaceDataToRedis(ApArticle apArticle, Integer score, String key) {
        String json = cacheService.get(key);
        if (StringUtils.isEmpty(json)) {
            return; // no cached list for this key — nothing to merge into
        }

        List<HotArticleVo> voList = JSONArray.parseArray(json, HotArticleVo.class);

        // Update in place when the article is already in the list.
        boolean updated = false;
        for (HotArticleVo cached : voList) {
            if (cached.getId().equals(apArticle.getId())) {
                cached.setScore(score);
                updated = true;
                break;
            }
        }

        if (!updated) {
            HotArticleVo candidate = new HotArticleVo();
            BeanUtils.copyProperties(apArticle, candidate);
            candidate.setScore(score);

            if (voList.size() < HOT_ARTICLE_LIST_SIZE) {
                voList.add(candidate);
            } else {
                // List is full: find the lowest-scoring entry in O(n) (the
                // original sorted the whole list just to read the last item)
                // and replace it only if the new score is strictly higher.
                voList.stream()
                        .min(Comparator.comparing(HotArticleVo::getScore))
                        .filter(lowest -> lowest.getScore() < score)
                        .ifPresent(lowest -> {
                            voList.remove(lowest);
                            voList.add(candidate);
                        });
            }
        }

        // Keep the cached list ordered by score, highest first.
        List<HotArticleVo> sorted = voList.stream()
                .sorted(Comparator.comparing(HotArticleVo::getScore).reversed())
                .collect(Collectors.toList());
        cacheService.set(key, JSON.toJSONString(sorted));
    }

    /**
     * Computes an article's hot score as the weighted sum of its counters;
     * null counters contribute nothing.
     *
     * @param apArticle article carrying the current counters
     * @return the weighted score (never null)
     */
    private Integer computeHotArticleScore(ApArticle apArticle) {
        int score = 0;
        if (apArticle.getViews() != null) {
            score += apArticle.getViews();
        }
        if (apArticle.getLikes() != null) {
            score += apArticle.getLikes() * ArticleConstants.HOT_ARTICLE_LIKE_WEIGHT;
        }
        if (apArticle.getComment() != null) {
            score += apArticle.getComment() * ArticleConstants.HOT_ARTICLE_COMMENT_WEIGHT;
        }
        if (apArticle.getCollection() != null) {
            score += apArticle.getCollection() * ArticleConstants.HOT_ARTICLE_COLLECTION_WEIGHT;
        }
        return score;
    }

    /**
     * Adds the visit-stream deltas onto the article's persisted counters.
     *
     * @param mess aggregated deltas keyed by article id
     * @return the updated article, or {@code null} when no article exists for
     *         the given id (the original code would NPE in that case)
     */
    private ApArticle updateDB(ArticleVisitStreamMess mess) {
        ApArticle apArticle = getById(mess.getArticleId());
        if (apArticle == null) {
            return null;
        }
        // Null counters start from the delta itself.
        apArticle.setCollection(apArticle.getCollection() == null ? mess.getCollect() : apArticle.getCollection() + mess.getCollect());
        apArticle.setLikes(apArticle.getLikes() == null ? mess.getLike() : apArticle.getLikes() + mess.getLike());
        apArticle.setComment(apArticle.getComment() == null ? mess.getComment() : apArticle.getComment() + mess.getComment());
        apArticle.setViews(apArticle.getViews() == null ? mess.getView() : apArticle.getViews() + mess.getView());
        updateById(apArticle);
        return apArticle;
    }

    /**
     * Publishes an article-sync message to Kafka so the search service can
     * (re)build the corresponding Elasticsearch document.
     *
     * @param apArticle article metadata copied into the search VO
     * @param content   full article content for indexing
     * @param url       static page URL stored with the document
     */
    private void createIndex(ApArticle apArticle, String content, String url) {
        SearchArticleVo vo = new SearchArticleVo();
        BeanUtils.copyProperties(apArticle, vo);
        vo.setContent(content);
        vo.setStaticUrl(url);

        kafkaTemplate.send(ArticleConstants.ARTICLE_ES_SYNC_TOPIC, JSON.toJSONString(vo));
    }

}
