package com.heima.article.service.impl;

import cn.hutool.core.bean.BeanUtil;
import com.alibaba.fastjson.JSON;
import com.heima.article.dao.ApArticleConfigDao;
import com.heima.article.dao.ApArticleContentDao;
import com.heima.article.service.ApArticleFreemarkerService;
import com.heima.common.constants.ArticleConstants;
import com.heima.common.constants.KafkaMessageConstants;
import com.heima.model.article.dto.ArticleDto;
import com.heima.model.article.dto.ArticleHomeDto;
import com.heima.model.article.entity.ApArticle;
import com.heima.article.dao.ApArticleDao;
import com.heima.article.service.ApArticleService;
import com.heima.model.article.entity.ApArticleConfig;
import com.heima.model.article.entity.ApArticleContent;
import com.heima.model.article.vo.HotArticleVo;
import com.heima.model.common.dtos.ResponseResult;
import com.heima.model.common.enums.AppHttpCodeEnum;
import com.heima.model.search.vos.SearchArticleVo;
import com.heima.utils.common.SnowflakeIdWorker;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.core.ZSetOperations;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;

import javax.annotation.Resource;
import java.util.*;
import java.util.stream.Collectors;

/**
 * APP端文章数据库(ApArticle)表服务实现类
 *
 * @author makejava
 * @since 2022-04-16 16:29:45
 */
@Service("apArticleService")
public class ApArticleServiceImpl implements ApArticleService {

    /** Default page size used when the caller supplies none (or a non-positive one). */
    private static final int DEFAULT_PAGE_SIZE = 10;

    /** Hard cap on the page size a caller may request. */
    private static final int MAX_PAGE_SIZE = 50;

    @Resource
    private ApArticleDao apArticleDao;

    @Resource
    private ApArticleConfigDao apArticleConfigDao;

    @Resource
    private ApArticleContentDao apArticleContentDao;

    @Autowired
    private SnowflakeIdWorker snowflakeIdWorker;

    @Resource
    private ApArticleFreemarkerService apArticleFreemarkerService;

    // Typed to <String, String>: this service only ever publishes JSON strings
    // (raw KafkaTemplate produced unchecked sends).
    @Resource
    private KafkaTemplate<String, String> kafkaTemplate;

    @Resource
    private StringRedisTemplate stringRedisTemplate;

    /**
     * Looks up a single article by primary key.
     *
     * @param id primary key of the article
     * @return the matching article, or {@code null} if none exists
     */
    @Override
    public ApArticle queryById(Long id) {
        return this.apArticleDao.queryById(id);
    }

    /**
     * Pages through articles matching the given filter.
     *
     * @param apArticle   filter conditions (non-null fields are used as criteria by the DAO)
     * @param pageRequest page number / size to fetch
     * @return one page of results together with the total match count
     */
    @Override
    public Page<ApArticle> queryByPage(ApArticle apArticle, PageRequest pageRequest) {
        long total = this.apArticleDao.count(apArticle);
        return new PageImpl<>(this.apArticleDao.queryAllByLimit(apArticle, pageRequest), pageRequest, total);
    }

    /**
     * Inserts a new article row.
     *
     * @param apArticle article to persist
     * @return the same instance (the DAO populates generated fields in place)
     */
    @Override
    public ApArticle insert(ApArticle apArticle) {
        this.apArticleDao.insert(apArticle);
        return apArticle;
    }

    /**
     * Updates an existing article row.
     *
     * @param apArticle article carrying the id and the fields to change
     * @return the freshly re-read row, reflecting the update
     */
    @Override
    public ApArticle update(ApArticle apArticle) {
        this.apArticleDao.update(apArticle);
        return this.queryById(apArticle.getId());
    }

    /**
     * Deletes an article by primary key.
     *
     * @param id primary key of the article
     * @return {@code true} if a row was actually removed
     */
    @Override
    public boolean deleteById(Long id) {
        return this.apArticleDao.deleteById(id) > 0;
    }

    /**
     * Publishes a self-media article to the APP side: upserts the article,
     * its config and content rows, renders a static HTML page to MinIO,
     * back-fills the page URL, and notifies the search service via Kafka
     * so the ES index stays in sync.
     *
     * @param dto self-media article payload; must carry a news id
     * @return success result, or PARAM_REQUIRE when the payload is invalid
     * @throws RuntimeException if the static detail page cannot be generated
     */
    @Override
    public ResponseResult save(ArticleDto dto) {
        // 1. Parameter validation
        if (dto == null || dto.getNewsId() == null) {
            return ResponseResult.errorResult(AppHttpCodeEnum.PARAM_REQUIRE);
        }

        ApArticle apArticle = new ApArticle();
        BeanUtils.copyProperties(dto, apArticle);

        // One timestamp for the whole operation so the created/updated times
        // of the related rows agree (previously each setter called new Date()).
        Date now = new Date();

        // 2. Insert when the article is unknown, otherwise update in place
        ApArticle article = apArticleDao.queryByNewsId(dto.getNewsId());
        if (article == null) {
            // 3.1 Insert into ap_article with a snowflake-generated id
            apArticle.setId(snowflakeIdWorker.nextId());
            apArticleDao.insert(apArticle);

            // 3.2 Insert the article config row: enabled, not deleted,
            // comments and forwarding allowed by default
            ApArticleConfig apArticleConfig = new ApArticleConfig();
            apArticleConfig.setArticleId(apArticle.getId());
            apArticleConfig.setEnable(1);
            apArticleConfig.setIsDelete(0);
            apArticleConfig.setIsComment(1);
            apArticleConfig.setIsForward(1);
            apArticleConfig.setCreatedTime(now);
            apArticleConfig.setUpdatedTime(now);
            apArticleConfigDao.insert(apArticleConfig);

            // 3.3 Insert the article content row
            ApArticleContent apArticleContent = new ApArticleContent();
            apArticleContent.setArticleId(apArticle.getId());
            apArticleContent.setContent(dto.getContent());
            apArticleContent.setCreatedTime(now);
            apArticleContent.setUpdatedTime(now);
            apArticleContentDao.insert(apArticleContent);
        } else {
            // 4.1 Update ap_article, keeping the existing id
            apArticle.setId(article.getId());
            apArticleDao.update(apArticle);

            // 4.2 Update the article content row
            ApArticleContent apArticleContent = new ApArticleContent();
            apArticleContent.setArticleId(apArticle.getId());
            apArticleContent.setContent(dto.getContent());
            apArticleContent.setUpdatedTime(now);
            apArticleContentDao.update(apArticleContent);
        }

        // 5. Render the static detail page, upload to MinIO, back-fill the URL
        String url = apArticleFreemarkerService.buildArticle2Minio(apArticle.getId(), dto.getContent());
        if (StringUtils.isEmpty(url)) {
            throw new RuntimeException("生成文章详情静态页面失败");
        }
        apArticle.setStaticUrl(url);
        apArticleDao.update(apArticle);

        // 6. Notify the search service (async, via Kafka) to sync the ES index
        SearchArticleVo searchArticleVo = new SearchArticleVo();
        BeanUtils.copyProperties(apArticle, searchArticleVo);
        searchArticleVo.setContent(dto.getContent());
        kafkaTemplate.send(KafkaMessageConstants.AP_ARTICLE_ES_SAVE_TOPIC, JSON.toJSONString(searchArticleVo));

        return ResponseResult.okResult("发布成功");
    }

    /**
     * Loads the home feed straight from the database.
     *
     * @param dto      paging/time-window/channel parameters; normalized in place
     * @param loadType direction of the load (load-more vs. load-new)
     * @return list of matching, enabled, non-deleted articles
     */
    @Override
    public ResponseResult load(ArticleHomeDto dto, Integer loadType) {
        // 1.1 Page size: default 10, cap at 50. Also rejects negative sizes,
        // which previously slipped through to the SQL LIMIT clause.
        Integer size = dto.getSize();
        if (size == null || size <= 0) {
            size = DEFAULT_PAGE_SIZE;
        }
        dto.setSize(Math.min(size, MAX_PAGE_SIZE));

        // 1.2 Default both time bounds to "now" when absent
        if (dto.getMaxBehotTime() == null) {
            dto.setMaxBehotTime(new Date());
        }
        if (dto.getMinBehotTime() == null) {
            dto.setMinBehotTime(new Date());
        }

        // 1.3 Empty channel tag means "all channels"
        if (StringUtils.isEmpty(dto.getTag())) {
            dto.setTag(ArticleConstants.DEFAULT_TAG);
        }

        // 2. Query: the DAO joins against config to keep only articles that
        // are enabled and not deleted
        Map<String, Object> queryParam = BeanUtil.beanToMap(dto);
        queryParam.put("loadType", loadType);
        List<ApArticle> list = apArticleDao.load(queryParam);
        return ResponseResult.okResult(list);
    }

    /**
     * Looks up an article by its originating self-media news id.
     *
     * @param newsId id of the self-media news item
     * @return the matching article, or {@code null} if none exists
     */
    @Override
    public ApArticle query(Long newsId) {
        return apArticleDao.queryByNewsId(newsId);
    }

    /**
     * Loads the home feed from the Redis hot-article cache, falling back to
     * {@link #load(ArticleHomeDto, Integer)} on a cache miss.
     *
     * @param dto          paging/channel parameters
     * @param loadTypeMore load type forwarded to the DB fallback
     * @return hot articles for the channel, sorted by score descending
     */
    @Override
    public ResponseResult loadV2(ArticleHomeDto dto, Integer loadTypeMore) {
        // 1. Default channel when none is supplied
        String channelId = StringUtils.isEmpty(dto.getTag()) ? ArticleConstants.DEFAULT_TAG : dto.getTag();

        // 2. Top hot-article ids with scores from the channel's ZSET
        Set<ZSetOperations.TypedTuple<String>> typedTuples = stringRedisTemplate.opsForZSet()
                .reverseRangeWithScores(ArticleConstants.HOT_ARTICLE_PREFIX + channelId, 0, 30);

        // Cache miss: fall back to the database path
        if (CollectionUtils.isEmpty(typedTuples)) {
            return load(dto, loadTypeMore);
        }

        // 3.1 article id -> score. Filter out tuples with null value/score
        // first: Collectors.toMap throws NPE on null values.
        Map<String, Double> scoreById = typedTuples.stream()
                .filter(tuple -> tuple.getValue() != null && tuple.getScore() != null)
                .collect(Collectors.toMap(ZSetOperations.TypedTuple::getValue,
                        ZSetOperations.TypedTuple::getScore));

        Set<String> articleIds = scoreById.keySet();

        // 3.2 Article detail JSON from the HASH; multiGet returns a null
        // element for any id whose hash entry is missing or was evicted.
        List<String> jsonList = stringRedisTemplate.<String, String>opsForHash()
                .multiGet(ArticleConstants.HOT_ARTICLE_HASH, articleIds);

        List<HotArticleVo> hotArticleVoList = jsonList.stream()
                .filter(Objects::nonNull) // skip missing hash entries instead of NPE-ing
                .map(json -> {
                    HotArticleVo hotArticleVo = JSON.parseObject(json, HotArticleVo.class);
                    Double score = scoreById.get(hotArticleVo.getId() + "");
                    hotArticleVo.setScore(score == null ? 0 : score.intValue());
                    return hotArticleVo;
                })
                // overflow-safe descending sort (was "o2 - o1" int subtraction)
                .sorted(Comparator.comparingInt(HotArticleVo::getScore).reversed())
                .collect(Collectors.toList());

        // 4. Wrap and return
        return ResponseResult.okResult(hotArticleVoList);
    }
}
