package com.heima.article.service.impl;


import cn.hutool.core.bean.BeanUtil;
import com.alibaba.fastjson.JSON;
import com.heima.article.dao.ApArticleConfigDao;
import com.heima.article.dao.ApArticleContentDao;
import com.heima.article.dao.ApArticleDao;
import com.heima.article.service.ApArticleFreemarkerService;
import com.heima.article.service.ApArticleService;
import com.heima.common.constants.ArticleContants;
import com.heima.common.constants.KafkaMessageConstant;
import com.heima.model.article.dto.ArticleDto;
import com.heima.model.article.entity.ApArticle;
import com.heima.model.article.entity.ApArticleConfig;
import com.heima.model.article.entity.ApArticleContent;
import com.heima.model.article.vo.HotArticleVo;
import com.heima.model.common.dtos.ResponseResult;
import com.heima.model.common.enums.AppHttpCodeEnum;
import com.heima.model.search.enntity.SearchArticleVo;
import com.heima.model.user.dto.ArticleHomeDto;
import com.heima.utils.common.SnowflakeIdWorker;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.core.ZSetOperations;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

import javax.annotation.Resource;
import java.lang.reflect.InvocationTargetException;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Service implementation for the article base-info table (ApArticle).
 *
 * <p>Responsibilities visible in this class: CRUD on the article tables,
 * saving an article submitted from the WeMedia side (base info + config +
 * content rows, static page generation, ES sync via Kafka), and loading the
 * app home feed either from the database or from the Redis hot-article cache.
 *
 * @author makejava
 * @since 2022-04-16 17:58:29
 */
@Service("apArticleService")
public class ApArticleServiceImpl implements ApArticleService {
    @Resource
    private ApArticleDao apArticleDao;

    @Resource
    private ApArticleContentDao apArticleContentDao;

    @Resource
    private ApArticleConfigDao apArticleConfigDao;

    @Resource
    private SnowflakeIdWorker snowflakeIdWorker;

    @Resource
    private ApArticleFreemarkerService apArticleFreemarkerService;

    // Parameterized instead of the raw KafkaTemplate: this class only ever
    // sends String topic / String JSON payloads (see save()).
    @Resource
    private KafkaTemplate<String, String> kafkaTemplate;

    // NOTE(review): loadV2() referenced this field but it was never declared,
    // so the class could not compile. Declared here; injected like the other
    // resources.
    @Resource
    private StringRedisTemplate stringRedisTemplate;

    /**
     * Looks up a single article by primary key.
     *
     * @param id primary key
     * @return the matching entity, or {@code null} if none exists
     */
    @Override
    public ApArticle queryById(Long id) {
        return this.apArticleDao.queryById(id);
    }

    /**
     * Paged query. Not implemented yet — always returns {@code null}.
     *
     * @param apArticle   filter conditions
     * @param pageRequest paging parameters
     * @return always {@code null} (TODO: implement)
     */
    @Override
    public Page<ApArticle> queryByPage(ApArticle apArticle, PageRequest pageRequest) {
        return null;
    }

    /**
     * Inserts a new article row.
     *
     * @param apArticle entity to insert
     * @return the same entity (id population depends on the DAO mapping)
     */
    @Override
    public ApArticle insert(ApArticle apArticle) {
        this.apArticleDao.insert(apArticle);
        return apArticle;
    }

    /**
     * Updates an existing article row and re-reads it.
     *
     * @param apArticle entity carrying the new values (id required)
     * @return the freshly loaded entity
     */
    @Override
    public ApArticle update(ApArticle apArticle) {
        this.apArticleDao.update(apArticle);
        return this.queryById(apArticle.getId());
    }

    /**
     * Deletes an article row by primary key.
     *
     * @param id primary key
     * @return {@code true} if a row was removed
     */
    @Override
    public boolean deleteById(Long id) {
        return this.apArticleDao.deleteById(id) > 0;
    }

    /**
     * Saves an article coming from the WeMedia side.
     *
     * <p>Inserts or updates the base-info / config / content rows, renders the
     * static HTML page to MinIO, stores its URL, and publishes the article to
     * Kafka so the search service can sync it into Elasticsearch.
     *
     * @param dto article payload; title and newsId are mandatory
     * @return the saved {@link ApArticle} wrapped in a {@link ResponseResult}
     * @throws RuntimeException if static page generation yields no URL
     */
    @Override
    public ResponseResult save(ArticleDto dto) {
        // 1. Validate parameters.
        if (dto == null || StringUtils.isBlank(dto.getTitle()) || dto.getNewsId() == null) {
            return ResponseResult.errorResult(AppHttpCodeEnum.PARAM_REQUIRE);
        }

        // 2. Insert or update? Decided by whether the newsId is already known.
        ApArticle existing = apArticleDao.queryByNewsId(dto.getNewsId());

        ApArticle article = new ApArticle();
        // BUGFIX: the original used commons-beanutils copyProperties, whose
        // checked exceptions were never handled (would not compile). Hutool's
        // BeanUtil (already imported) copies the same way without them.
        BeanUtil.copyProperties(dto, article);

        if (existing == null) {
            // 3. Insert path.
            // 3.1 Base-info row.
            article.setId(snowflakeIdWorker.nextId());
            // BUGFIX: the original inserted `apArticle`, which is provably
            // null in this branch, instead of the populated `article`.
            apArticleDao.insert(article);

            // 3.2 Config row with default flags (enabled, commentable,
            //     forwardable, not deleted).
            ApArticleConfig apArticleConfig = new ApArticleConfig();
            apArticleConfig.setArticleId(article.getId());
            apArticleConfig.setEnable(1);
            apArticleConfig.setIsComment(1);
            apArticleConfig.setIsDelete(0);
            apArticleConfig.setIsForward(1);
            apArticleConfig.setCreatedTime(new Date());
            apArticleConfig.setUpdatedTime(new Date());
            apArticleConfigDao.insert(apArticleConfig);

            // 3.3 Content row.
            ApArticleContent apArticleContent = new ApArticleContent();
            apArticleContent.setArticleId(article.getId());
            apArticleContent.setContent(dto.getContent());
            apArticleContent.setCreatedTime(new Date());
            apArticleContent.setUpdatedTime(new Date());
            apArticleContentDao.insert(apArticleContent);
        } else {
            // 4. Update path: reuse the existing primary key.
            article.setId(existing.getId());
            // BUGFIX: the original updated the stale lookup result instead of
            // `article`, so none of the new DTO values were persisted.
            apArticleDao.update(article);

            // 4.1 Content row.
            ApArticleContent apArticleContent = new ApArticleContent();
            apArticleContent.setArticleId(article.getId());
            apArticleContent.setContent(dto.getContent());
            apArticleContent.setUpdatedTime(new Date());
            apArticleContentDao.update(apArticleContent);
        }

        // 5. Render the static HTML page with Freemarker and upload to MinIO;
        //    persist the resulting URL on the article.
        String url = apArticleFreemarkerService.buildArticle2Minio(article.getId(), dto.getContent());
        if (StringUtils.isEmpty(url)) {
            throw new RuntimeException("生成静态页面出错");
        }
        article.setStaticUrl(url);
        apArticleDao.update(article);

        // 6. Sync to the ES index: publish the article to Kafka; the search
        //    service consumes this topic.
        SearchArticleVo searchArticle = new SearchArticleVo();
        // BUGFIX: the original sent an undefined variable (`searchArticleVo`)
        // via an undefined constants class (`KafkaMessageConstants`), and the
        // payload was left empty. Copy the article fields into the VO and use
        // the imported KafkaMessageConstant.
        BeanUtil.copyProperties(article, searchArticle);
        kafkaTemplate.send(KafkaMessageConstant.AP_ARTICLE_ES_SYNC_TOPIC, JSON.toJSONString(searchArticle));

        return ResponseResult.okResult(article);
    }

    /**
     * Looks up an article by its WeMedia news id.
     * Not implemented yet — always returns {@code null}.
     *
     * @param newsId WeMedia news id
     * @return always {@code null} (TODO: implement)
     */
    @Override
    public ApArticle queryByNewsId(Long newsId) {
        return null;
    }

    /**
     * Loads home-feed articles from the database.
     *
     * @param dto          paging / channel / time-window parameters
     * @param loadTypeMore load direction flag passed through to the DAO query
     * @return the matching articles wrapped in a {@link ResponseResult}
     */
    @Override
    public ResponseResult load(ArticleHomeDto dto, short loadTypeMore) {
        // 1.1 Page size: default 10, capped at 50.
        Integer size = dto.getSize();
        if (size == null || size == 0) {
            size = 10;
        }
        dto.setSize(Math.min(size, 50));

        // 1.2 Time window: missing bounds default to "now".
        if (dto.getMaxBehotTime() == null) {
            dto.setMaxBehotTime(new Date());
        }
        if (dto.getMinBehotTime() == null) {
            dto.setMinBehotTime(new Date());
        }

        // 1.3 Channel: fall back to the default tag.
        if (StringUtils.isEmpty(dto.getTag())) {
            dto.setTag(ArticleContants.DEFAULT_TAG);
        }

        // 2. Run the DAO query with the DTO fields plus the load-type flag.
        Map<String, Object> queryParam = BeanUtil.beanToMap(dto);
        queryParam.put("loadType", loadTypeMore);

        List<ApArticle> articleList = apArticleDao.load(queryParam);

        return ResponseResult.okResult(articleList);
    }

    /**
     * "Load more" variant. Not implemented yet — always returns {@code null}.
     *
     * @param dto          paging parameters
     * @param loadTypeMore load direction flag
     * @return always {@code null} (TODO: implement)
     */
    @Override
    public ResponseResult loadmore(ArticleHomeDto dto, short loadTypeMore) {
        return null;
    }

    /**
     * "Load newest" variant. Not implemented yet — always returns {@code null}.
     *
     * @param dto         paging parameters
     * @param loadTypeNew load direction flag
     * @return always {@code null} (TODO: implement)
     */
    @Override
    public ResponseResult loadnew(ArticleHomeDto dto, short loadTypeNew) {
        return null;
    }

    /**
     * Loads the app home feed, preferring the Redis hot-article cache and
     * falling back to {@link #load(ArticleHomeDto, short)} on a cache miss.
     *
     * @param dto      channel / paging parameters
     * @param loadMore load direction flag used only by the DB fallback
     * @return hot articles sorted by score descending, or the DB result
     */
    @Override
    public ResponseResult loadV2(ArticleHomeDto dto, Integer loadMore) {
        // 1. Resolve the channel; empty tag means the default channel.
        String channelId = StringUtils.isEmpty(dto.getTag()) ? ArticleContants.DEFAULT_TAG : dto.getTag();

        // 2. Read the 30 highest-scored article ids for this channel from the
        //    hot-article ZSET.
        //    BUGFIX: the original addressed an undefined `ArticleConstant`
        //    class, and (0, 30) fetched 31 elements; (0, 29) is 30.
        Set<ZSetOperations.TypedTuple<String>> typedTupleSet = stringRedisTemplate.opsForZSet()
                .reverseRangeWithScores(ArticleContants.HOT_ARTICLE_PREFIX + channelId, 0, 29);

        if (CollectionUtils.isEmpty(typedTupleSet)) {
            // Cache miss — fall back to the database query.
            return load(dto, loadMore.shortValue());
        }

        // 3. Build article-id -> score, then fetch the detail JSON strings
        //    from the hot-article HASH in one round trip.
        Map<String, Double> scoreById = typedTupleSet.stream()
                .collect(Collectors.toMap(ZSetOperations.TypedTuple::getValue,
                        ZSetOperations.TypedTuple::getScore));

        List<String> detailJsonList = stringRedisTemplate.<String, String>opsForHash()
                .multiGet(ArticleContants.HOT_ARTICLE_HASH, scoreById.keySet());

        // 4. Re-attach the score to each VO and sort descending.
        List<HotArticleVo> hotArticleVoList = detailJsonList.stream()
                // multiGet returns null for ids missing from the hash
                // (e.g. evicted between the two reads) — skip those.
                .filter(Objects::nonNull)
                .map(json -> {
                    HotArticleVo hotArticleVo = JSON.parseObject(json, HotArticleVo.class);
                    hotArticleVo.setScore(scoreById.get(hotArticleVo.getId() + "").intValue());
                    return hotArticleVo;
                })
                // Overflow-safe replacement for the original o2 - o1 subtraction.
                .sorted(Comparator.comparingInt(HotArticleVo::getScore).reversed())
                .collect(Collectors.toList());

        // 5. Return the assembled hot list.
        return ResponseResult.okResult(hotArticleVoList);
    }
}
