package com.blog.service.impl;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.blog.constant.MsgConstant;
import com.blog.constant.RedisConstant;
import com.blog.mapper.ArticleMapper;
import com.blog.pojo.doc.ArticleDoc;
import com.blog.pojo.dto.ArticleDTO;
import com.blog.pojo.entity.Article;
import com.blog.service.ArticleService;
import com.blog.util.ESUtil;
import lombok.extern.slf4j.Slf4j;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

@Service
@Slf4j
@Transactional(rollbackFor = Exception.class) // 添加事务回滚注解，当遇到错误时，会回滚事务，确保数据的完整
public class ArticleServiceImpl extends ServiceImpl<ArticleMapper, Article> implements ArticleService {

    @Autowired
    private StringRedisTemplate stringRedisTemplate;
    @Autowired
    private ThreadPoolTaskExecutor threadPoolTaskExecutor;


    /**
     * Paginated article query.
     * <p>
     * Fast path: if the Redis article hash exists, article ids matching the filters are
     * resolved via Elasticsearch and the article bodies are loaded from the cache.
     * Fallback: query the database with MyBatis-Plus and asynchronously rebuild the cache.
     *
     * @param pageNum    1-based page number
     * @param pageSize   page size
     * @param title      optional fuzzy title filter (LIKE)
     * @param categoryId optional category filter
     * @param status     optional publish-status filter
     * @return one page of articles, never {@code null}
     */
    @Override
    @Transactional(readOnly = true) // 添加只读事务管理，不会进行数据锁，可以解放部分资源
    public IPage<Article> pageQuery(Integer pageNum, Integer pageSize,
                                    String title, Long categoryId, Long status) {
        // 判断是否有缓存。hasKey() 返回 Boolean，可能为 null（如管道/事务中），
        // 用 Boolean.TRUE.equals 避免自动拆箱 NPE
        Boolean hasKey = stringRedisTemplate.hasKey(RedisConstant.ARTICLE_REDIS_KEY);
        if (Boolean.TRUE.equals(hasKey)) {
            // 调用方法，使用ES进行条件查询出文章id，通过Redis缓存获取数据
            IPage<Article> articleIPage = null;
            try {
                articleIPage = searchArticlePageAndCache(pageNum, pageSize, title, categoryId, status);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
            if (articleIPage != null) {
                return articleIPage;
            }
        }
        // 没有缓存（或缓存查询未命中），从数据库中获取
        try {
            // 1. 创建分页对象，给予分页的条件
            Page<Article> page = new Page<>(pageNum, pageSize);
            // 2. 构建wrapper，添加查询的条件
            LambdaQueryWrapper<Article> wrapper = new LambdaQueryWrapper<>();
            // 3. 添加模糊查询的条件
            if (StrUtil.isNotBlank(title)) {
                wrapper.like(Article::getTitle, title);
            }
            // 4. 添加分类id的查询条件
            if (categoryId != null) {
                wrapper.eq(Article::getCategoryId, categoryId);
            }
            // 5. 添加发布状态的查询条件
            if (status != null) {
                wrapper.eq(Article::getStatus, status);
            }
            // 6. 添加查询到的数据的排列条件（根据创建时间排序）
            wrapper.orderByDesc(Article::getCreateTime);
            // 7. 进行分页查询
            Page<Article> pageArticle = this.page(page, wrapper);
            // 8. 开启新线程调用缓存方法，进行文章缓存（Hash）
            threadPoolTaskExecutor.submit(this::addArticleToRedis);
            log.info("MP中查询到的total：{}", pageArticle.getTotal());
            // 9. 返回数据
            return pageArticle;
        } catch (Exception e) {
            // 失败路径用 ERROR 级别记录（原来误用 info）
            log.error("文章查询失败:", e);
            throw new RuntimeException(MsgConstant.ARTICLE_SELECT_ERROR);
        }
    }

    /**
     * Full-text paginated search with highlighting.
     * <p>
     * Runs a multi-match query on {@code title} and {@code description} against the
     * "article" index, optionally filtered by status, sorted by {@code createTime} desc,
     * and replaces the matched fields with their highlighted fragments.
     *
     * @param pageNum       1-based page number
     * @param pageSize      page size
     * @param status        optional publish-status filter
     * @param searchContent user search text
     * @return one page of articles with highlight markup, or {@code null} if the ES query failed
     */
    @Override
    public IPage<Article> searchArticleHighLightPage(Integer pageNum, Integer pageSize, Long status, String searchContent) {
        log.info("开始进行ES高亮分页查询");
        // 1. 使用ES进行条件查询出id
        // 1.1 获取ES客户端
        RestHighLevelClient client = ESUtil.getClient();
        // 1.2 准备Request
        SearchRequest request = new SearchRequest("article");
        // 创建BoolQueryBuilder对象：title/description 任一字段命中即可
        BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
        boolQuery.must(QueryBuilders.multiMatchQuery(searchContent, "title", "description"));
        if (status != null) {
            boolQuery.must(QueryBuilders.termQuery("status", status));
        }
        // 1.3 组织DSL参数
        request.source().query(boolQuery);
        // 1.4 设置分页参数
        request.source().from((pageNum - 1) * pageSize).size(pageSize);
        // 1.5 设置排序字段
        request.source().sort("createTime", SortOrder.DESC);
        // 1.7 设置高亮字段
        request.source().highlighter(
                SearchSourceBuilder.highlight()
                        .field("title")
                        .field("description")
                        .preTags("<em>")
                        .postTags("</em>"));
        // 1.8 执行查询
        List<Article> articleList = null;
        long total = 0L;
        try {
            SearchResponse response = client.search(request, RequestOptions.DEFAULT);
            // 获取一级数据层
            SearchHits searchHits = response.getHits();
            total = searchHits.getTotalHits().value;
            log.info("ES中高亮查询到的total：{}", total);
            // 获取二级数据层，真正包含需要的数据
            SearchHit[] hits = searchHits.getHits();
            // 使用stream流把每个命中转换成 Article，并替换高亮片段
            articleList = Arrays.stream(hits).map(hit -> {
                // 数据在Source中，是JSON类型的，先转换成Doc再转换成实体对象
                ArticleDoc articleDoc = JSONUtil.toBean(hit.getSourceAsString(), ArticleDoc.class);
                Article article = BeanUtil.copyProperties(articleDoc, Article.class);
                // 获取高亮字段数据，命中的字段用高亮片段覆盖原文
                Map<String, HighlightField> highlightFields = hit.getHighlightFields();
                if (highlightFields.containsKey("title")) {
                    article.setTitle(highlightFields.get("title").getFragments()[0].toString());
                }
                if (highlightFields.containsKey("description")) {
                    article.setDescription(highlightFields.get("description").getFragments()[0].toString());
                }
                return article;
            }).collect(Collectors.toList());
            log.info("ES高亮查询文章成功:{}", articleList);
        } catch (IOException e) {
            log.error("ES查询文章失败：", e);
            return null;
        } finally {
            try {
                client.close();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
        // 构建Page对象用于返回（带泛型，避免原始类型）
        Page<Article> iPage = new Page<>();
        iPage.setRecords(articleList);
        iPage.setTotal(total);
        return iPage;
    }

    /**
     * Creates a new article.
     * <p>
     * Saves the article, evicts the Redis article hash (cache-aside), and asynchronously
     * rebuilds the Elasticsearch index from the database.
     *
     * @param articleDTO incoming article data
     * @throws RuntimeException with {@link MsgConstant#ARTICLE_ADD_ERROR} on failure
     */
    @Override
    public void addArticle(ArticleDTO articleDTO) {
        // 1. 先将DTO参数实体转换成 Article实体
        Article article = BeanUtil.copyProperties(articleDTO, Article.class);
        // 2. 设置创建时间和修改时间
        article.insertTime();
        try {
            // 3. 进行保存
            this.save(article);
            // 4. 删除缓存（先删缓存，异步重建索引）
            stringRedisTemplate.delete(RedisConstant.ARTICLE_REDIS_KEY);
            // 5. 保存新数据到ES中（异步全量重建）
            threadPoolTaskExecutor.submit(this::addESArticle);
        } catch (Exception e) {
            // 失败路径用 ERROR 级别记录（原来误用 info）
            log.error("文章添加失败：", e);
            throw new RuntimeException(MsgConstant.ARTICLE_ADD_ERROR);
        }
    }

    /**
     * Batch-deletes articles by id.
     * <p>
     * Verifies every id exists, deletes from the database, evicts the Redis cache,
     * and removes the corresponding documents from Elasticsearch.
     *
     * @param ids 要删除的文章ID列表
     * @throws RuntimeException with {@link MsgConstant#ARTICLE_DELETE_ERROR} when an id
     *                          does not exist or the deletion fails
     */
    @Override
    public void deleteBatchArticle(List<Long> ids) {
        // 1. 先检查要删除的文章中是否有不存在的文章
        LambdaQueryWrapper<Article> wrapper = new LambdaQueryWrapper<>();
        wrapper.in(Article::getId, ids);
        if (this.count(wrapper) != ids.size()) {
            // BUG FIX: 原来误抛 ARTICLE_ADD_ERROR，删除场景应使用删除失败的提示
            throw new RuntimeException(MsgConstant.ARTICLE_DELETE_ERROR);
        }
        RestHighLevelClient client = ESUtil.getClient();
        // 2. 进行批量删除
        try {
            this.removeBatchByIds(ids);
            // 3. 删除缓存
            stringRedisTemplate.delete(RedisConstant.ARTICLE_REDIS_KEY);
            // 4. 删除ES中的数据
            BulkRequest request = new BulkRequest();
            for (Long id : ids) {
                request.add(new DeleteRequest("article").id(id.toString()));
            }
            client.bulk(request, RequestOptions.DEFAULT);
        } catch (Exception e) {
            // 失败路径用 ERROR 级别记录（原来误用 info）
            log.error("批量删除失败：", e);
            throw new RuntimeException(MsgConstant.ARTICLE_DELETE_ERROR);
        } finally {
            try {
                client.close();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Deletes a single article by id.
     * <p>
     * Verifies the article exists, deletes it from the database, evicts the Redis cache,
     * and removes the document from Elasticsearch.
     *
     * @param id article id
     * @throws RuntimeException with {@link MsgConstant#ARTICLE_DELETE_ERROR} when the
     *                          article does not exist or the deletion fails
     */
    @Override
    public void deleteOneArticle(Long id) {
        // 1. 先检查要删除的文章是否存在于数据库中
        LambdaQueryWrapper<Article> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(Article::getId, id);
        if (this.count(wrapper) != 1) {
            throw new RuntimeException(MsgConstant.ARTICLE_DELETE_ERROR);
        }
        RestHighLevelClient client = ESUtil.getClient();
        // 2. 进行删除
        try {
            this.removeById(id);
            // 3. 删除缓存
            stringRedisTemplate.delete(RedisConstant.ARTICLE_REDIS_KEY);
            // 4. 删除ES中的数据
            DeleteRequest deleteRequest = new DeleteRequest("article", id.toString());
            client.delete(deleteRequest, RequestOptions.DEFAULT);
        } catch (Exception e) {
            // 失败路径用 ERROR 级别记录（原来误用 info）
            log.error("文章删除失败: ", e);
            throw new RuntimeException(MsgConstant.ARTICLE_DELETE_ERROR);
        } finally {
            try {
                client.close();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Updates an existing article.
     * <p>
     * Verifies the article exists, updates it, evicts the Redis cache, and
     * asynchronously rebuilds the Elasticsearch index.
     *
     * @param articleDTO article data carrying the id of the row to update
     * @throws RuntimeException with {@link MsgConstant#ARTICLE_UPDATE_ERROR} when the
     *                          article does not exist or the update fails
     */
    @Override
    public void updateArticle(ArticleDTO articleDTO) {
        // 1. 先检查要修改的文章存不存在
        LambdaQueryWrapper<Article> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(Article::getId, articleDTO.getId());
        if (this.count(wrapper) != 1) {
            throw new RuntimeException(MsgConstant.ARTICLE_UPDATE_ERROR);
        }

        // 2. 将DTO转换成Article实体
        Article article = BeanUtil.copyProperties(articleDTO, Article.class);
        // 设置修改时间
        article.updateTime();
        // 3. 进行修改
        try {
            this.updateById(article);
            // 4. 删除缓存
            stringRedisTemplate.delete(RedisConstant.ARTICLE_REDIS_KEY);
            // 5. 修改ES中的数据（异步全量重建）
            threadPoolTaskExecutor.submit(this::addESArticle);
        } catch (Exception e) {
            // 失败路径用 ERROR 级别记录（原来误用 info）
            log.error("修改文章失败：", e);
            throw new RuntimeException(MsgConstant.ARTICLE_UPDATE_ERROR);
        }
    }

    /**
     * Looks up a single article by id.
     * <p>
     * Order of lookups: Redis article hash → penetration-guard key (a "junk" marker set
     * for ids known not to exist) → database. A database hit triggers an async cache
     * rebuild; a database miss writes the penetration-guard marker with a TTL.
     *
     * @param id article id
     * @return the article
     * @throws RuntimeException with {@link MsgConstant#ARTICLE_NO_EXISTS} when no such article exists
     */
    @Override
    public Article getArticleDetail(Long id) {
        // 1. 先查文章 Hash 缓存，命中直接反序列化返回
        Object cached = stringRedisTemplate.opsForHash().get(RedisConstant.ARTICLE_REDIS_KEY, id.toString());
        if (cached != null) {
            String articleJson = cached.toString();
            return JSONUtil.toBean(articleJson, Article.class);
        }
        // 2. 缓存击穿防护：若该 id 已被标记为垃圾数据，说明此前确认过不存在，直接抛出
        String junkData = stringRedisTemplate.opsForValue()
                .get(RedisConstant.ARTICLE_REDIS_ONE_KEY + id.toString());
        if (junkData != null) {
            throw new RuntimeException(MsgConstant.ARTICLE_NO_EXISTS);
        }
        // 3. 检查文章是否存在于数据库
        LambdaQueryWrapper<Article> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(Article::getId, id);
        if (this.count(wrapper) != 1) {
            // 不存在：写入带 TTL 的垃圾数据标记，拦截后续同 id 的无效请求
            stringRedisTemplate.opsForValue()
                    .set(RedisConstant.ARTICLE_REDIS_ONE_KEY + id.toString(), "junkData",
                            RedisConstant.ARTICLE_REDIS_TTL, TimeUnit.MINUTES);
            throw new RuntimeException(MsgConstant.ARTICLE_NO_EXISTS);
        }
        // 4. 存在：异步重建文章缓存，并从数据库返回
        threadPoolTaskExecutor.submit(() -> addArticleToRedis());
        return this.getById(id);
    }

    /**
     * Returns the total number of articles.
     *
     * @return article count
     * @throws RuntimeException with {@link MsgConstant#ARTICLE_SELECT_ERROR} when the query fails
     */
    @Override
    public Long getArticleCount() {
        try {
            // 直接返回统计结果，无需中间变量
            return this.count();
        } catch (Exception e) {
            log.error("获取文章数量失败：", e);
            throw new RuntimeException(MsgConstant.ARTICLE_SELECT_ERROR);
        }
    }

    /**
     * Returns all articles.
     * <p>
     * Reads from the Redis article hash when it exists, otherwise from the database.
     *
     * @return list of all articles (order of the cached variant follows the hash iteration order)
     */
    @Override
    public List<Article> getArticleAll() {
        // 1. 判断是否有缓存。hasKey() 返回 Boolean，可能为 null，
        // 用 Boolean.TRUE.equals 避免自动拆箱 NPE
        Boolean hasKey = stringRedisTemplate.hasKey(RedisConstant.ARTICLE_REDIS_KEY);
        if (Boolean.TRUE.equals(hasKey)) {
            // 2. 存在缓存，进行缓存查询
            Map<Object, Object> objectMap = stringRedisTemplate.opsForHash().entries(RedisConstant.ARTICLE_REDIS_KEY);
            // 3. 只需要 value，直接对 values 做流式转换为 Article 列表
            return objectMap.values().stream()
                    .map(value -> JSONUtil.toBean(value.toString(), Article.class))
                    .collect(Collectors.toList());
        } else {
            return this.list();
        }
    }


    /**
     * ES-driven paginated lookup backed by the Redis cache.
     * <p>
     * Queries Elasticsearch for article ids matching the filters (title match,
     * categoryId/status terms), then fetches the article bodies from the Redis hash.
     * Returns {@code null} whenever any stage misses so the caller can fall back to
     * the database.
     *
     * @param pageNum    1-based page number
     * @param pageSize   page size
     * @param title      optional fuzzy title filter
     * @param categoryId optional category filter
     * @param status     optional publish-status filter
     * @return a populated page, or {@code null} on ES failure / no hits / cache miss
     * @throws IOException if closing the ES client fails
     */
    private IPage<Article> searchArticlePageAndCache(Integer pageNum, Integer pageSize,
                                                     String title, Long categoryId,
                                                     Long status) throws IOException {
        log.info("开始进行ES分页查询");
        // 1. 使用ES进行条件查询出id
        // 1.1 获取ES客户端
        RestHighLevelClient client = ESUtil.getClient();
        // 1.2 准备Request
        SearchRequest request = new SearchRequest("article");
        // 创建BoolQueryBuilder对象
        BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
        // 动态添加条件（避免null值）
        if (StrUtil.isNotBlank(title)) {
            boolQuery.must(QueryBuilders.matchQuery("title", title));
        }
        if (categoryId != null) {
            boolQuery.must(QueryBuilders.termQuery("categoryId", categoryId));
        }
        if (status != null) {
            boolQuery.must(QueryBuilders.termQuery("status", status));
        }
        // 1.3 组织DSL参数
        request.source().query(boolQuery);
        // 1.4 设置分页参数
        request.source().from((pageNum - 1) * pageSize).size(pageSize);
        // 1.5 设置排序字段
        request.source().sort("createTime", SortOrder.DESC);
        // 1.6 执行查询
        List<Long> ids = null;
        long total = 0L;
        try {
            SearchResponse response = client.search(request, RequestOptions.DEFAULT);
            // 获取一级数据层
            SearchHits searchHits = response.getHits();
            total = searchHits.getTotalHits().value;
            log.info("ES中查询到的total：{}", total);
            // 获取二级数据层，真正包含需要的数据
            SearchHit[] hits = searchHits.getHits();
            // 使用stream流把每个命中的 source 中的 id 字段转换成 Long 集合
            ids = Arrays.stream(hits).map(hit -> {
                Map<String, Object> sourceAsMap = hit.getSourceAsMap();
                return Long.parseLong(sourceAsMap.get("id").toString());
            }).collect(Collectors.toList());
            log.info("ES查询文章成功:{}", ids);
        } catch (IOException e) {
            log.error("ES查询文章失败：", e);
            return null;
        } finally {
            client.close();
        }
        // 2. 判断ids是否有数据
        if (ids == null || ids.isEmpty()) {
            log.warn("未查询到符合条件的文档");
            return null;
        }
        // 3. 根据id从Redis的 Hash 中批量获取数据
        List<Object> objectList = stringRedisTemplate.opsForHash().multiGet(
                RedisConstant.ARTICLE_REDIS_KEY,
                ids.stream().map(String::valueOf).collect(Collectors.toList()));
        // 4. 判断是否成功获取数据
        if (objectList == null || objectList.isEmpty()) {
            return null;
        }
        // 5. 将数据转换为Article对象（multiGet 对缺失的 field 返回 null，需过滤）
        List<Article> articleList = objectList.stream()
                .filter(article -> article != null)
                .map(article -> JSONUtil.toBean(article.toString(), Article.class))
                .collect(Collectors.toList());
        // 6. 构建Page对象用于返回（带泛型，避免原始类型）
        Page<Article> iPage = new Page<>();
        iPage.setRecords(articleList);
        iPage.setTotal(total);
        return iPage;
    }

    /**
     * Loads all articles from the database and caches them into the Redis hash
     * keyed by article id (value = article JSON), with a TTL.
     */
    private void addArticleToRedis() {
        // 1. 查询数据库中的所有文章
        List<Article> articles = this.list();
        // 空表时跳过：对空 Map 执行 HMSET 是非法的 Redis 命令，会抛异常
        if (articles.isEmpty()) {
            return;
        }
        // 2. 构建成Map：key 为文章 id，value 为文章 JSON
        Map<String, String> articleMap = articles.stream().collect(Collectors.toMap(
                article -> article.getId().toString(),
                JSONUtil::toJsonStr
        ));
        // 3. 缓存进Redis中
        stringRedisTemplate.opsForHash().putAll(RedisConstant.ARTICLE_REDIS_KEY, articleMap);
        // 4. 设置过期时间
        stringRedisTemplate.expire(RedisConstant.ARTICLE_REDIS_KEY, RedisConstant.ARTICLE_REDIS_TTL, TimeUnit.MINUTES);
    }

    /**
     * Rebuilds the "article" Elasticsearch index: wipes all existing documents
     * with delete-by-query, then bulk-indexes every article from the database/cache.
     * <p>
     * NOTE(review): delete-then-bulk is not atomic — searches running in between may
     * see an empty index; confirm this is acceptable for this workload.
     */
    public void addESArticle() {
        RestHighLevelClient client = ESUtil.getClient();
        try {
            // 删除现有数据（match_all 全量删除）
            DeleteByQueryRequest deleteRequest = new DeleteByQueryRequest("article");
            deleteRequest.setQuery(new MatchAllQueryBuilder());
            client.deleteByQuery(deleteRequest, RequestOptions.DEFAULT);
            // 1. 查询数据库，获取数据
            List<Article> articleAll = this.getArticleAll();
            // 2. 构建bulk请求
            BulkRequest request = new BulkRequest();
            for (Article article : articleAll) {
                // 3. 把article数据转换成ES对应的Doc数据
                ArticleDoc articleDoc = BeanUtil.copyProperties(article, ArticleDoc.class);
                // 4. 把每个articleDoc数据添加进批量新增的请求
                request.add(
                        new IndexRequest("article")
                                .id(article.getId().toString())
                                .source(JSONUtil.toJsonStr(articleDoc), XContentType.JSON)
                );
            }
            // 5. 发起请求
            BulkResponse bulkItemResponses = client.bulk(request, RequestOptions.DEFAULT);
            // 使用日志框架输出（原来误用 System.out.println）
            log.info("批量新增成功:{}", (Object) bulkItemResponses.getItems());
        } catch (IOException e) {
            // 记录后再抛出，避免异步线程中静默失败
            log.error("ES索引重建失败：", e);
            throw new RuntimeException(e);
        } finally {
            try {
                client.close();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }
}
