package com.ruoyi.teaching.service.impl;

import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;

import com.alibaba.nacos.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.ruoyi.common.core.exception.ServiceException;
import com.ruoyi.common.core.utils.DateUtils;
import com.ruoyi.common.core.utils.StringUtils;
import com.ruoyi.common.core.utils.bean.BeanUtils;
import com.ruoyi.teaching.bo.InterviewMasteryDegreeListBo;
import com.ruoyi.teaching.bo.QueryInterviewMasteryDegreeBo;
import com.ruoyi.teaching.index.InterviewIndex;
import com.ruoyi.teaching.utils.IKAnalyzerUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.elasticsearch.index.query.QueryBuilders;
import org.redisson.api.RLock;
import org.redisson.api.RedissonClient;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.IndexOperations;
import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.data.elasticsearch.core.query.NativeSearchQuery;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.stereotype.Service;
import com.ruoyi.teaching.mapper.HighInterviewMapper;
import com.ruoyi.teaching.domain.HighInterview;
import com.ruoyi.teaching.service.IHighInterviewService;
import org.springframework.transaction.annotation.Transactional;

/**
 * 高频面试题Service业务层处理
 *
 * @author ruoyi
 * @date 2024-04-02
 */
@Service
@Slf4j
public class HighInterviewServiceImpl implements IHighInterviewService
{
    /** MQ exchange / routing keys used to propagate MySQL changes to the ES sync consumer. */
    private static final String INTERVIEW_EXCHANGE = "edu.interview.exchange";
    private static final String ROUTING_KEY_INSERT = "interview.insert";
    private static final String ROUTING_KEY_UPDATE = "interview.update";
    private static final String ROUTING_KEY_DELETE = "interview.delete";

    /** Distributed-lock key guarding the full ES export job (cluster-wide mutual exclusion). */
    private static final String EXPORT_ES_LOCK_KEY = "export-es-lock";

    /**
     * Dedicated executor for the asynchronous full sync: 1 core / 1 max thread,
     * queue capacity 1, AbortPolicy — at most one job running and one queued;
     * any further submission is rejected with {@link RejectedExecutionException}.
     */
    private static final ExecutorService SINGLE_THREAD_POOL;

    static {
        ThreadFactory namedThreadFactory = new ThreadFactoryBuilder()
                .setNameFormat("export-es-%d").build();

        SINGLE_THREAD_POOL = new ThreadPoolExecutor(1, 1,
            0L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<>(1), namedThreadFactory, new ThreadPoolExecutor.AbortPolicy());
    }

    @Autowired
    private RabbitTemplate rabbitTemplate;

    @Autowired
    private HighInterviewMapper highInterviewMapper;

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;

    @Autowired
    private RedissonClient redissonClient;

    /**
     * Progress (0-100) of the latest/ongoing full sync.
     * Written by the export worker thread, read by {@link #queryPercentage()};
     * volatile guarantees cross-thread visibility.
     */
    private volatile Integer percentage = 0;

    /**
     * 查询高频面试题 (query one high-frequency interview question).
     *
     * @param id 高频面试题主键 (primary key)
     * @return 高频面试题, or {@code null} if not found
     */
    @Override
    public HighInterview selectHighInterviewById(Long id)
    {
        return highInterviewMapper.selectHighInterviewById(id);
    }

    /**
     * 查询高频面试题列表 (query a list of questions matching the given example).
     *
     * @param highInterview 高频面试题 filter/example object
     * @return matching 高频面试题 rows
     */
    @Override
    public List<HighInterview> selectHighInterviewList(HighInterview highInterview)
    {
        return highInterviewMapper.selectHighInterviewList(highInterview);
    }

    /**
     * 新增高频面试题 (insert), then notify the ES-sync consumer via MQ.
     *
     * <p>NOTE(review): the MQ message is published inside the transaction, before
     * commit — a rollback after publish would leave ES out of sync. Consider a
     * transactional outbox / after-commit hook if that matters here.
     *
     * @param highInterview 高频面试题
     * @return number of rows inserted
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public int insertHighInterview(HighInterview highInterview)
    {
        highInterview.setCreateTime(DateUtils.getNowDate());
        int count = highInterviewMapper.insertHighInterview(highInterview);
        if (count > 0) {
            rabbitTemplate.convertAndSend(INTERVIEW_EXCHANGE, ROUTING_KEY_INSERT, highInterview);
        }
        return count;
    }

    /**
     * 修改高频面试题 (update), then notify the ES-sync consumer via MQ.
     *
     * @param highInterview 高频面试题
     * @return number of rows updated
     */
    @Override
    @Transactional(rollbackFor = Exception.class) // consistent with insert: DB write + MQ publish
    public int updateHighInterview(HighInterview highInterview)
    {
        highInterview.setUpdateTime(DateUtils.getNowDate());
        int count = highInterviewMapper.updateHighInterview(highInterview);
        if (count > 0) {
            rabbitTemplate.convertAndSend(INTERVIEW_EXCHANGE, ROUTING_KEY_UPDATE, highInterview);
        }
        return count;
    }

    /**
     * 批量删除高频面试题 (batch delete), then notify the ES-sync consumer via MQ.
     *
     * @param ids 需要删除的高频面试题主键 (primary keys to delete)
     * @return number of rows deleted
     */
    @Override
    @Transactional(rollbackFor = Exception.class) // consistent with insert: DB write + MQ publish
    public int deleteHighInterviewByIds(Long[] ids)
    {
        int count = highInterviewMapper.deleteHighInterviewByIds(ids);
        if (count > 0) {
            rabbitTemplate.convertAndSend(INTERVIEW_EXCHANGE, ROUTING_KEY_DELETE, Arrays.asList(ids));
        }
        return count;
    }

    /**
     * 删除高频面试题信息 (delete a single row). No MQ notification is sent here —
     * presumably only the batch path is used by the UI; TODO confirm.
     *
     * @param id 高频面试题主键
     * @return number of rows deleted
     */
    @Override
    public int deleteHighInterviewById(Long id)
    {
        return highInterviewMapper.deleteHighInterviewById(id);
    }

    /**
     * Query interview-question mastery statistics.
     *
     * @param queryInterviewMasteryDegreeBo query conditions
     * @return mastery-degree rows
     */
    @Override
    public List<InterviewMasteryDegreeListBo> selectInterviewMasteryDegree(QueryInterviewMasteryDegreeBo queryInterviewMasteryDegreeBo) {
        return highInterviewMapper.selectInterviewMasteryDegree(queryInterviewMasteryDegreeBo);
    }

    /**
     * Build the course-design detail view: keywords extracted from the content,
     * plus related interview questions (already-linked ones if any, otherwise
     * candidates found by full-text search in Elasticsearch).
     *
     * @param courseDesignId      course design primary key
     * @param courseDesignContent course design text; must not be empty
     * @return map with keys {@code relatedInterviewList}, {@code keywords},
     *         {@code relatedInterviewIdList}
     * @throws ServiceException if {@code courseDesignContent} is empty
     */
    @Override
    @Transactional(readOnly = true)
    public Map<String, Object> queryCourseDesignList(Long courseDesignId, String courseDesignContent) {
        if (StringUtils.isEmpty(courseDesignContent)) {
            // ServiceException (extends RuntimeException) for consistency with exportEs()
            throw new ServiceException("课程设计内容不能为空");
        }
        Map<String, Object> result = new HashMap<>(4);
        // 1. Questions already linked to this course design.
        List<HighInterview> highInterviews = highInterviewMapper.selectHighInterviewByCourseDesignId(courseDesignId);

        // 2. Tokenize the content with the IK analyzer (smart mode).
        Set<String> keywords = IKAnalyzerUtils.parseSentence(courseDesignContent, true);

        // 3. Only when nothing is linked yet: search ES for candidate questions
        //    by matching the content against the combined "all" field.
        if (CollectionUtils.isEmpty(highInterviews)) {
            NativeSearchQuery searchQuery = new NativeSearchQueryBuilder()
                    .withQuery(QueryBuilders.matchQuery("all", courseDesignContent))
                    .build();

            SearchHits<InterviewIndex> searchHits = elasticsearchRestTemplate.search(searchQuery, InterviewIndex.class);

            // Map ES hits back to lightweight HighInterview objects (id + question only).
            highInterviews = searchHits.stream()
                    .map(hit -> {
                        HighInterview highInterview = new HighInterview();
                        InterviewIndex interviewIndex = hit.getContent();
                        highInterview.setId(interviewIndex.getId());
                        highInterview.setQuestion(interviewIndex.getQuestion());
                        return highInterview;
                    })
                    .collect(Collectors.toList());
        }

        result.put("relatedInterviewList", highInterviews);
        result.put("keywords", keywords);
        result.put("relatedInterviewIdList", highInterviews.stream().map(HighInterview::getId).collect(Collectors.toList()));
        return result;
    }

    /**
     * Find interview questions whose keywords intersect the given list.
     *
     * @param keywords keyword list; may be null/empty
     * @return matching questions, or an empty list when no keywords are given
     */
    @Override
    @Transactional(readOnly = true)
    public List<HighInterview> queryRelatedInterviewByKeywords(List<String> keywords) {
        if (CollectionUtils.isEmpty(keywords)) {
            return Collections.emptyList();
        }
        return highInterviewMapper.selectHighInterviewListByKeywords(new HashSet<>(keywords));
    }

    /**
     * Trigger a full MySQL → Elasticsearch re-sync, guarded by a Redisson
     * distributed lock so only one sync runs cluster-wide.
     *
     * @throws ServiceException if a sync is already in progress
     */
    @Override
    public void exportEs() {
        // Reject concurrent syncs up front: fail fast if the lock is taken.
        RLock lock = redissonClient.getLock(EXPORT_ES_LOCK_KEY);
        if (!lock.tryLock()) {
            throw new ServiceException("当前有线程正在全量同步");
        }
        try {
            SINGLE_THREAD_POOL.submit(this::doExportEs);
        } catch (RejectedExecutionException e) {
            // BUGFIX: the bounded queue + AbortPolicy can reject the task after the
            // lock was acquired; without this the lock leaked and every later call failed.
            lock.unlock();
            throw new ServiceException("当前有线程正在全量同步");
        }
    }

    /**
     * Worker body of the full sync: rebuilds the index, then pages through
     * MySQL copying rows into ES while updating {@link #percentage}.
     */
    private void doExportEs() {
        RLock lock = redissonClient.getLock(EXPORT_ES_LOCK_KEY);
        try {
            // 1. Rebuild the index (drop + create + mapping).
            createIndex();
            // 2. Full sync, page by page.
            int pageNo = 1;
            int pageSize = 10; // rows fetched from MySQL per round trip
            int size;          // rows actually read in the current page
            percentage = 0;
            int saveCount = 0;
            do {
                Page<HighInterview> page = highInterviewMapper.selectPage(new Page<>(pageNo++, pageSize), null);
                long total = page.getTotal();
                size = page.getRecords().size();
                // Convert each row to its ES document; "all" is the combined
                // full-text search field (question + knowledge point).
                List<InterviewIndex> interviewIndices = page.getRecords().stream().map(highInterview -> {
                    InterviewIndex interviewIndex = new InterviewIndex();
                    interviewIndex.setAll(highInterview.getQuestion() + highInterview.getKnowledgePoint());
                    BeanUtils.copyProperties(highInterview, interviewIndex);
                    return interviewIndex;
                }).collect(Collectors.toList());
                if (!interviewIndices.isEmpty()) {
                    elasticsearchRestTemplate.save(interviewIndices);
                }

                // Progress as an integer percentage.
                // BUGFIX: the old `saveCount / (total * 0.01)` divided by zero
                // (NaN) when the table was empty; guard total == 0.
                saveCount += size;
                percentage = total == 0 ? 100 : (int) (saveCount * 100L / total);
                log.debug("全量同步进度:{}", percentage);
            } while (size == pageSize); // stop when a partial (or empty) page is read
        } catch (Exception e) {
            // BUGFIX: exceptions previously escaped into the ignored Future and
            // were silently lost; log them so failed syncs are diagnosable.
            log.error("exportEs full sync failed", e);
        } finally {
            // BUGFIX: Redisson unlock() is thread-bound and the lock was acquired
            // by the caller thread in exportEs(); calling unlock() here threw
            // IllegalMonitorStateException and leaked the lock permanently.
            // forceUnlock() releases it regardless of owning thread.
            lock.forceUnlock();
        }
    }

    /**
     * @return current full-sync progress, 0-100
     */
    @Override
    public Integer queryPercentage() {
        return percentage;
    }

    /**
     * Drop and recreate the interview index with the mapping derived from
     * {@link InterviewIndex} annotations.
     */
    public void createIndex() {
        IndexOperations indexOperations = elasticsearchRestTemplate.indexOps(InterviewIndex.class);
        indexOperations.delete();
        indexOperations.create();
        indexOperations.putMapping(indexOperations.createMapping(InterviewIndex.class));
    }
}
