package com.qqt.csr.im.service.sensitiveword;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.qqt.csr.common.acl.aics.AICustomerServiceClient;
import com.qqt.csr.common.acl.aics.resp.CsMemberInfoDTO;
import com.qqt.csr.common.enums.YesOrNotEnum;
import com.qqt.csr.common.exception.StatusCode;
import com.qqt.csr.common.utils.*;
import com.qqt.csr.common.vo.req.PageRequest;
import com.qqt.csr.common.vo.resp.PageResponse;
import com.qqt.csr.im.assembler.SensitiveWordRepoAssembler;
import com.qqt.csr.im.entity.SensitiveWord;
import com.qqt.csr.im.entity.SensitiveWordRepo;
import com.qqt.csr.im.mapper.SensitiveWordRepoMapper;
import com.qqt.csr.im.vo.req.SensitiveWordPageQueryRepVO;
import com.qqt.csr.im.vo.req.SensitiveWordRepoAddReqVO;
import com.qqt.csr.im.vo.req.SensitiveWordRepoDeleteReqVO;
import com.qqt.csr.im.vo.req.SensitiveWordRepoUpdateReqVO;
import com.qqt.csr.im.vo.resp.SensitiveWordRepoRespVO;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.redisson.api.BatchResult;
import org.redisson.api.RBatch;
import org.redisson.api.RedissonClient;
import org.springframework.stereotype.Service;

import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.stream.Collectors;

/**
 * 敏感词库 服务 — sensitive-word repository service.
 *
 * <p>Provides paging/detail queries, lock-guarded create and batch import,
 * soft delete, and a Redis read-through cache of repository state consumed by
 * {@link #getEnableMapBy(List)}.
 *
 * @author yang hao
 */
@Slf4j
@Service
@AllArgsConstructor
public class SensitiveWordRepoService extends ServiceImpl<SensitiveWordRepoMapper, SensitiveWordRepo> {

    /** Partition size for Redis batch (RBatch) round-trips. */
    private static final int REDIS_BATCH_SIZE = 100;

    private final SensitiveWordService sensitiveWordService;
    private final AICustomerServiceClient aiCustomerServiceClient;
    private final RedissonClient redissonClient;
    // NOTE(review): transactionUtil is never referenced in this class; confirm it is
    // still needed elsewhere before removing (removal changes the Lombok constructor).
    private final TransactionUtil transactionUtil;
    private final RedissonLockUtil redissonLockUtil;

    /**
     * Paged query of sensitive-word repositories, enriched with per-repository
     * word counts and creator (customer-service member) information.
     *
     * @param repVO page request carrying the query conditions
     * @return page of repository response VOs
     */
    public PageResponse<SensitiveWordRepoRespVO> page(PageRequest<SensitiveWordPageQueryRepVO> repVO) {
        // Paged query delegated to the mapper.
        IPage<SensitiveWordRepo> page = baseMapper.page(new Page<>(repVO.getPage(), repVO.getPageSize()), repVO.getData());
        List<SensitiveWordRepo> records = page.getRecords();
        // Word count per repository.
        List<Long> repoIds = records.stream().map(SensitiveWordRepo::getId).toList();
        Map<Long, Long> wordCounts = sensitiveWordService.getWordCounts(repVO.getData().getTenantId(), repoIds);
        // Creator info keyed by user id; first entry wins on duplicate keys.
        List<Long> creatorIds = records.stream().map(SensitiveWordRepo::getCreatorId).distinct().toList();
        Map<Long, CsMemberInfoDTO> memberInfo = aiCustomerServiceClient.queryByIdList(creatorIds)
                .stream().collect(Collectors.toMap(CsMemberInfoDTO::getUserId, v -> v, (o, n) -> o));
        // Assemble and return the response page.
        List<SensitiveWordRepoRespVO> result = SensitiveWordRepoAssembler.convert(records, wordCounts, memberInfo);
        return new PageResponse<>(page.getTotal(), repVO.getPageSize(), repVO.getPage(), result);
    }

    /**
     * Repository detail, scoped to a tenant.
     *
     * @param tenantId tenant the repository must belong to
     * @param id       repository id
     * @return detail response VO
     */
    public SensitiveWordRepoRespVO detail(String tenantId, Long id) {
        // Load the repository; fail fast when it does not exist for this tenant.
        SensitiveWordRepo repo = this.lambdaQuery().select().eq(SensitiveWordRepo::getId, id)
                .eq(SensitiveWordRepo::getTenantId, tenantId).one();
        ServiceAssert.notNull(repo, StatusCode.Common.NOT_EXIST.getCode(), "敏感词库不存在");
        // Word count for this repository.
        Long wordCount = sensitiveWordService.getCountByRepoId(id);
        Map<Long, CsMemberInfoDTO> memberInfo = aiCustomerServiceClient.queryByIdList(Lists.newArrayList(repo.getCreatorId()))
                .stream().collect(Collectors.toMap(CsMemberInfoDTO::getUserId, v -> v, (o, n) -> o));
        // Assemble and return.
        return SensitiveWordRepoAssembler.convert(repo, wordCount, memberInfo);
    }

    /**
     * Creates a sensitive-word repository. Serialized per tenant through a
     * distributed lock so the name-uniqueness check and the insert are atomic
     * with respect to {@link #batchSave(String, Long, Map)}.
     *
     * @param tenantId tenant the repository belongs to
     * @param reqVO    create request
     * @return {@code true} when the repository was persisted
     */
    public boolean save(String tenantId, SensitiveWordRepoAddReqVO reqVO) {
        String cacheKey = String.format(CacheKeyUtil.IM.IMPORT_SW_REPO_LOCK, tenantId);
        return redissonLockUtil.distributedLock(() -> {
            SensitiveWordRepo repo = SensitiveWordRepoAssembler.convert(reqVO);
            SensitiveWordRepo wordRepo = this.lambdaQuery().select(SensitiveWordRepo::getId)
                    .eq(SensitiveWordRepo::getName, repo.getName())
                    .eq(SensitiveWordRepo::getTenantId, tenantId)
                    .one();
            // FIX: the duplicate-name guard was inverted (`wordRepo != null`), which
            // rejected every fresh name and let duplicates through. ServiceAssert.isTrue
            // throws when the condition is false (see its usage in update/batchDelete),
            // so the name must NOT already exist for the save to proceed.
            ServiceAssert.isTrue(wordRepo == null, StatusCode.Common.CUSTOMIZE_ERROR_MESSAGE.getCode(), "敏感词库名称已存在");
            // Persist the repository.
            return save(repo);
        }, cacheKey);
    }

    /**
     * Batch import: creates any missing repositories by name, then adds the
     * given words to their repositories. Repository lookup/creation is guarded
     * by the same per-tenant lock as {@link #save(String, SensitiveWordRepoAddReqVO)}.
     *
     * @param tenantId  tenant; no-op when blank
     * @param creatorId creator recorded on new repositories and words
     * @param dataMap   repository name -> set of words; no-op when empty
     */
    public void batchSave(String tenantId, Long creatorId, Map<String, Set<String>> dataMap) {
        if (StringUtils.isBlank(tenantId) || MapUtils.isEmpty(dataMap)) {
            return;
        }

        List<SensitiveWord> saveWordList = new ArrayList<>();

        String cacheKey = String.format(CacheKeyUtil.IM.IMPORT_SW_REPO_LOCK, tenantId);
        redissonLockUtil.distributedLock(() -> {
            // Existing repositories for the requested names, keyed by name.
            Map<String, Long> existRepoMap = this.lambdaQuery().select(SensitiveWordRepo::getId, SensitiveWordRepo::getName)
                    .eq(SensitiveWordRepo::getTenantId, tenantId)
                    .in(SensitiveWordRepo::getName, dataMap.keySet())
                    .list()
                    .stream().collect(Collectors.toMap(SensitiveWordRepo::getName, SensitiveWordRepo::getId));

            List<SensitiveWordRepo> saveRepoList = new ArrayList<>();
            dataMap.forEach((repoName, wordList) -> {
                Long repoId = existRepoMap.get(repoName);
                if (repoId == null) {
                    // NOTE(review): assumes the assembler pre-assigns the entity id
                    // (e.g. snowflake) before insert — verify, otherwise the words
                    // below would reference a null repo id.
                    SensitiveWordRepo repo = SensitiveWordRepoAssembler.convert(tenantId, creatorId, repoName);
                    saveRepoList.add(repo);
                    repoId = repo.getId();
                }
                // Skip blank words; everything else is queued for insertion.
                for (String word : wordList) {
                    if (StringUtils.isNotBlank(word)) {
                        saveWordList.add(SensitiveWord.of(tenantId, creatorId, repoId, word));
                    }
                }
            });
            if (CollectionUtils.isNotEmpty(saveRepoList)) {
                saveBatch(saveRepoList);
            }
        }, cacheKey);

        // Words are inserted outside the lock; only repository creation is serialized.
        sensitiveWordService.batchAdd(tenantId, saveWordList);
    }

    /**
     * Updates a sensitive-word repository and evicts its cache entry on success.
     *
     * @param tenantId tenant the repository must belong to
     * @param reqVO    update request
     * @return {@code true} when the row was updated
     */
    public boolean update(String tenantId, SensitiveWordRepoUpdateReqVO reqVO) {
        // Existence + tenant-ownership check; both failures surface as "not exist".
        SensitiveWordRepo wordRepo = this.lambdaQuery().select(SensitiveWordRepo::getTenantId)
                .eq(SensitiveWordRepo::getId, reqVO.getId()).one();
        ServiceAssert.notNull(wordRepo, StatusCode.Common.NOT_EXIST.getCode(), "敏感词库不存在");
        ServiceAssert.isTrue(tenantId.equals(wordRepo.getTenantId()), StatusCode.Common.NOT_EXIST.getCode(), "敏感词库不存在");

        // Assemble and persist the update.
        SensitiveWordRepo repo = SensitiveWordRepoAssembler.convert(reqVO);
        boolean result = updateById(repo);
        if (result) {
            clearCache(repo.getId());
        }
        return result;
    }

    /**
     * Soft-deletes a sensitive-word repository and evicts its cache entry on success.
     *
     * @param tenantId tenant the repository must belong to
     * @param id       repository id
     * @return {@code true} when the row was marked deleted
     */
    public boolean delete(String tenantId, Long id) {
        // Existence + tenant-ownership check; both failures surface as "not exist".
        SensitiveWordRepo repo = getById(id);
        ServiceAssert.notNull(repo, StatusCode.Common.NOT_EXIST.getCode(), "敏感词库不存在");
        ServiceAssert.isTrue(tenantId.equals(repo.getTenantId()), StatusCode.Common.NOT_EXIST.getCode(), "敏感词库不存在");
        // Consistency: use the enum instead of the magic literal 1 (matches batchDelete).
        boolean result = this.lambdaUpdate().set(SensitiveWordRepo::getDeleteFlag, YesOrNotEnum.YES.getValue())
                .set(SensitiveWordRepo::getUpdateTime, new Date())
                .eq(SensitiveWordRepo::getId, id)
                .eq(SensitiveWordRepo::getTenantId, tenantId)
                .update();
        if (result) {
            clearCache(id);
        }
        return result;
    }

    /**
     * Soft-deletes a batch of sensitive-word repositories and evicts their cache
     * entries. Throws "not exist" when no row was affected.
     *
     * @param tenantId tenant the repositories must belong to
     * @param reqVO    batch delete request; {@code false} when the id list is empty
     * @return {@code true} on success (a failed update throws instead)
     */
    public boolean batchDelete(String tenantId, SensitiveWordRepoDeleteReqVO reqVO) {
        if (CollectionUtils.isEmpty(reqVO.getIds())) {
            return false;
        }
        // Soft-delete all requested repositories scoped to the tenant.
        boolean result = this.lambdaUpdate().set(SensitiveWordRepo::getDeleteFlag, YesOrNotEnum.YES.getValue())
                .set(SensitiveWordRepo::getUpdateTime, new Date())
                .in(SensitiveWordRepo::getId, reqVO.getIds())
                .eq(SensitiveWordRepo::getTenantId, tenantId)
                .update();
        ServiceAssert.isTrue(result, StatusCode.Common.NOT_EXIST.getCode(), "敏感词库不存在");
        clearCache(reqVO.getIds());
        return result;
    }

    /**
     * Returns the enabled, not-deleted repositories among the given ids, keyed
     * by id. Read-through cache: Redis first, then DB for misses; misses are
     * back-filled into Redis, and ids absent from the DB are cached as a
     * deleted+disabled placeholder to avoid repeated DB lookups.
     *
     * @param repoIdList repository ids; empty input yields an empty map
     * @return id -> enabled repository
     */
    public Map<Long, SensitiveWordRepo> getEnableMapBy(List<Long> repoIdList) {
        if (CollectionUtils.isEmpty(repoIdList)) {
            return Maps.newHashMap();
        }
        List<SensitiveWordRepo> resultList = new ArrayList<>();
        // 1) Read the cache in batches.
        Lists.partition(repoIdList, REDIS_BATCH_SIZE).forEach(pList -> {
            RBatch redisBatch = redissonClient.createBatch();
            pList.forEach(repoId -> redisBatch.getBucket(repoCacheKey(repoId)).getAsync());
            BatchResult<?> batchResult = redisBatch.execute();
            List<?> objectList = Optional.ofNullable(batchResult.getResponses()).orElseGet(Lists::newArrayList);
            objectList.forEach(obj -> {
                if (obj != null) {
                    resultList.add(JsonUtil.toObject(obj.toString(), SensitiveWordRepo.class));
                }
            });
        });

        // 2) Collect ids the cache did not answer.
        List<Long> needQueryDbRepoIdList = repoIdList.stream()
                .filter(repoId -> resultList.stream().noneMatch(result -> result.getId().equals(repoId)))
                .collect(Collectors.toList());
        if (CollectionUtils.isEmpty(needQueryDbRepoIdList)) {
            return toEnableMap(resultList);
        }

        // 3) Load the misses from the DB (not-deleted rows only).
        Map<Long, SensitiveWordRepo> dbMap = this.lambdaQuery().select()
                .in(SensitiveWordRepo::getId, needQueryDbRepoIdList)
                .eq(SensitiveWordRepo::getDeleteFlag, YesOrNotEnum.NOT.getValue())
                .list().stream().collect(Collectors.toMap(SensitiveWordRepo::getId, v -> v, (o, n) -> o));

        // 4) Back-fill the cache; ids with no DB row get a deleted+disabled placeholder.
        Lists.partition(needQueryDbRepoIdList, REDIS_BATCH_SIZE).forEach(pList -> {
            RBatch redisBatch = redissonClient.createBatch();
            pList.forEach(repoId -> {
                SensitiveWordRepo dbData = dbMap.get(repoId);
                if (dbData == null) {
                    dbData = SensitiveWordRepo.builder().id(repoId).deleteFlag(YesOrNotEnum.YES.getValue()).enable(YesOrNotEnum.NOT.getValue()).build();
                }
                redisBatch.getBucket(repoCacheKey(repoId)).setAsync(JsonUtil.toJson(dbData), Duration.of(CacheKeyUtil.EXPIRE_TIME_8H, ChronoUnit.MILLIS));
                resultList.add(dbData);
            });
            redisBatch.execute();
        });

        return toEnableMap(resultList);
    }

    /** Keeps only enabled repositories and indexes them by id (first wins on duplicates). */
    private Map<Long, SensitiveWordRepo> toEnableMap(List<SensitiveWordRepo> repoList) {
        return repoList.stream()
                .filter(Objects::nonNull)
                .filter(v -> v.getEnable().equals(YesOrNotEnum.YES.getValue()))
                .collect(Collectors.toMap(SensitiveWordRepo::getId, v -> v, (o, n) -> o));
    }

    /** Redis key for a single repository's cached state. */
    private String repoCacheKey(Long repoId) {
        return String.format(CacheKeyUtil.IM.SENSITIVE_WORD_REPO_KEY, repoId);
    }

    /** Evicts the cache entry of a single repository. */
    private void clearCache(Long repoId) {
        this.clearCache(Lists.newArrayList(repoId));
    }

    /**
     * Evicts the cache entries of the given repositories. Single-id calls use a
     * plain bucket delete; larger lists are deleted via async RBatch in
     * partitions of {@link #REDIS_BATCH_SIZE}.
     */
    private void clearCache(List<Long> repoIdList) {
        if (CollectionUtils.isEmpty(repoIdList)) {
            return;
        }

        if (repoIdList.size() == 1) {
            redissonClient.getBucket(repoCacheKey(repoIdList.get(0))).deleteAsync();
            return;
        }

        Lists.partition(repoIdList, REDIS_BATCH_SIZE).forEach(pList -> {
            RBatch redisBatch = redissonClient.createBatch();
            pList.forEach(repoId -> redisBatch.getBucket(repoCacheKey(repoId)).deleteAsync());
            // Fire-and-forget: eviction does not need to block the caller.
            redisBatch.executeAsync();
        });
    }

}
