package com.qqt.csr.im.service.sensitiveword;

import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson2.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.qqt.csr.common.acl.aics.AICustomerServiceClient;
import com.qqt.csr.common.acl.aics.resp.CsMemberInfoDTO;
import com.qqt.csr.common.enums.YesOrNotEnum;
import com.qqt.csr.common.exception.StatusCode;
import com.qqt.csr.common.sensitiveword.SensitiveWordMsg;
import com.qqt.csr.common.sensitiveword.SensitiveWordReplaceHandler;
import com.qqt.csr.common.session.SessionContextHolder;
import com.qqt.csr.common.utils.JsonUtil;
import com.qqt.csr.common.utils.ServiceAssert;
import com.qqt.csr.common.utils.SnowflakeUtil;
import com.qqt.csr.common.vo.req.PageRequest;
import com.qqt.csr.common.vo.resp.PageResponse;
import com.qqt.csr.im.dto.SensitiveWordCountDTO;
import com.qqt.csr.im.dto.SensitiveWordRefreshDTO;
import com.qqt.csr.im.dto.SensitiveWordResultDTO;
import com.qqt.csr.im.dto.SensitiveWordTagDTO;
import com.qqt.csr.im.entity.Account;
import com.qqt.csr.im.entity.SensitiveWord;
import com.qqt.csr.im.entity.SensitiveWordRepo;
import com.qqt.csr.im.entity.SensitiveWordRule;
import com.qqt.csr.im.enums.SensitiveWordRepoType;
import com.qqt.csr.im.mapper.SensitiveWordMapper;
import com.qqt.csr.im.vo.req.QueryingSensitiveWordReqVO;
import com.qqt.csr.im.vo.resp.SensitiveWordRespVO;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.*;
import java.util.stream.Collectors;

@Slf4j
@Service
public class SensitiveWordService extends ServiceImpl<SensitiveWordMapper, SensitiveWord> {
    @Autowired
    private AICustomerServiceClient aiCustomerServiceClient;
    @Autowired
    private SensitiveWordBsFactory sensitiveWordBsFactory;
    @Autowired
    private SensitiveWordReplaceHandler sensitiveWordReplaceHandler;
    @Autowired
    private SensitiveWordFilterResultHandler sensitiveWordFilterResultHandler;
    @Autowired
    private SensitiveWordTagService sensitiveWordTagService;
    @Autowired
    private SensitiveWordRepoService sensitiveWordRepoService;
    @Autowired
    private SensitiveWordRuleService sensitiveWordRuleService;


    /**
     * Batch-updates the given sensitive words through a custom mapper statement.
     *
     * @param list words to update
     * @return number of affected rows
     */
    public int updateBatch(List<SensitiveWord> list) {
        return baseMapper.updateBatch(list);
    }

    /**
     * Batch-inserts the given sensitive words through a custom mapper statement.
     *
     * @param list words to insert
     * @return number of inserted rows
     */
    public int batchInsert(List<SensitiveWord> list) {
        return baseMapper.batchInsert(list);
    }


    /**
     * Counts the sensitive words contained in each of the given repos.
     *
     * @param tenantId tenant the repos belong to
     * @param repoIds  repo ids to count words for
     * @return mapping of repo id to word count; empty map when {@code repoIds} is empty
     */
    public Map<Long, Long> getWordCounts(String tenantId, List<Long> repoIds) {
        // No repos requested -> empty mapping (never null).
        if (CollectionUtils.isEmpty(repoIds)) {
            return Maps.newHashMap();
        }
        List<SensitiveWordCountDTO> sensitiveWordCountDTOList = baseMapper.countByRepoIds(repoIds, tenantId);
        // Keep the first count on the (unexpected) case of duplicate repo ids.
        return sensitiveWordCountDTOList.stream()
                .collect(Collectors.toMap(SensitiveWordCountDTO::getRepoId, SensitiveWordCountDTO::getCount, (o, n) -> o));
    }

    /**
     * Counts the sensitive words contained in a single repo.
     *
     * @param repoId repo id; {@code null} yields 0
     * @return number of words in the repo
     */
    public Long getCountByRepoId(Long repoId) {
        if (repoId == null) {
            return 0L;
        }
        LambdaQueryWrapper<SensitiveWord> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(SensitiveWord::getRepoId, repoId);
        return baseMapper.selectCount(wrapper);
    }

    /**
     * Deletes every sensitive word belonging to the given repo.
     *
     * @param repoId repo id; {@code null} is a no-op
     * @return {@code true} when at least one row was deleted
     */
    @SuppressWarnings("all")
    public boolean deleteByRepoId(Long repoId) {
        if (repoId == null) {
            return false;
        }
        LambdaQueryWrapper<SensitiveWord> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(SensitiveWord::getRepoId, repoId);
        return baseMapper.delete(wrapper) > 0;
    }

    /**
     * Deletes every sensitive word belonging to any of the given repos.
     *
     * @param repoIds repo ids; {@code null}/empty is a no-op
     * @return {@code true} when at least one row was deleted
     */
    @SuppressWarnings("all")
    public boolean batchDeleteByRepoIds(List<Long> repoIds) {
        if (repoIds == null || repoIds.isEmpty()) {
            return false;
        }
        LambdaQueryWrapper<SensitiveWord> wrapper = new LambdaQueryWrapper<>();
        wrapper.in(SensitiveWord::getRepoId, repoIds);
        return baseMapper.delete(wrapper) > 0;
    }

    /**
     * Adds one or more sensitive words to a repo. {@code name} may contain several
     * words separated by ASCII or full-width semicolons; words already present in
     * the repo (and duplicates within the input itself) are skipped.
     *
     * @param name      semicolon-separated word list
     * @param repoId    target repo id
     * @param tenantId  owning tenant
     * @param creatorId creating user id
     */
    public void add(String name, Long repoId, String tenantId, Long creatorId) {
        if (StringUtils.isBlank(name) || repoId == null || repoId <= 0L) {
            return;
        }

        // Normalize full-width "；" to ";" so a single split handles both separators.
        name = StringUtils.replaceChars(name, "；", StringPool.SEMICOLON);
        List<String> words = Lists.newArrayList(StringUtils.split(name, StringPool.SEMICOLON));

        Set<String> existNames = this.lambdaQuery().select(SensitiveWord::getName)
                .eq(SensitiveWord::getTenantId, tenantId)
                .eq(SensitiveWord::getRepoId, repoId)
                .list()
                .stream().map(SensitiveWord::getName).collect(Collectors.toSet());

        // distinct() guards against the same word appearing twice in the input,
        // which the DB-existence check alone cannot catch.
        List<SensitiveWord> sensitiveWordList = words.stream()
                .distinct()
                .filter(w -> !existNames.contains(w))
                .map(w -> SensitiveWord.of(tenantId, creatorId, repoId, w))
                .collect(Collectors.toList());

        if (CollectionUtils.isNotEmpty(sensitiveWordList)) {
            this.batchInsert(sensitiveWordList);
        }
    }

    /**
     * Batch-creates sensitive words across multiple repos, skipping words that
     * already exist in their target repo as well as (repoId, name) duplicates
     * within the input batch.
     *
     * @param tenantId owning tenant
     * @param list     candidate words (each carries its own repoId and name)
     */
    public void batchAdd(String tenantId, List<SensitiveWord> list) {
        if (StringUtils.isBlank(tenantId) || CollectionUtils.isEmpty(list)) {
            return;
        }
        Set<Long> repoIdSet = list.stream().map(SensitiveWord::getRepoId).collect(Collectors.toSet());
        // repoId -> set of names already stored, for O(1) duplicate lookups
        // (the previous per-word linear scan was O(n*m)).
        Map<Long, Set<String>> existNameMap = this.lambdaQuery().select(SensitiveWord::getRepoId, SensitiveWord::getName)
                .eq(SensitiveWord::getTenantId, tenantId)
                .in(SensitiveWord::getRepoId, repoIdSet)
                .list().stream()
                .collect(Collectors.groupingBy(SensitiveWord::getRepoId,
                        Collectors.mapping(SensitiveWord::getName, Collectors.toSet())));

        // Also dedupe (repoId, name) pairs inside the incoming batch itself.
        Set<String> seen = new HashSet<>();
        List<SensitiveWord> saveList = list.stream()
                .filter(w -> !existNameMap.getOrDefault(w.getRepoId(), Collections.emptySet()).contains(w.getName()))
                .filter(w -> seen.add(w.getRepoId() + StringPool.COLON + w.getName()))
                .collect(Collectors.toList());

        if (CollectionUtils.isNotEmpty(saveList)) {
            this.batchInsert(saveList);
        }
    }

    /**
     * Edits a sensitive word. Implemented as delete-then-insert (new id, original
     * create time preserved) so the periodic refresh job, which uses update time
     * as a watermark, picks the change up.
     *
     * @param sensitiveWord edited word; must carry id and tenantId
     */
    @Transactional(rollbackFor = Throwable.class)
    public void edit(SensitiveWord sensitiveWord) {
        if (sensitiveWord == null) {
            return;
        }

        SensitiveWord dbWord = this.lambdaQuery().select()
                .eq(SensitiveWord::getId, sensitiveWord.getId())
                .eq(SensitiveWord::getTenantId, sensitiveWord.getTenantId())
                .one();
        ServiceAssert.notNull(dbWord, StatusCode.Common.NOT_EXIST.getCode(), "敏感词已被删除，编辑失败！");
        // BUGFIX: exclude the record being edited, otherwise saving a word without
        // renaming it matches itself and fails the uniqueness assertion below.
        SensitiveWord existWord = this.lambdaQuery().select(SensitiveWord::getId)
                .eq(SensitiveWord::getTenantId, sensitiveWord.getTenantId())
                .eq(SensitiveWord::getRepoId, sensitiveWord.getRepoId())
                .eq(SensitiveWord::getName, sensitiveWord.getName())
                .ne(SensitiveWord::getId, sensitiveWord.getId())
                .one();
        ServiceAssert.isTrue(existWord == null, StatusCode.Common.UNIQUE_ERROR.getCode(), "相同名称敏感词已存在，编辑失败！");
        // Delete first, then re-insert under a fresh id (see method comment).
        this.delete(Lists.newArrayList(sensitiveWord.getId()), sensitiveWord.getTenantId());
        sensitiveWord.setId(SnowflakeUtil.nextId());
        sensitiveWord.setCreateTime(dbWord.getCreateTime());
        sensitiveWord.setEnable(dbWord.getEnable());
        sensitiveWord.setDeleteFlag(dbWord.getDeleteFlag());
        sensitiveWord.setCreatorId(dbWord.getCreatorId());
        // NOTE(review): repoId is always reset to the stored value, i.e. a word
        // cannot be moved between repos here — confirm this is intentional.
        sensitiveWord.setRepoId(dbWord.getRepoId());
        this.save(sensitiveWord);
    }

    /**
     * Soft-deletes the given sensitive words (sets the delete flag and bumps the
     * update time so the refresh watermark sees the change).
     *
     * @param swIdList word ids to delete; empty is a no-op
     * @param tenantId owning tenant (scopes the update)
     */
    public void delete(List<Long> swIdList, String tenantId) {
        if (CollectionUtils.isEmpty(swIdList)) {
            return;
        }
        this.lambdaUpdate().set(SensitiveWord::getDeleteFlag, YesOrNotEnum.YES.getValue())
                .set(SensitiveWord::getUpdateTime, new Date())
                .in(SensitiveWord::getId, swIdList)
                .eq(SensitiveWord::getTenantId, tenantId)
                .update();
    }

    /**
     * Enables or disables a single sensitive word.
     *
     * @param swId     word id
     * @param enable   new enabled state
     * @param tenantId owning tenant (scopes the update)
     */
    public void enable(Long swId, Integer enable, String tenantId) {
        // Guard against null ids/state, which would otherwise produce a bogus update.
        if (swId == null || enable == null) {
            return;
        }
        this.lambdaUpdate().set(SensitiveWord::getEnable, enable)
                .set(SensitiveWord::getUpdateTime, new Date())
                .eq(SensitiveWord::getId, swId)
                .eq(SensitiveWord::getTenantId, tenantId)
                .update();
    }

    /**
     * Pages through sensitive words, enriching each row with its creator's nickname
     * resolved from the AI customer-service client.
     *
     * @param req page request carrying the query conditions
     * @return one page of response VOs
     */
    public PageResponse<SensitiveWordRespVO> queryPage(PageRequest<QueryingSensitiveWordReqVO> req) {
        IPage<SensitiveWord> page = baseMapper.page(new Page<>(req.getPage(), req.getPageSize()), req.getData());
        List<SensitiveWord> resultList = Optional.ofNullable(page.getRecords()).orElseGet(Lists::newArrayList);

        List<Long> creatorIdList = resultList.stream().map(SensitiveWord::getCreatorId).distinct().collect(Collectors.toList());
        // Skip the remote member lookup entirely when the page has no rows.
        Map<Long, CsMemberInfoDTO> memberInfoMap = creatorIdList.isEmpty()
                ? Maps.newHashMap()
                : aiCustomerServiceClient.queryByIdList(creatorIdList)
                        .stream().collect(Collectors.toMap(CsMemberInfoDTO::getUserId, v -> v, (o, n) -> o));

        List<SensitiveWordRespVO> records = resultList.stream()
                .map(sw -> SensitiveWordRespVO.builder()
                        .creatorName(Optional.ofNullable(memberInfoMap.get(sw.getCreatorId())).map(CsMemberInfoDTO::getNickName).orElse(StringUtils.EMPTY))
                        .swId(sw.getId())
                        .name(sw.getName())
                        .updateTime(sw.getUpdateTime())
                        .enable(sw.getEnable())
                        .build())
                .collect(Collectors.toList());
        return new PageResponse<>(page.getTotal(), req.getPageSize(), req.getPage(), records);
    }

    /**
     * Loads every distinct deny word of a tenant, paging through the table by id
     * (keyset pagination, 3000 rows per batch) to bound memory per query.
     *
     * @param tenantId tenant to load words for
     * @return distinct word names
     */
    public List<String> denyBy(String tenantId) {
        Long lastId = 0L;
        Set<String> swSet = new HashSet<>();
        do {
            List<SensitiveWord> wordList = baseMapper.getWordListForDeny(tenantId, lastId, 3000);
            if (CollectionUtils.isEmpty(wordList)) {
                break;
            }
            for (SensitiveWord word : wordList) {
                swSet.add(word.getName());
                // Advance the keyset cursor to the last id seen.
                lastId = word.getId();
            }
        } while (true);
        return Lists.newArrayList(swSet);
    }

    /**
     * @return every tenant id that has sensitive words
     */
    public List<String> getAllTenantIdList() {
        return baseMapper.getAllTenantIdList();
    }

    /**
     * Fetches words updated since the given watermark, used to refresh the local
     * sensitive-word cache.
     *
     * @param swUpdateTime watermark; {@code null} yields an empty list
     * @param pageIndex    page index for the mapper query
     * @param size         page size for the mapper query
     * @return words changed since the watermark
     */
    public List<SensitiveWordRefreshDTO> getWordListForRefresh(Date swUpdateTime, Integer pageIndex, Integer size) {
        if (swUpdateTime == null) {
            return Lists.newArrayList();
        }
        return baseMapper.getWordListForRefresh(swUpdateTime, pageIndex, size);
    }

    /**
     * Looks up sensitive words by name (used for tag resolution).
     *
     * @param name word name; blank yields an empty list
     * @return matching words
     */
    public List<SensitiveWord> getWorldListForTag(String name) {
        if (StringUtils.isBlank(name)) {
            return Lists.newArrayList();
        }

        return this.baseMapper.getWorldListForTag(name);
    }


    /**
     * Runs sensitive-word detection on a message.
     * <p>
     * Returns {@code null} when nothing needs intercepting: blank text, sender is
     * neither visitor nor agent, no words hit, or no hit falls within an enabled
     * repo scope that applies to the sender's role.
     *
     * @param tenantId tenant whose word set applies
     * @param text     message text to scan
     * @param sender   sending account (role decides which repo scopes apply)
     * @return detection result, or {@code null} when nothing was intercepted
     */
    public SensitiveWordResultDTO filter(String tenantId, String text, Account sender) {
        // Guard sender as well: isVisitor() dereferences it unconditionally.
        if (StringUtils.isBlank(text) || sender == null) {
            return null;
        }

        Boolean isVisitor = isVisitor(sender);
        if (isVisitor == null) {
            log.warn("非访客或客服发的消息，不检测！accountId:{}", sender.getId());
            return null;
        }

        // Collect hits; drop hits that carry no tags (no repo to attribute them to).
        List<SensitiveWordResultDTO.Word> resultWordList = sensitiveWordBsFactory.getSensitiveWordBs(tenantId)
                .findAll(text, sensitiveWordFilterResultHandler)
                .stream()
                .filter(Objects::nonNull)
                .filter(wordInfo -> CollectionUtils.isNotEmpty(wordInfo.getTagDTOList()))
                .collect(Collectors.toList());
        if (CollectionUtils.isEmpty(resultWordList)) {
            return null;
        }

        List<Long> repoIdList = resultWordList.stream().flatMap(wordInfo -> wordInfo.getTagDTOList().stream())
                .map(SensitiveWordTagDTO::getRepId).distinct().collect(Collectors.toList());
        Map<Long, SensitiveWordRepo> wordRepoMap = sensitiveWordRepoService.getEnableMapBy(repoIdList);

        // Keep only hits whose repo scope covers the sender's role (visitor/agent).
        resultWordList = resultWordList.stream()
                .filter(wordInfo -> isCheckWordScope(isVisitor, wordInfo, wordRepoMap))
                .collect(Collectors.toList());
        if (CollectionUtils.isEmpty(resultWordList)) {
            return null;
        }

        // NOTE(review): getRuleConfig is assumed to always return a config for a
        // known tenant — a null here would NPE. Confirm against its implementation.
        SensitiveWordRule ruleConfig = sensitiveWordRuleService.getRuleConfig(tenantId);

        return SensitiveWordResultDTO.of(tenantId, ruleConfig.getShowType(), resultWordList);
    }

    /**
     * Replaces detected sensitive words inside the given text.
     *
     * @param text                 original text
     * @param sensitiveWordMsgList hits to replace
     * @return text with sensitive words masked/replaced
     */
    public String replace(String text, List<SensitiveWordMsg> sensitiveWordMsgList) {
        return sensitiveWordReplaceHandler.replace(text, sensitiveWordMsgList);
    }

    /**
     * Determines the sender's role from the account's JSON {@code ext} payload.
     *
     * @param account sending account
     * @return {@code true} visitor, {@code false} agent, {@code null} when the role
     *         cannot be determined (blank/unparseable ext or missing userType)
     */
    private Boolean isVisitor(Account account) {
        if (StringUtils.isBlank(account.getExt())) {
            return null;
        }
        JSONObject jsonObject = JsonUtil.toObject(account.getExt(), JSONObject.class);
        if (jsonObject == null || !jsonObject.containsKey("userType")) {
            return null;
        }
        // userType == 0 marks a visitor; anything else is treated as an agent.
        return NumberUtils.INTEGER_ZERO.equals(jsonObject.getInteger("userType"));
    }

    /**
     * Decides whether a hit applies to the sender: each tag's repo declares the
     * scopes it checks (visitor messages, agent messages, or both).
     *
     * @param isVisitor   sender role ({@code true} visitor, {@code false} agent)
     * @param wordResult  hit with its repo tags
     * @param wordRepoMap enabled repos keyed by id
     * @return {@code true} when any tagged repo's scope covers the sender's role
     */
    private boolean isCheckWordScope(Boolean isVisitor, SensitiveWordResultDTO.Word wordResult, Map<Long, SensitiveWordRepo> wordRepoMap) {
        if (MapUtils.isEmpty(wordRepoMap)) {
            return false;
        }

        boolean isVisitorCheck = false;
        boolean isCsCheck = false;
        boolean isBothCheck = false;

        for (SensitiveWordTagDTO tag : wordResult.getTagDTOList()) {
            SensitiveWordRepo wordRepo = wordRepoMap.get(tag.getRepId());
            if (wordRepo == null || StringUtils.isBlank(wordRepo.getScope())) {
                continue;
            }
            List<Integer> scopes = JsonUtil.toList(wordRepo.getScope(), Integer.class);
            for (Integer scope : scopes) {
                if (SensitiveWordRepoType.VISITOR_SEND_MSG.getCode().equals(scope)) {
                    isVisitorCheck = true;
                } else if (SensitiveWordRepoType.CS_SEND_MSG.getCode().equals(scope)) {
                    isCsCheck = true;
                }
            }
            // Both scopes seen -> every sender role is covered, stop scanning tags.
            isBothCheck = isVisitorCheck && isCsCheck;
            if (isBothCheck) {
                break;
            }
        }

        return isBothCheck || (isVisitor && isVisitorCheck) || (!isVisitor && isCsCheck);
    }


}
