package com.msb.strategy.filter.impl;

import com.msb.common.constart.CacheConstant;
import com.msb.common.enums.ExceptionEnums;
import com.msb.common.exception.GraceException;
import com.msb.common.model.StandardSubmit;
import com.msb.strategy.feign.BeaconCacheClient;
import com.msb.strategy.filter.StrategyFilter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

import java.io.IOException;
import java.io.StringReader;
import java.util.HashSet;
import java.util.Set;

/**
 * Sensitive-word (dirty-word) validation strategy.
 * <p>
 * Tokenizes the SMS text with the IK segmenter, intersects the tokens with the
 * dirty-word set stored in Redis, and raises a business exception when any
 * token is a known dirty word.
 *
 * @Author: lmf
 * @Create: 2025/3/11 22:38
 * @module
 */
@Slf4j
@Service(value = "dirtyword")
public class DirtyWordStrategyFilter implements StrategyFilter {
    @Autowired
    private BeaconCacheClient cacheClient;

    /**
     * Validates the submitted message text against the dirty-word blacklist.
     *
     * @param submit the standard submission carrying the SMS text to check
     * @throws RuntimeException if the segmenter fails with an I/O error
     */
    @Override
    public void strategy(StandardSubmit submit) {
        log.info("敏感词校验");
        // Get the SMS text
        final String text = submit.getText();
        // Tokenize the content (smart mode) and collect the distinct tokens
        Set<String> contents = new HashSet<>();
        try {
            IKSegmenter ik = new IKSegmenter(new StringReader(text), true);
            Lexeme lexeme;
            while ((lexeme = ik.next()) != null) {
                contents.add(lexeme.getLexemeText());
            }
        } catch (IOException e) {
            // StringReader should not fail, but IKSegmenter#next declares IOException
            throw new RuntimeException(e);
        }

        // Fetch the dirty-word set from Redis
        final Set<String> set = cacheClient.sMembersString(CacheConstant.DIRTYWORD);
        if (set == null || set.isEmpty()) {
            // No blacklist configured — nothing to check against
            return;
        }

        // BUG FIX: retainAll returns true when the receiver was MODIFIED, so the old
        // check `!contents.retainAll(set)` only fired when every token was dirty and
        // let mixed messages through. Intersect first, then test for any hits.
        contents.retainAll(set);
        if (!contents.isEmpty()) {
            // Log the offending tokens, not the whole blacklist
            log.info("敏感词校验异常=>>>>>{}", contents);
            GraceException.display(ExceptionEnums.ERROR_HAVE_DIRTY_WORD);
        }
    }
}
