package com.heima.utils.common;


import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Trie-based sensitive-word filter.
 *
 * <p>Build the dictionary once with {@link #initDictionary(Collection)}, then scan
 * text with {@link #detectSensitiveWords(String)}. Matching is greedy: at each
 * position the longest dictionary word is reported, and scanning resumes after it
 * (matches do not overlap).
 *
 * <p>Thread-safety: the trie is built on {@link ConcurrentHashMap}s, so lookups are
 * safe concurrently with reads; however {@link #initDictionary(Collection)} clears
 * and rebuilds the trie non-atomically, so detection running concurrently with a
 * rebuild may observe a partially built dictionary.
 */
public class SensitiveWordUtil {

    /** Root level of the trie: first character of each word -> subtree. */
    private static final Map<Character, Node> DICTIONARY = new ConcurrentHashMap<>();

    /** Words longer than this are silently skipped during initialization. */
    private static final int DEFAULT_WORD_MAX_LENGTH = 100;

    /** Utility class — not instantiable. */
    private SensitiveWordUtil() {
    }

    /** Marks whether a trie node completes a dictionary word. */
    private enum NodeType {
        INTERMEDIATE, // inside a word, not a word end
        TERMINAL      // a dictionary word ends at this node
    }

    /** One trie node: its type plus the transitions to the next characters. */
    private static class Node {
        NodeType type;
        final Map<Character, Node> next;

        Node(NodeType type) {
            this.type = type;
            this.next = new ConcurrentHashMap<>();
        }
    }

    /**
     * Clears any existing dictionary and rebuilds the trie from {@code words}.
     * Null, empty, and over-length entries are skipped.
     *
     * @param words sensitive words to load; must be non-null and non-empty
     * @throws IllegalArgumentException if {@code words} is null or empty
     */
    public static void initDictionary(Collection<String> words) {
        if (words == null || words.isEmpty()) {
            throw new IllegalArgumentException("敏感词列表不能为空");
        }

        DICTIONARY.clear();

        for (String word : words) {
            if (word == null || word.isEmpty()) {
                continue;
            }

            char[] chars = word.toCharArray();
            if (chars.length > DEFAULT_WORD_MAX_LENGTH) {
                continue; // skip over-long words
            }

            // Walk/extend the trie one character at a time.
            Map<Character, Node> currentMap = DICTIONARY;

            for (int i = 0; i < chars.length; i++) {
                char c = chars[i];
                Node node = currentMap.get(c);

                if (node == null) {
                    node = new Node(i == chars.length - 1 ? NodeType.TERMINAL : NodeType.INTERMEDIATE);
                    currentMap.put(c, node);
                } else if (i == chars.length - 1) {
                    // Existing node now also completes a (shorter) word, e.g. adding
                    // "ab" after "abc" — promote it to a terminal node.
                    node.type = NodeType.TERMINAL;
                }

                currentMap = node.next;
            }
        }
    }

    /**
     * Scans {@code text} for dictionary words.
     *
     * @param text text to check; may be null or empty
     * @return map of matched word -> occurrence count; empty map when {@code text}
     *         is null/empty, the dictionary is empty, or nothing matches
     */
    public static Map<String, Integer> detectSensitiveWords(String text) {
        if (text == null || text.isEmpty() || DICTIONARY.isEmpty()) {
            return Collections.emptyMap();
        }

        Map<String, Integer> result = new HashMap<>();
        char[] chars = text.toCharArray();

        for (int i = 0; i < chars.length; i++) {
            int wordLength = checkSensitiveWord(chars, i);
            if (wordLength > 0) {
                String word = new String(chars, i, wordLength);
                result.merge(word, 1, Integer::sum);
                i += wordLength - 1; // resume scanning after the matched word
            }
        }

        return result;
    }

    /**
     * Returns the length of the longest dictionary word starting at
     * {@code beginIndex}, or 0 when no word starts there.
     *
     * <p>Fix: the previous version returned the total traversed depth whenever ANY
     * terminal node was passed, so with dictionary {"ab", "abcd"} and text "abc" it
     * reported the non-word "abc" (length 3). We now record the depth at each
     * terminal node and return that, yielding correct longest-match behavior.
     */
    private static int checkSensitiveWord(char[] chars, int beginIndex) {
        Map<Character, Node> currentMap = DICTIONARY;
        int depth = 0;          // characters traversed so far
        int matchedLength = 0;  // depth at the deepest TERMINAL node seen

        for (int i = beginIndex; i < chars.length; i++) {
            Node node = currentMap.get(chars[i]);
            if (node == null) {
                break;
            }

            depth++;
            if (node.type == NodeType.TERMINAL) {
                matchedLength = depth; // remember; keep going for a longer match
            }
            currentMap = node.next;
        }

        return matchedLength;
    }

    /**
     * Removes all words from the dictionary.
     */
    public static void clear() {
        DICTIONARY.clear();
    }

    public static void main(String[] args) {
        SensitiveWordUtil.initDictionary(Arrays.asList("敏感词", "测试"));
        Map<String, Integer> map = SensitiveWordUtil.detectSensitiveWords("这是一个测试敏感词的案例, 测试");
        System.out.println(map);
    }
}
