package com.itheima.common.util;


import com.hankcs.algorithm.AhoCorasickDoubleArrayTrie;

import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;

public class SensitiveWordUtil {

    /**
     * Trie-style dictionary built by {@link #initMap(Collection)}.
     * Each key is a single character of a keyword; each value is the map for the
     * next character level. The reserved key {@code "isEnd"} maps to {@code "1"}
     * when a keyword terminates at that node, {@code "0"} otherwise.
     * NOTE(review): public mutable static state — callers elsewhere read it, so
     * the field is kept as-is for compatibility.
     */
    public static Map<String, Object> dictionaryMap = new HashMap<>();

    /**
     * Builds the keyword dictionary (a nested-map trie) from the given words
     * and publishes it into {@link #dictionaryMap}.
     * A {@code null} collection is rejected with a console message and the
     * previously published dictionary is left untouched.
     *
     * @param words the sensitive words to index; may not be {@code null}
     */
    public static void initMap(Collection<String> words) {
        if (words == null) {
            System.out.println("敏感词列表不能为空");
            return;
        }

        // Initial capacity words.size(): the first level holds at most one entry
        // per distinct leading character, which is <= words.size().
        Map<String, Object> map = new HashMap<>(words.size());

        for (String word : words) {
            Map<String, Object> curMap = map;
            int len = word.length();
            for (int i = 0; i < len; i++) {
                String key = String.valueOf(word.charAt(i));
                // Descend to the node for this character, creating it on demand.
                @SuppressWarnings("unchecked")
                Map<String, Object> next = (Map<String, Object>) curMap.get(key);
                if (next == null) {
                    // Each node carries at most the "isEnd" flag plus child entries.
                    next = new HashMap<>(2);
                    next.put("isEnd", "0");
                    curMap.put(key, next);
                }
                curMap = next;
                // Mark the node of the last character as a word terminator.
                if (i == len - 1) {
                    curMap.put("isEnd", "1");
                }
            }
        }

        dictionaryMap = map;
    }

    /**
     * Walks the trie from {@code beginIndex} in {@code text} and returns the
     * length of the longest dictionary keyword starting there, or 0 if none.
     *
     * BUG FIX: the previous version returned the total depth walked whenever
     * any intermediate "isEnd" was seen. If a matched keyword was also a prefix
     * of a longer keyword (e.g. dictionary {"ab","abcd"}, text "abc"), the walk
     * continued past the confirmed match and reported the over-long depth (3
     * instead of 2), causing matchWords to extract a non-keyword substring.
     * We now record the depth at each confirmed terminator and return that.
     *
     * @param text       the text to scan
     * @param beginIndex index in {@code text} where matching starts
     * @return length of the longest keyword starting at beginIndex, or 0
     * @throws RuntimeException if the dictionary has not been initialized
     */
    private static int checkWord(String text, int beginIndex) {
        if (dictionaryMap == null) {
            throw new RuntimeException("字典不能为空");
        }
        // Length of the longest keyword confirmed so far (ends on an "isEnd" node).
        int matchedLength = 0;
        // How many characters we have descended through the trie.
        int depth = 0;
        Map<String, Object> curMap = dictionaryMap;
        int len = text.length();
        for (int i = beginIndex; i < len; i++) {
            String key = String.valueOf(text.charAt(i));
            @SuppressWarnings("unchecked")
            Map<String, Object> next = (Map<String, Object>) curMap.get(key);
            if (next == null) {
                // No child for this character: no longer keyword is possible.
                break;
            }
            curMap = next;
            depth++;
            if ("1".equals(curMap.get("isEnd"))) {
                // A keyword terminates exactly here; remember its length but keep
                // walking in case a longer keyword also matches.
                matchedLength = depth;
            }
        }
        return matchedLength;
    }

    /**
     * Scans {@code text} for dictionary keywords and counts how often each
     * occurs. Matching is greedy (longest keyword at each position) and
     * non-overlapping: the scan resumes after each match.
     *
     * @param text the text to scan; a {@code null} text yields an empty map
     * @return map of matched keyword to occurrence count (empty if none)
     */
    public static Map<String, Integer> matchWords(String text) {
        Map<String, Integer> wordMap = new HashMap<>();
        if (text == null) {
            return wordMap;
        }
        int len = text.length();
        for (int i = 0; i < len; i++) {
            int wordLength = checkWord(text, i);
            if (wordLength > 0) {
                String word = text.substring(i, i + wordLength);
                wordMap.merge(word, 1, Integer::sum);
                // Skip past the matched word (loop increment adds the final +1).
                i += wordLength - 1;
            }
        }
        return wordMap;
    }

    /*
     * Demo: the double-array trie (AhoCorasickDoubleArrayTrie) is more
     * convenient and faster than the hand-rolled nested-map dictionary above.
     */
    public static void main(String[] args) {
        List<String> list = new ArrayList<>();
        list.add("冰毒");
        list.add("大麻");
        list.add("海洛因");
        /*initMap(list);
        String content = "我是一个好人，买卖冰毒是违法的,大麻也不可以的";
        Map<String,Integer> map = matchWords(content);
        System.out.println(map);*/

        Map<String, String> map = list.stream().collect(Collectors.toMap(Function.identity(), Function.identity()));
        AhoCorasickDoubleArrayTrie<String> acdat = new AhoCorasickDoubleArrayTrie<>();
        // Build the double-array trie from the keyword map.
        acdat.build(map);
        //acdat.save();  // persist (e.g. together with redis)
        //acdat.load();  // restore
        // Test it
        final String text = "我是一个好人，买卖冰毒是违法的,大麻也不可以的,大麻比较便宜";
        List<AhoCorasickDoubleArrayTrie.Hit<String>> wordList = acdat.parseText(text);
        wordList.forEach(System.out::println);
    }

}
