package top.roadvast.logfilter.util;
 
/**
 * @Author : JCccc
 * @CreateTime : 2019/7/30
 * @Description : 敏感词库初始化（DFA 构建）
 **/
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
 
 
/**
 * 屏蔽敏感词初始化
 */
@Slf4j
public class SensitiveWordInit {

    // 初始化敏感字库
    public Map<Object, Object> initKeyWord() {
        // 读取敏感词库 ,存入Set中
        Set<String> wordSet = readSensitiveWordFile();
        // 将敏感词库加入到HashMap中//确定有穷自动机DFA
        return addSensitiveWordToHashMap(wordSet);
    }

    /**
     * 读取敏感词库 ,存入HashSet中
     * @return 敏感词汇库
     */
    private Set<String> readSensitiveWordFile() {
        Set<String> wordSet = null;
        //敏感词库
        Resource resource = new ClassPathResource("config/censorwords.txt");
        File file = null;
        try {
            file = resource.getFile();
        } catch (IOException e) {
            log.error("[censorwords.txt]文件不存在。");
        }
        if (file != null) {
            // 读取文件输入流
            // BufferedReader是包装类，先把字符读到缓存里，到缓存满了，再读入内存，提高了读的效率。
            try(InputStreamReader read = new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8);
                    BufferedReader br = new BufferedReader(read)) {
                wordSet = new HashSet<>();
                String txt = null;
                // 读取文件，将文件内容放入到set中
                while ((txt = br.readLine()) != null) {
                    wordSet.add(txt);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return wordSet;
    }
 
 
    /**
     * 将HashSet中的敏感词,存入HashMap中
     * @param wordSet 敏感词库set列表
     * @return 敏感词库map列表
     */
    private Map<Object, Object> addSensitiveWordToHashMap(Set<String> wordSet) {
 
        // 初始化敏感词容器，减少扩容操作
        Map<Object, Object> wordMap = new HashMap<>(wordSet.size());
        for (String word: wordSet) {
            Map<Object, Object> nowMap = wordMap;
            for (int i = 0; i < word.length(); i++) {
                 // 转换成char型
                Character keyChar = word.charAt(i);
                // 获取
                Object tempMap = nowMap.get(keyChar);
                // 如果存在该key，直接赋值
                if (tempMap != null) {
                    nowMap = (Map) tempMap;
                } else {
                    // 不存在则，则构建一个map，同时将isEnd设置为0，因为他不是最后一个
                    // 设置标志位
                    Map<Object, Object> newMap = new HashMap<>();
                    newMap.put("isEnd", "0");
                    // 添加到集合
                    nowMap.put(keyChar, newMap);
                    nowMap = newMap;
                }
                // 最后一个
                if (i == word.length() - 1) {
                    nowMap.put("isEnd", "1");
                }
            }
        }
        return wordMap;
    }
 
 
}