package com.nlp.visualization.core.discourse.impl;

import com.nlp.visualization.utils.PropertyUtil;
import org.apache.commons.collections.map.HashedMap;
import org.apache.commons.io.FileUtils;
import org.aspectj.util.FileUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.*;


public class SensitiveWordInit {

    /** 字符编码 — sensitive-word file is always read as UTF-8 (was an unused instance field). */
    private static final String ENCODING = "UTF-8";

    private static final Logger logger = LoggerFactory.getLogger(SensitiveWordInit.class);

    /**
     * DFA model: nested maps keyed by Character, with the reserved String key
     * "isEnd" marking whether a sensitive word terminates at that node
     * ("1" = word ends here, "0" = prefix only). Raw type kept for the
     * existing {@link #initKeyWord()} callers.
     */
    @SuppressWarnings("rawtypes")
    private static HashMap sensitiveWordMap;

    static {
        logger.info("加载敏感词表");
        try {
            // 读取敏感词库
            Set<String> keyWordSet = readSensitiveWordFile();
            // 将敏感词库加入到HashMap中
            addSensitiveWordToHashMap(keyWordSet);
        } catch (Exception e) {
            // Pass the exception to the logger so the stack trace lands in the
            // configured log output instead of stderr (was printStackTrace()).
            logger.error("读取敏感词表失败", e);
        }
        logger.info("加载敏感词表完成");
    }

    public SensitiveWordInit() {
        super();
    }

    /**
     * Returns the DFA map built by the static initializer.
     *
     * @return the sensitive-word DFA map; may be {@code null} if loading failed
     */
    @SuppressWarnings("rawtypes")
    public Map initKeyWord() {
        // Static field — no 'this.' qualifier.
        return sensitiveWordMap;
    }

    /**
     * 读取敏感词库，将敏感词放入HashSet中，构建一个DFA算法模型.
     *
     * @param keyWordSet 敏感词库
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    private static void addSensitiveWordToHashMap(Set<String> keyWordSet) {
        // 初始化敏感词容器，减少扩容操作
        sensitiveWordMap = new HashMap(keyWordSet.size());
        for (String key : keyWordSet) {
            Map nowMap = sensitiveWordMap;
            for (int i = 0; i < key.length(); i++) {
                char keyChar = key.charAt(i);
                Object wordMap = nowMap.get(keyChar);
                if (wordMap != null) {
                    // 如果存在该key，直接下行到已有节点
                    nowMap = (Map) wordMap;
                } else {
                    // 不存在则构建一个map，同时将isEnd设置为0，因为他不是最后一个
                    Map<String, String> newWordMap = new HashMap<String, String>();
                    newWordMap.put("isEnd", "0");
                    nowMap.put(keyChar, newWordMap);
                    nowMap = newWordMap;
                }
                if (i == key.length() - 1) {
                    // 最后一个字符：标记词尾
                    nowMap.put("isEnd", "1");
                }
            }
        }
    }

    /**
     * 读取敏感词库中的内容，将内容添加到set集合中.
     *
     * @return 敏感词集合（每行一个词）
     * @throws Exception 敏感词库文件不存在或读取失败
     */
    private static Set<String> readSensitiveWordFile() throws Exception {
        final String SENSITIVE_FILE_PATH = PropertyUtil.getInstance("library").getProperty("sensitive");
        File file = new File(SENSITIVE_FILE_PATH);    // 读取文件
        if (!file.isFile() || !file.exists()) {
            // 不存在抛出异常信息
            throw new Exception("敏感词库文件不存在");
        }
        Set<String> set = new HashSet<String>();
        // 显式指定 UTF-8（原实现使用无字符集的 FileUtils.readLines，依赖平台默认字符集）；
        // try-with-resources 保证流被关闭
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(new FileInputStream(file), ENCODING))) {
            String line;
            while ((line = reader.readLine()) != null) {
                set.add(line);
            }
        }
        return set;
    }
}
