package top.dingwen.io.treasure.sensitive.core;

import cn.hutool.core.convert.Convert;
import cn.hutool.core.lang.Pair;
import cn.hutool.json.JSONUtil;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import top.dingwen.io.treasure.base.constant.SupWarsConstant;
import top.dingwen.io.treasure.sensitive.autoconfigure.SensitiveProperties;
import top.dingwen.io.treasure.sensitive.constant.EndType;
import top.dingwen.io.treasure.sensitive.constant.SensitiveConstant;
import top.dingwen.io.treasure.sensitive.constant.WordsType;
import top.dingwen.io.treasure.sensitive.po.SensitiveWords;
import top.dingwen.io.treasure.sensitive.service.ISensitiveWordsService;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import java.util.*;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * Sensitive-word lexicon context.
 * <p>Builds a DFA (trie) model from the sensitive-word lexicon by loading the
 * words into nested HashMaps, and — when auto-load is enabled — schedules a
 * background task that incrementally picks up newly added and logically
 * deleted words from the database.</p>
 *
 * <p>NOTE(review): {@code wordMap} is a plain {@code HashMap} that the reload
 * task mutates while other threads may be matching against it; confirm the
 * intended concurrency model (e.g. single-writer with tolerant readers).</p>
 *
 * @author minghu.zhang
 */
@SuppressWarnings({SupWarsConstant.RAWTYPES, SupWarsConstant.UNCHECKED})
@Component
@Slf4j
@Getter
public class WordsContext {

    /**
     * Sensitive-word dictionary (DFA/trie). Keys are {@code Character}s; each
     * value is the nested map for the next character. The special keys
     * {@code SensitiveConstant.IE} (is-end flag, stored as the String form of
     * an {@code EndType} ordinal) and {@code SensitiveConstant.IWW} (word
     * type, stored as the String form of a {@code WordsType} ordinal) carry
     * the word-boundary metadata.
     */
    private final Map wordMap = new HashMap(1024);

    /**
     * Whether the dictionary has already been initialized.
     */
    private boolean init;

    /**
     * Id of the last word row loaded; watermark for incremental loads.
     */
    private long addLastId;

    @Resource
    private ISensitiveWordsService sensitiveWordsService;

    @Resource
    private SensitiveProperties sensitiveProperties;

    /**
     * Context initialization: loads the full lexicon, records the incremental
     * watermark and, when auto-load is configured, starts the reload task.
     */
    @PostConstruct
    public void init() {
        List<SensitiveWords> sensitiveWords = sensitiveWordsService.queryWords(null);
        if (CollectionUtils.isNotEmpty(sensitiveWords)) {
            // Remember the last id so later loads only fetch newer rows.
            this.addLastId = sensitiveWords.get(sensitiveWords.size() - 1).getId();
        }
        Pair<Set<String>, Set<String>> wordsPair = groupWords(sensitiveWords);
        // Build the initial DFA model.
        initKeyWord(wordsPair.getKey(), wordsPair.getValue());
        if (sensitiveProperties.getAutoLoad()) {
            // Keep the lexicon in sync with the database.
            reloadWord(sensitiveWordsService);
        }
    }

    /**
     * Starts the periodic reload task (runs every minute) that adds new words
     * and removes logically-deleted ones.
     *
     * @param sensitiveWordsService sensitive-word service
     */
    @SuppressWarnings(SupWarsConstant.ALL)
    private void reloadWord(ISensitiveWordsService sensitiveWordsService) {
        // Single-threaded scheduler on a named daemon thread so the reload
        // task can never keep the JVM alive after the application stops.
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(runnable -> {
            Thread thread = new Thread(runnable, "sensitive-words-reload");
            thread.setDaemon(true);
            return thread;
        });
        Runnable task = () -> {
            try {
                // Load words added since the last watermark.
                addNewWords(sensitiveWordsService);
                // Drop words that were logically deleted in the database.
                removeDelWords(sensitiveWordsService);
            } catch (Exception e) {
                log.error("{},敏感词加载任务执行发生错误，消息[{}]", SensitiveConstant.SEN_L_P_S, e.getMessage(), e);
            }
        };
        // Initial delay 0, then once per minute.
        scheduler.scheduleAtFixedRate(task, 0, 1, TimeUnit.MINUTES);
    }

    /**
     * Splits words into black-list and white-list groups.
     *
     * @param sensitiveWords words to group
     * @return pair — key: black-list words, value: white-list words
     */
    private Pair<Set<String>, Set<String>> groupWords(List<SensitiveWords> sensitiveWords) {
        Set<String> blackWords = new HashSet<>();
        Set<String> whiteWords = new HashSet<>();
        for (SensitiveWords words : sensitiveWords) {
            if (WordsType.B.eq(words.getWordsType().getCode())) {
                // Black list
                blackWords.add(words.getWordsContent());
            } else {
                // White list
                whiteWords.add(words.getWordsContent());
            }
        }
        return new Pair<>(blackWords, whiteWords);
    }

    /**
     * Removes logically-deleted words from the DFA model.
     *
     * @param sensitiveWordsService sensitive-word service
     */
    private void removeDelWords(ISensitiveWordsService sensitiveWordsService) {
        List<SensitiveWords> sensitiveWordsDeleted = sensitiveWordsService.queryLogicDeletedWords();
        if (CollectionUtils.isNotEmpty(sensitiveWordsDeleted)) {
            if (log.isInfoEnabled()) {
                log.info("{},已感知到数据库敏感词变更，将移除以下敏感词[{}]", SensitiveConstant.SEN_L_P_S,
                        JSONUtil.parse(sensitiveWordsDeleted));
            }
            Pair<Set<String>, Set<String>> wordsPair = groupWords(sensitiveWordsDeleted);
            removeWord(wordsPair.getKey(), WordsType.B);
            removeWord(wordsPair.getValue(), WordsType.W);
        }
    }

    /**
     * Adds words created after the current watermark to the DFA model and
     * advances the watermark.
     *
     * @param sensitiveWordsService sensitive-word service
     */
    private void addNewWords(ISensitiveWordsService sensitiveWordsService) {
        List<SensitiveWords> sensitiveWords = sensitiveWordsService.queryWords(this.addLastId);
        if (CollectionUtils.isNotEmpty(sensitiveWords)) {
            if (log.isInfoEnabled()) {
                log.info("{},已感知到新的敏感词，将进行加载[{}]", SensitiveConstant.SEN_L_P_S, JSONUtil.parse(sensitiveWords));
            }
            this.addLastId = sensitiveWords.get(sensitiveWords.size() - 1).getId();
            Pair<Set<String>, Set<String>> wordsPair = groupWords(sensitiveWords);
            addWord(wordsPair.getKey(), WordsType.B);
            addWord(wordsPair.getValue(), WordsType.W);
        }
    }

    /**
     * One-time initialization of the DFA model; subsequent calls are no-ops
     * for the word loading but still mark the context initialized.
     *
     * @param black black-list words
     * @param white white-list words
     */
    private synchronized void initKeyWord(Set<String> black, Set<String> white) {
        try {
            if (!init) {
                // Load the sensitive (black-list) words into the trie.
                addWord(black, WordsType.B);
                // Load the non-sensitive (white-list) words as well.
                addWord(white, WordsType.W);
            }
            init = true;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Adds words to the DFA model, e.g.:<br>
     * 中 = { isEnd = 0 国 = {<br>
     * isEnd = 1 人 = {isEnd = 0 民 = {isEnd = 1} } 男 = { isEnd = 0 人 = { isEnd = 1 }
     * } } } 五 = { isEnd = 0 星 = { isEnd = 0 红 = { isEnd = 0 旗 = { isEnd = 1 } } } }
     *
     * @param words    words to add (no-op when empty)
     * @param wordType word type: black list B, white list W
     */
    public void addWord(Collection<String> words, WordsType wordType) {
        if (CollectionUtils.isEmpty(words)) {
            return;
        }
        for (String word : words) {
            Map nowMap = wordMap;
            for (int i = 0; i < word.length(); i++) {
                char keyChar = word.charAt(i);
                // Descend into the existing child node, or create a new one.
                // (Local renamed from "wordMap", which shadowed the field.)
                Object child = nowMap.get(keyChar);
                if (child != null) {
                    nowMap = (Map) child;
                } else {
                    Map<String, String> newNode = new HashMap<>(4);
                    // Not (yet) the end of a word.
                    newNode.put(SensitiveConstant.IE, Convert.toStr(EndType.HAS_NEXT.ordinal()));
                    nowMap.put(keyChar, newNode);
                    nowMap = newNode;
                }

                if (i == word.length() - 1) {
                    // Mark the word boundary and record its type (as a String).
                    nowMap.put(SensitiveConstant.IE, Convert.toStr(EndType.IS_END.ordinal()));
                    nowMap.put(SensitiveConstant.IWW, String.valueOf(wordType.ordinal()));
                }
            }
        }
    }

    /**
     * Removes words from the DFA model at runtime.
     *
     * <p>Fixes over the previous implementation: a missing or type-mismatched
     * word no longer aborts removal of the remaining words; the stored word
     * type (a String — see {@link #addWord}) is compared as a String instead
     * of against a raw {@code int} ordinal (which never matched); pruning now
     * tracks the correct child character at every level and stops at nodes
     * that still have other branches, so removing one word can no longer
     * delete words sharing its prefix.</p>
     *
     * @param words    words to remove (no-op when empty)
     * @param wordType word type: black list B, white list W
     */
    public void removeWord(Collection<String> words, WordsType wordType) {
        if (CollectionUtils.isEmpty(words)) {
            return;
        }
        for (String word : words) {
            removeSingleWord(word, wordType);
        }
    }

    /**
     * Removes a single word from the trie, pruning branches that become empty.
     *
     * @param word     the word to remove
     * @param wordType expected type of the stored word; removal is skipped on
     *                 a type mismatch
     */
    private void removeSingleWord(String word, WordsType wordType) {
        // Path of trie nodes visited while matching the word, one per character.
        List<Map> path = new ArrayList<>(word.length());
        Map nowMap = wordMap;
        for (int i = 0; i < word.length(); i++) {
            Object child = nowMap.get(word.charAt(i));
            if (child == null) {
                // Word is not present in the trie; nothing to remove.
                return;
            }
            nowMap = (Map) child;
            path.add(nowMap);
        }

        Map tail = path.get(path.size() - 1);
        if (!String.valueOf(EndType.IS_END.ordinal()).equals(tail.get(SensitiveConstant.IE))) {
            // The word is only a prefix of longer stored words, not a stored
            // word itself — removing its flags would corrupt the trie.
            return;
        }
        Object storedType = tail.get(SensitiveConstant.IWW);
        // Only remove when the caller's type matches the stored type.
        if (String.valueOf(WordsType.B.ordinal()).equals(storedType) && WordsType.W.eq(wordType.getCode())) {
            return;
        }
        if (String.valueOf(WordsType.W.ordinal()).equals(storedType) && WordsType.B.eq(wordType.getCode())) {
            return;
        }
        tail.remove(SensitiveConstant.IWW);
        tail.remove(SensitiveConstant.IE);

        // Walk back toward the root deleting nodes that became empty; stop at
        // any node that still ends another word or has other child branches.
        boolean cleanable = tail.isEmpty();
        char[] keys = word.toCharArray();
        char lastChar = keys[keys.length - 1];
        for (int j = path.size() - 2; j >= 0 && cleanable; j--) {
            Map node = path.get(j);
            node.remove(lastChar);
            if (String.valueOf(EndType.IS_END.ordinal()).equals(node.get(SensitiveConstant.IE))
                    || node.size() > 1) {
                // Node terminates another word, or still has other branches
                // besides the removed one — keep it and stop pruning.
                cleanable = false;
            }
            lastChar = keys[j];
        }

        if (cleanable) {
            // The whole chain was exclusive to this word; detach it from the root.
            wordMap.remove(lastChar);
        }
    }

}
