package com.samp.solr.solrlstm;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.dictionary.CustomDictionary;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;
import com.samp.util.FilesUtils;
import com.samp.util.MapUtils;
import org.apache.commons.lang3.StringUtils;

import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;

/**
 * Tokenization utility built on HanLP word segmentation.
 *
 * <p>Pipeline (see {@link #tokenize(String)}): segment the input with a
 * custom-dictionary-enabled HanLP segmenter, normalize numeric/amount tokens
 * to the placeholders {@code $数字}/{@code $金额}, replace synonyms with their
 * canonical form, drop stop words, then collapse consecutive repeats and
 * ABAB-style alternating repeats.
 *
 * <p>NOTE(review): the dictionary-loading methods mutate static state and are
 * not thread-safe; presumably they are called once at startup — confirm with callers.
 */
public class TokenizeUtils {


    /** HanLP segmenter with the user custom dictionary enabled. */
    private static final Segment segment = HanLP.newSegment().enableCustomDictionary(true);

    /** Maps a synonym to its canonical replacement word; filled by {@link #loadSynonymDict(String)}. */
    private static final Map<String, String> synonymMap = new HashMap<>();

    /** Stop words loaded by {@link #loadStopwordsDict(String)}; a Set gives O(1) lookups. */
    private static final Set<String> stopWordsDict = new LinkedHashSet<>();

    /** Characters treated as digits: Chinese lower-case, Arabic, Chinese upper-case (financial). */
    private static final Set<String> NUMBERS_LIST = new LinkedHashSet<>();
    /** Characters that mark a magnitude or currency unit (ten/hundred/..., yuan/jiao/fen). */
    private static final Set<String> AMOUNT_LIST = new LinkedHashSet<>();

    static {
        NUMBERS_LIST.addAll(Arrays.asList("零", "一", "二", "三", "四", "五", "六", "七", "八", "九"));
        NUMBERS_LIST.addAll(Arrays.asList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
        NUMBERS_LIST.addAll(Arrays.asList("壹", "贰", "叁", "肆", "伍", "陆", "柒", "捌", "玖"));
        AMOUNT_LIST.addAll(Arrays.asList("十", "百", "千", "拾", "佰", "万", "仟", "块", "角", "分", "元"));
    }

    /**
     * Segments {@code doc} and runs the full normalization pipeline.
     *
     * @param doc raw input text
     * @return the processed tokens joined with single spaces
     */
    public static String tokenize(String doc) {
        List<String> tokenizeList = new ArrayList<>();
        List<Term> termList = segment.seg(doc);
        for (Term term : termList) {
            // Use the Term's surface form directly. The previous
            // term.toString().split("/")[0] re-parsed "word/nature" and would
            // truncate any word that itself contains a '/'.
            String text = term.word;
            if (StringUtils.isNotBlank(text)) {
                tokenizeList.add(transSpecialWord(text));
            }
        }
        // Canonicalize synonyms, drop stop words, then collapse repetitions.
        tokenizeList = replaceSynonym(tokenizeList);
        applyStopwords(tokenizeList);
        tokenizeList = removeContinuousRepeatWord(tokenizeList);
        tokenizeList = removeABAB(tokenizeList);
        return StringUtils.join(tokenizeList.iterator(), " ");
    }

    /**
     * Removes consecutive duplicate words: "a a b a" becomes "a b a".
     *
     * @param tokenizeList tokens in order
     * @return a list with runs of equal adjacent words collapsed to one
     */
    public static List<String> removeContinuousRepeatWord(List<String> tokenizeList) {
        if (tokenizeList.size() <= 1) {
            return tokenizeList;
        }
        List<String> noRepeatList = new ArrayList<>(tokenizeList.size());
        for (String word : tokenizeList) {
            // Append unless the word repeats the one just emitted. (The old
            // code also did a full contains() scan, which was redundant: a
            // word absent from the list can never equal the last element.)
            if (noRepeatList.isEmpty() || !noRepeatList.get(noRepeatList.size() - 1).equals(word)) {
                noRepeatList.add(word);
            }
        }
        return noRepeatList;
    }

    /**
     * Collapses alternating repetitions of the form "A B A B ..." down to "A B".
     * Lists of three or fewer tokens are returned unchanged.
     */
    private static List<String> removeABAB(List<String> tokenizeList) {
        if (tokenizeList.size() <= 3) {
            return tokenizeList;
        }
        List<String> resultList = new ArrayList<>(tokenizeList.size());
        for (String word : tokenizeList) {
            addElementRemoveABAB(resultList, word);
        }
        return resultList;
    }

    /**
     * Appends {@code word} to {@code list} unless it would extend an ABAB
     * alternation: when the list ends with [..., A, B, A] and {@code word} is B,
     * the trailing A is removed and the word is dropped, leaving [..., A, B].
     *
     * @return the same list, for chaining
     */
    private static List<String> addElementRemoveABAB(List<String> list, String word) {
        if (list.size() < 3) {
            list.add(word);
            return list;
        }
        int last = list.size() - 1;
        if (word.equals(list.get(last - 1))
                && list.get(last).equals(list.get(last - 2))) {
            // ...A B A + B  ->  ...A B  (undo the alternation instead of growing it)
            list.remove(last);
        } else {
            list.add(word);
        }
        return list;
    }

    /**
     * Removes, in place, every token whose trimmed form is a stop word.
     */
    public static void applyStopwords(List<String> termList)
    {
        ListIterator<String> listIterator = termList.listIterator();
        while (listIterator.hasNext())
        {
            if (stopWordsDict.contains(listIterator.next().trim())) {
                listIterator.remove();
            }
        }
    }

    /**
     * Loads stop words (one per line, UTF-8) into the in-memory set.
     * Duplicate lines are ignored; I/O errors are logged and swallowed so a
     * missing file does not abort startup (pre-existing best-effort behavior).
     *
     * @param stopwordsPath path to the stop-word file; blank paths are skipped
     */
    public static void loadStopwordsDict(String stopwordsPath) {
        if (StringUtils.isBlank(stopwordsPath)) {
            // Was "自定义词典为空。" (custom dictionary) — wrong dictionary name for this method.
            System.out.println("停用词词典为空。");
            return;
        }
        Path filePath = Paths.get(stopwordsPath);
        // Count from 0: the old code started at 1 and reported count + 1.
        int count = 0;
        try (BufferedReader bufferReader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) {
            String line;
            while ((line = bufferReader.readLine()) != null) {
                // Set.add returns true only for new entries, so count stays exact.
                if (stopWordsDict.add(line.trim())) {
                    count++;
                }
            }
            System.out.println("加载停用词个数：" + count + ", for path=" + stopwordsPath);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Loads the user custom dictionary (one word per line, UTF-8) into HanLP's
     * {@link CustomDictionary}. I/O errors are logged and swallowed.
     *
     * @param customFilePath path to the custom dictionary file; blank paths are skipped
     */
    public static void loCustomDict(String customFilePath) {
        if (StringUtils.isBlank(customFilePath)) {
            System.out.println("自定义词典为空。");
            return;
        }
        Path filePath = Paths.get(customFilePath);
        // Count from 0: the old code started at 1 and reported count + 1.
        int count = 0;
        try (BufferedReader bufferReader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) {
            String line;
            while ((line = bufferReader.readLine()) != null) {
                CustomDictionary.add(line);
                count++;
            }
            System.out.println("加载自定义词个数：" + count + ", for path=" + customFilePath);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Loads the synonym dictionary. Each UTF-8 line is comma separated:
     * the first word is the canonical form, every following word is mapped to it.
     * A word seen on more than one line is reported as "error similarword".
     *
     * @param synonymFilePath path to the synonym file
     */
    public static void loadSynonymDict(String synonymFilePath) {
        System.out.println("initial synonymMap for " + synonymFilePath);
        Path filePath = Paths.get(synonymFilePath);
        try (BufferedReader bufferReader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) {
            String line;
            // Tracks every word already seen, to flag duplicates across lines.
            Set<String> checkFile = new HashSet<>();
            while ((line = bufferReader.readLine()) != null) {
                String[] synonyms = line.split(",");
                if (!checkFile.add(synonyms[0])) {
                    System.out.println("error similarword:" + synonyms[0]);
                }
                // Map each alternative form to the canonical first word.
                for (int i = 1; i < synonyms.length; i++) {
                    synonymMap.put(synonyms[i], synonyms[0]);
                    if (!checkFile.add(synonyms[i])) {
                        System.out.println("error similarword:" + synonyms[i]);
                    }
                }
            }
            System.out.println("加载同义词大小：" + synonymMap.size());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Replaces every token that has a canonical synonym with that synonym.
     *
     * @param tokenizeList tokens to map (may be null or empty)
     * @return a new list of the same length; empty when the input is null/empty
     */
    public static List<String> replaceSynonym(List<String> tokenizeList) {
        if (synonymMap.isEmpty()) {
            System.out.println(" 同义词没有加载。。。。");
        }
        List<String> resultList = new ArrayList<>();
        if (tokenizeList != null) {
            for (String word : tokenizeList) {
                String synonymWord = synonymMap.get(word);
                resultList.add(synonymWord != null ? synonymWord : word);
            }
        }
        return resultList;
    }

    /**
     * Extracts from a raw tab-separated file (columns: ?, word A, word B, weight)
     * three derived files: a custom-dictionary word list, a weight-to-words map
     * (weights other than "1.00") and a synonym map (B -> list of A).
     * Lines without exactly 4 columns are reported and skipped.
     *
     * @throws Exception wrapping the IOException together with the offending line
     */
    private static void exactCustomAndWeight(String rawPath, String customFile, String weightFile, String synonymFile)
            throws Exception {
        String line = null;
        try (BufferedReader bufferReader = Files.newBufferedReader(Paths.get(rawPath), StandardCharsets.UTF_8)) {
            List<String> customList = new ArrayList<>();
            Map<String, List<String>> weightMap = new HashMap<>();
            Map<String, List<String>> synonymMap = new HashMap<>();
            while ((line = bufferReader.readLine()) != null) {
                if (StringUtils.isBlank(line)) {
                    continue;
                }
                String[] splits = line.split("\t");
                if (splits.length != 4) {
                    System.out.println("error data:" + line);
                    continue;
                }
                // Multi-character words from columns 1 and 2 feed the custom dictionary.
                if (!customList.contains(splits[1]) && splits[1].length() > 1) {
                    customList.add(splits[1]);
                }
                if (!customList.contains(splits[2]) && splits[2].length() > 1) {
                    customList.add(splits[2]);
                }
                // Only non-default weights are recorded.
                if (!"1.00".equals(splits[3])) {
                    List<String> weighted = weightMap.get(splits[3]);
                    if (weighted == null) {
                        weighted = new ArrayList<>();
                        weightMap.put(splits[3], weighted);
                    }
                    if (!weighted.contains(splits[2])) {
                        weighted.add(splits[2]);
                    }
                }
                List<String> alternatives = synonymMap.get(splits[2]);
                if (alternatives == null) {
                    alternatives = new ArrayList<>();
                    synonymMap.put(splits[2], alternatives);
                }
                if (!splits[2].equals(splits[1]) && !alternatives.contains(splits[1])) {
                    alternatives.add(splits[1]);
                }
            }
            List<String> synonymFileList = MapUtils.mapToList(synonymMap, ",");
            List<String> weightList = MapUtils.mapToList(weightMap, ",");

            FilesUtils.saveListToFile(customList, customFile);
            FilesUtils.saveListToFile(weightList, weightFile);
            FilesUtils.saveListToFile(synonymFileList, synonymFile);
        } catch (IOException e) {
            throw new Exception("error line:" + line, e);
        }
    }

    /**
     * Normalizes tokens made of digit/amount characters to fixed placeholders:
     * <ul>
     *   <li>"818" -> "$数字" (pure digits)</li>
     *   <li>"7万" -> "$金额" (contains an amount/currency character)</li>
     *   <li>"三12370" -> "$数字" (mixed Chinese/Arabic digits)</li>
     * </ul>
     * Any token containing a character that is neither a digit nor an amount
     * marker is returned unchanged.
     *
     * @param text a single token
     * @return "$数字", "$金额", or the original text
     */
    public static String transSpecialWord(String text) {
        if (StringUtils.isBlank(text)) {
            return text;
        }
        // Assume "pure number" until proven otherwise; an amount character
        // upgrades to "$金额", any other character reverts to the raw text.
        String result = "$数字";
        for (int i = 0; i < text.length(); i++) {
            String c = String.valueOf(text.charAt(i));
            if (AMOUNT_LIST.contains(c)) {
                result = "$金额";
            } else if (!NUMBERS_LIST.contains(c)) {
                result = text;
                break;
            }
        }
        return result;
    }

    /**
     * Prints every word whose occurrence count is at most 4, read from a
     * "word:count" distribution file (used to hand-build the synonym file).
     *
     * @param distributionFile UTF-8 file with one "word:count" entry per line
     * @throws Exception wrapping the IOException together with the offending line
     */
    private static void generateWeightFromWordDistribution(String distributionFile) throws Exception {
        String line = null;
        try (BufferedReader bufferReader = Files.newBufferedReader(Paths.get(distributionFile), StandardCharsets.UTF_8)) {
            List<String> list = new ArrayList<>();
            while ((line = bufferReader.readLine()) != null) {
                if (StringUtils.isBlank(line)) {
                    System.out.println("blank line , continue");
                    continue;
                }
                String[] splits = line.split(":");
                if (splits.length != 2) {
                    System.out.println("error data:" + line);
                    continue;
                }
                // parseInt avoids the needless boxing of Integer.valueOf.
                if (Integer.parseInt(splits[1]) <= 4) {
                    list.add(splits[0]);
                }
            }
            System.out.println(StringUtils.join(list, ","));
        } catch (IOException e) {
            throw new Exception("error line:" + line, e);
        }
    }


    /**
     * Ad-hoc driver: select the routine to run via {@code callMethod}.
     */
    public static void main(String[] args) {
        String callMethod = "generateWeightFromWordDistribution";
        String collectionDirName = "lifeareanavi";
        String collectionFullPath = SolrlstmUtils.BASE_PATH + collectionDirName + "\\";
        try {
            if ("exactCustomAndWeight".equals(callMethod)) {
                String rawPath = collectionFullPath + "raw_custom_weight_data.txt";
                String customPath = collectionFullPath + "tokenize_custom_01.txt";
                String weightPath = collectionFullPath + "tokenize_weight_01.txt";
                String synonymPath = collectionFullPath + "tokenize_synonym_01.txt";
                exactCustomAndWeight(rawPath, customPath, weightPath, synonymPath);
            } else if ("tokenize".equals(callMethod)) {
                String customPath = collectionFullPath + "tokenize_custom_xx.txt";
                String synonymPath = collectionFullPath + "tokenize_synonym_xx.txt";
                String stopWordsPath = collectionFullPath + "stopwords.txt";
                loCustomDict(customPath);
                loadStopwordsDict(stopWordsPath);
                loadSynonymDict(synonymPath);
                String test = "我要找我的那个工号三12370帮我办理保险那个";
                String tokenText = tokenize(test);
                System.out.println(tokenText);
            } else if ("transSpecialWord".equals(callMethod)) {
                String rawText = "转马";
                System.out.println(transSpecialWord(rawText));
            } else if ("generateWeightFromWordDistribution".equals(callMethod)) {
                String wordsDistributionFile = collectionFullPath + "words_distribution.txt";
                generateWeightFromWordDistribution(wordsDistributionFile);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        System.out.println("done!");
    }
}
