package com.zhihuishu;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.seg.common.Term;
import com.hankcs.hanlp.tokenizer.StandardTokenizer;
import com.zhihuishu.domain.HanlpDto;

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Demo that segments Chinese text files with HanLP, counts per-word frequency,
 * and groups the distinct words seen under each nature (part-of-speech tag).
 *
 * @Author: wangjinliang
 * @Date: 2019/5/30 16:09
 */
public class HanlpDemo {
    /**
     * Natures (part-of-speech tags) to exclude from the statistics.
     * Only key presence matters; the Integer value is unused.
     */
    private static final Map<String, Integer> extMap = new ConcurrentHashMap<>();

    static {
        // NOTE(review): "" never matches a real nature string, so nothing is
        // actually excluded yet — add concrete nature tags here to filter them.
        extMap.put("", -1);
    }

    public static void main(String[] args) throws Exception {
        String filePath1 = "E:\\workplace\\aliyunlog\\jieba\\1.txt";
        String filePath2 = "E:\\workplace\\aliyunlog\\jieba\\2.txt";
        String filePath3 = "E:\\workplace\\aliyunlog\\jieba\\3.txt";
        String filePath4 = "E:\\workplace\\aliyunlog\\jieba\\4.txt";

        // Concatenate the lines of all input files in order, then process.
        List<String> originList = new ArrayList<>();
        originList.addAll(Utils.getListByFile(filePath1));
        originList.addAll(Utils.getListByFile(filePath2));
        originList.addAll(Utils.getListByFile(filePath3));
        originList.addAll(Utils.getListByFile(filePath4));
        dealMessageContent(originList);
    }

    /**
     * Segments every input line, accumulates a per-word occurrence count and the
     * alphabetically sorted set of words observed for each nature, then prints
     * the nature-to-words grouping.
     *
     * @param contents raw text lines to analyse
     */
    private static void dealMessageContent(List<String> contents) {
        Map<String, HanlpDto> resultMap = new HashMap<>();
        Map<String, Set<String>> wordMap = new HashMap<>();
        contents.forEach(item -> {
            // Standard tokenizer; HanLP.segment(item) would use the default one.
            List<Term> segmentList = StandardTokenizer.segment(item);

            segmentList.forEach(term -> {
                String nature = term.nature.toString();
                if (!extMap.containsKey(nature)) {
                    // Count occurrences per distinct word; the Term kept in the
                    // dto is the first occurrence seen for that word.
                    HanlpDto existing = resultMap.get(term.word);
                    if (existing != null) {
                        // Mutating the stored dto is enough — no re-put needed.
                        existing.setCount(existing.getCount() + 1);
                    } else {
                        HanlpDto dto = new HanlpDto();
                        dto.setCount(1);
                        dto.setMessage(term);
                        resultMap.put(term.word, dto);
                    }

                    // TreeSet keeps each nature's word set sorted and distinct.
                    wordMap.computeIfAbsent(nature, k -> new TreeSet<>()).add(term.word);
                }
            });
        });
        // Sorted view by descending frequency (only used by printResult).
        List<HanlpDto> resultList = new ArrayList<>(resultMap.values());
        resultList.sort(Comparator.comparing(HanlpDto::getCount).reversed());
//        printResult(resultList);
        printResultV2(wordMap);
    }

    /**
     * Prints one line per nature: the nature tag followed by its word set.
     *
     * @param wordMap nature tag -> sorted set of words observed for that tag
     */
    private static void printResultV2(Map<String, Set<String>> wordMap) {
        wordMap.forEach((nature, words) -> System.out.println(nature + "  " + words));
    }

    /**
     * Prints each word and its nature, one per line, followed by a separator.
     *
     * @param resultList dtos ordered by descending occurrence count
     */
    private static void printResult(List<HanlpDto> resultList) {
        resultList.forEach(dto -> {
            System.out.println(dto.getMessage().word + "  " + dto.getMessage().nature.toString());
        });
        System.out.println("================================");
    }
}
