package com.zshstart.utils;

import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

/**
 * Utility for splitting a keyword string into word tokens using the IK analyzer (IKSegmenter).
 *
 * @author zshstart
 * @create 2021-10-09 18:10
 */
public class SplitWordUtil {
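    /**
     * Splits the given keyword into individual words using fine-grained (non-smart) IK segmentation.
     *
     * @param keyWord the text to segment
     * @return the segmented words, in the order they were produced
     */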
    public static List<String> splitKeyWord(String keyWord) {

        List<String> words = new ArrayList<>();

        StringReader reader = new StringReader(keyWord);

        // Build the segmenter.
        // false: non-smart (fine-grained) mode - outputs every possible segmentation (splits the text as finely as possible)
        // true:  smart mode - merges numerals with measure words and resolves ambiguity (each character is used in only one word)
        IKSegmenter ikSegmenter = new IKSegmenter(reader, false);

        try {
            // Read lexemes one by one until the segmenter is exhausted
            Lexeme next = ikSegmenter.next();
            while (next != null) {
                String word = next.getLexemeText();
                words.add(word);
                next = ikSegmenter.next();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

        return words;
    }

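    // Quick manual check: prints the segmented tokens of a sample phrase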
    public static void main(String[] args) {
        System.out.println(splitKeyWord("zshstart大数据项目之Flink实时项目"));
    }
}
