package com.atguigu.utils;

import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

public class KeywordUtil {

    /** Utility class — no instances. */
    private KeywordUtil() {
    }

    /**
     * Splits the given text into individual search terms using the IK Chinese analyzer.
     *
     * @param keyword the text to segment; {@code null} or empty yields an empty list
     * @return the segmented terms in encounter order (never {@code null})
     * @throws IOException if the underlying analyzer fails while reading the input
     */
    public static List<String> splitKeyword(String keyword) throws IOException {

        // Collection holding the segmented terms.
        List<String> words = new ArrayList<>();

        // Guard: nothing to segment — avoids an NPE in StringReader and a useless analyzer pass.
        if (keyword == null || keyword.isEmpty()) {
            return words;
        }

        // Create the IK segmenter. The 'false' flag disables smart mode (ik_smart),
        // selecting fine-grained segmentation (ik_max_word) instead.
        IKSegmenter ikSegmenter = new IKSegmenter(new StringReader(keyword), false);

        // Pull tokens one by one until the segmenter is exhausted (next() returns null).
        Lexeme lexeme = ikSegmenter.next();
        while (lexeme != null) {
            words.add(lexeme.getLexemeText());
            lexeme = ikSegmenter.next();
        }

        return words;
    }

    /** Ad-hoc smoke test: segments a sample phrase and prints the resulting terms. */
    public static void main(String[] args) throws IOException {

        System.out.println(splitKeyword("尚硅谷大数据之Flink实时数仓项目"));

    }

}
