package com.atguigu.utils;

import lombok.val;
import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

/**
 * Utility for splitting a Chinese text string into keywords using the
 * IK Analyzer ({@link IKSegmenter}) in fine-grained (non-smart) mode.
 */
public class KeywordUtil {

    /** Utility class — not instantiable. */
    private KeywordUtil() {
        throw new UnsupportedOperationException("Utility class");
    }

    /**
     * Splits the given text into individual keywords.
     *
     * @param keyword the text to segment; must not be {@code null}
     * @return the list of lexeme texts, in segmentation order (empty if none)
     * @throws IOException if the segmenter fails while reading the input
     */
    public static List<String> splitKeyword(String keyword) throws IOException {

        // Collect the segmented words here
        List<String> resultList = new ArrayList<>();

        // try-with-resources guarantees the reader is closed even on error
        try (StringReader reader = new StringReader(keyword)) {

            // 'false' selects fine-grained segmentation (not smart mode),
            // which yields more, shorter tokens
            IKSegmenter ikSegmenter = new IKSegmenter(reader, false);

            // Iterate lexemes until the segmenter is exhausted
            Lexeme next = ikSegmenter.next();
            while (next != null) {
                resultList.add(next.getLexemeText());
                next = ikSegmenter.next();
            }
        }

        return resultList;
    }

    /** Manual smoke test: prints each segmented keyword on its own line. */
    public static void main(String[] args) throws IOException {

        List<String> list = splitKeyword("尚硅谷大数据基于Flink的实时数仓项目");
        for (String word : list) {
            System.out.println(word);
        }
    }

}
