package com.atguigu.utils;

import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

/**
 * Utility for splitting Chinese text into search keywords with the IK analyzer.
 *
 * @author LinCong
 * @version 1.0
 * @since 2023/2/6
 */
public class KeywordUtil {

    /**
     * Splits the given text into individual tokens using the IK segmenter in
     * max-word mode ({@code useSmart = false}), which emits every possible
     * token rather than the single coarsest segmentation.
     *
     * @param keyword the raw text to segment
     * @return the tokens in the order they appear in the input (may be empty, never null)
     * @throws IOException if the segmenter fails while reading the input
     */
    public static List<String> splitKeyword(String keyword) throws IOException {
        // Program to the interface; concrete type is an implementation detail.
        List<String> words = new ArrayList<>();

        // try-with-resources guarantees the reader is released even if the
        // segmenter throws mid-stream (the original leaked it on exception).
        try (StringReader reader = new StringReader(keyword)) {
            // useSmart = false -> ik_max_word (fine-grained, tokens may overlap);
            // true would be ik_smart (each character used at most once).
            IKSegmenter segmenter = new IKSegmenter(reader, false);
            for (Lexeme lexeme = segmenter.next(); lexeme != null; lexeme = segmenter.next()) {
                words.add(lexeme.getLexemeText());
            }
        }
        return words;
    }

    /** Quick manual smoke test of the segmenter. */
    public static void main(String[] args) throws IOException {
        System.out.println(splitKeyword("尚硅谷大数据项目之Flink实时数仓"));
    }
}
