package com.atguigu.realtime.util;

import java.io.IOException;
import java.io.StringReader;
import java.io.UncheckedIOException;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

public class IkUtil {

    /** Utility class; not instantiable. */
    private IkUtil() {
    }

    /**
     * Splits the given text into a set of distinct words using the IK
     * Chinese-text segmenter in smart mode.
     *
     * @param s the text to segment; must not be {@code null}
     * @return the set of distinct lexeme texts found in {@code s} (empty for
     *         an empty input, never {@code null})
     * @throws NullPointerException if {@code s} is {@code null}
     * @throws UncheckedIOException if the segmenter fails while reading the
     *         input (not expected for an in-memory {@link StringReader})
     */
    public static Set<String> split(String s) {
        Objects.requireNonNull(s, "s");
        Set<String> words = new HashSet<>();
        // Wrap the string in an in-memory reader (no file I/O involved);
        // try-with-resources guarantees it is released.
        try (StringReader reader = new StringReader(s)) {
            // Second argument true enables IK's "smart" (coarse-grained) segmentation.
            IKSegmenter segmenter = new IKSegmenter(reader, true);
            for (Lexeme lexeme = segmenter.next(); lexeme != null; lexeme = segmenter.next()) {
                words.add(lexeme.getLexemeText());
            }
        } catch (IOException e) {
            // Do not swallow: surface the failure (with its cause) rather than
            // silently returning a partial result set, as the original did.
            throw new UncheckedIOException("failed to segment input text", e);
        }
        return words;
    }

    public static void main(String[] args) {
        System.out.println(split("我是中国人"));
    }
}
