package com.danan.realtime.util;

import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

import java.io.IOException;
import java.io.StringReader;
import java.util.HashSet;
import java.util.Set;

/**
 * @author NanHuang
 * @Date 2023/1/26
 */
public class WordUtil {

    private WordUtil() {
        // Utility class — static methods only, not meant to be instantiated.
    }

    /**
     * Splits the given text into a set of distinct keywords using the IK Analyzer.
     *
     * @param keywords the text to segment; must not be {@code null}
     *                 (a {@code null} input will fail inside the segmenter)
     * @return the set of distinct lexeme texts produced by smart segmentation;
     *         empty if the input yields no lexemes
     * @throws RuntimeException wrapping an {@link IOException} if the underlying
     *         segmenter fails while reading the input
     */
    public static Set<String> splitWord(String keywords) {
        // "true" selects the smart (coarse-grained) segmentation mode.
        IKSegmenter ikSegmenter = new IKSegmenter(new StringReader(keywords), true);
        Set<String> result = new HashSet<>();
        try {
            // Drain the segmenter: next() returns null when no lexemes remain.
            for (Lexeme lexeme = ikSegmenter.next(); lexeme != null; lexeme = ikSegmenter.next()) {
                result.add(lexeme.getLexemeText());
            }
        } catch (IOException e) {
            // next() declares IOException; preserve the cause and add context.
            throw new RuntimeException("Failed to segment keywords: " + keywords, e);
        }
        return result;
    }
}
