package com.yswy.common.utils.iksegement;

import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * 分词器
 * @author yanwei
 * @since 2024/1/11 11:52
 */
public class IkAnalyzerUtil {

    /**
     *  分词
     * @param keyword 需要分词的文本
     * @since date
     */
    public static List<String> splitKeyWord(String keyword) {
        ArrayList<String> result = new ArrayList<>();
        try {
            // 创建一个reader对象
            StringReader reader = new StringReader(keyword);
            // 创建一个分词对象
            IKSegmenter ikSegmenter = new IKSegmenter(reader, false);
            Lexeme next = ikSegmenter.next();
            while ( next != null ) {
                // 获取分词的结果
                result.add(next.getLexemeText());
                next = ikSegmenter.next();
            }
        }catch (Exception e){
            e.printStackTrace();
        }
        return result;
    }

}
