package com.zhu.utils;

import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

/**
 * Keyword-splitting utility backed by the IK Analyzer Chinese segmenter.
 *
 * <p>Stateless and thread-safe: a fresh {@link IKSegmenter} is created per call.
 */
public class KeyWordUtil {

    /** Utility class; prevent instantiation. */
    private KeyWordUtil() {
    }

    /**
     * Splits the given text into individual terms with the IK segmenter.
     *
     * <p>The segmenter is constructed with {@code useSmart = false}, i.e.
     * fine-grained (max-word) segmentation rather than smart mode —
     * NOTE(review): confirm this matches the index-side analyzer setting.
     *
     * @param keyword the text to split; {@code null} or empty yields an empty list
     * @return a mutable list of segmented terms, in order of appearance
     * @throws IOException if the segmenter fails while reading the input
     */
    public static List<String> splitKeyword(String keyword) throws IOException {
        // Guard: avoid the NPE that new StringReader(null) would throw,
        // and skip segmenter setup for empty input.
        if (keyword == null || keyword.isEmpty()) {
            return new ArrayList<>();
        }

        List<String> result = new ArrayList<>();

        // try-with-resources releases the reader even if segmentation fails.
        try (StringReader reader = new StringReader(keyword)) {
            IKSegmenter segmenter = new IKSegmenter(reader, false);
            // next() returns null when the input is exhausted.
            for (Lexeme lexeme = segmenter.next(); lexeme != null; lexeme = segmenter.next()) {
                result.add(lexeme.getLexemeText());
            }
        }
        return result;
    }
}
