package com.atguigu.edu.realtime.util;

import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

/**
 * Utility for splitting text into search keywords with the IK analyzer.
 */
public class KeywordUtil {

    /** Quick smoke demo of {@link #analyzeKeyword(String)}. */
    public static void main(String[] args) {
        String fullword = "大数据";
        System.out.println(analyzeKeyword(fullword));
    }

    /**
     * Segments the given text into keywords using the IK analyzer in
     * smart mode (coarser-grained, fewer overlapping tokens).
     *
     * @param fullword the text to segment; must not be {@code null}
     * @return the segmented keywords in encounter order; empty if the
     *         text yields no lexemes
     * @throws RuntimeException if the underlying segmenter throws an
     *         {@link IOException} (cause is preserved)
     */
    public static List<String> analyzeKeyword(String fullword) {
        List<String> keywordList = new ArrayList<>();
        // StringReader holds no system resources, so no close() is needed.
        StringReader reader = new StringReader(fullword);
        // true = "smart" segmentation mode (coarsest-grained split).
        IKSegmenter ikSegmenter = new IKSegmenter(reader, true);
        try {
            // Lexeme: a single segmented token produced by the analyzer.
            Lexeme lexeme;
            while ((lexeme = ikSegmenter.next()) != null) {
                keywordList.add(lexeme.getLexemeText());
            }
        } catch (IOException e) {
            // The original code swallowed this and then dereferenced a
            // possibly-null lexeme (NPE risk) or re-added the previous one.
            // Fail fast instead, preserving the cause for callers.
            throw new RuntimeException("IK segmentation failed for: " + fullword, e);
        }
        return keywordList;
    }
}
