package com.shrio.lucene;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.util.Version;
import org.wltea.analyzer.lucene.IKAnalyzer;

import java.io.IOException;
import java.io.StringReader;

/**
 * Created by luojie on 2018/4/24.
 */
/**
 * Demonstrates tokenization of (Chinese) text with two Lucene analyzers:
 * the built-in {@link StandardAnalyzer} and the third-party IK Analyzer.
 * Each public method prints the resulting tokens to stdout.
 */
public class ChineseAnalyerDemo {

    /**
     * Tokenizes {@code msg} with Lucene's built-in StandardAnalyzer
     * and prints the tokens.
     *
     * @param msg the text to analyze
     */
    public void standardAnalyer(String msg) {
        StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_4_10_4);
        this.getTokens(analyzer, msg);
    }

    /**
     * Tokenizes {@code msg} with the IK Analyzer and prints the tokens.
     *
     * @param msg the text to analyze
     */
    public void iKanalyer(String msg) {
        // true = "smart" mode: maximum-word-length segmentation;
        // the no-arg constructor would use fine-grained segmentation instead.
        IKAnalyzer analyzer = new IKAnalyzer(true);
        this.getTokens(analyzer, msg);
    }

    /**
     * Runs {@code msg} through {@code analyzer} and prints the resulting tokens.
     *
     * @param analyzer the analyzer to tokenize with
     * @param msg      the text to analyze
     */
    private void getTokens(Analyzer analyzer, String msg) {
        // try-with-resources closes the stream even if printing fails;
        // Lucene's consume contract is reset() -> incrementToken()* -> end() -> close().
        // The original version leaked the TokenStream by never closing it.
        try (TokenStream tokenStream = analyzer.tokenStream("content", new StringReader(msg))) {
            tokenStream.reset();
            this.printTokens(analyzer.getClass().getSimpleName(), tokenStream);
            tokenStream.end();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Consumes {@code tokenStream} and prints one line of the form
     * {@code AnalyzerName->[tok1],[tok2],...}.
     *
     * @param analyzerType simple class name of the analyzer, used as the line prefix
     * @param tokenStream  an already-reset token stream to drain
     */
    private void printTokens(String analyzerType, TokenStream tokenStream) {
        CharTermAttribute termAttr = tokenStream.addAttribute(CharTermAttribute.class);
        // StringBuilder: this buffer is method-local, so the synchronized
        // StringBuffer used previously bought nothing.
        StringBuilder result = new StringBuilder();
        try {
            while (tokenStream.incrementToken()) {
                if (result.length() > 0) {
                    result.append(",");
                }
                result.append("[").append(termAttr).append("]");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.out.println(analyzerType + "->" + result);
    }
}
