package cn.gmy;

import java.io.IOException;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttributeImpl;
import org.junit.Test;
import org.wltea.analyzer.lucene.IKAnalyzer;


public class Demo02_Lucene {

	/**
	 * Tokenizes a Chinese sentence with the IK analyzer and prints each term.
	 *
	 * @throws IOException if the token stream cannot be read
	 */
	@Test
	public void test1() throws IOException {
		String content = "我们都是学习大数据的人";
		// Analyzer and TokenStream are both Closeable; the original version
		// leaked them — try-with-resources guarantees release.
		try (Analyzer analyzer = new IKAnalyzer();
				TokenStream tStream = analyzer.tokenStream("IKAnalysis", content)) {
			// Attribute view onto the current term's text; no downcast to the
			// Impl class is needed (the previous cast was dead code).
			CharTermAttribute termAttribute = tStream.addAttribute(CharTermAttribute.class);
			// Lucene contract: reset() must be called before the first incrementToken().
			tStream.reset();
			while (tStream.incrementToken()) {
				System.out.println(termAttribute.toString());
			}
			// Lucene contract: end() after consuming so end-of-stream state is set.
			tStream.end();
		}
	}

	/**
	 * Placeholder for a jieba segmenter experiment; intentionally empty for now.
	 */
	@Test
	public void test2() {
//		JiebaSegmenter jb=new JiebaSegmenter();	
		
	}

}
