package com.wistech.search.solr.function.analyzer;

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;

import net.paoding.analysis.analyzer.PaodingAnalyzer;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer;
import org.apache.lucene.util.Version;
import org.junit.Test;
import org.wltea.analyzer.core.Lexeme;
import org.wltea.analyzer.core.IKSegmenter;

import com.chenlb.mmseg4j.analysis.ComplexAnalyzer;
import com.chenlb.mmseg4j.analysis.TokenUtils;

/**
 * Performance benchmark comparing Chinese tokenizers:
 * IKAnalyzer, mmseg4j, smartcn and paoding.
 */
public class PerformanceBench extends BaseBench {

	@Test
	public void test() throws InterruptedException {
		// Rough JVM warm-up before timing; benchText is provided by BaseBench.
		Thread.sleep(100);
		for (int i = 1; i < 10000; i++) {
			// intentionally empty warm-up loop
		}

		long startTime = System.currentTimeMillis(); // 开始时间
		ikanalyzer(new StringReader(benchText));
		report("IK", System.currentTimeMillis() - startTime);

		startTime = System.currentTimeMillis();
		mmseg4j(new StringReader(benchText));
		report("mmseg4j", System.currentTimeMillis() - startTime);

		startTime = System.currentTimeMillis();
		smartcn(new StringReader(benchText));
		report("smartcn", System.currentTimeMillis() - startTime);

		startTime = System.currentTimeMillis();
		paoding(new StringReader(benchText));
		report("paoding", System.currentTimeMillis() - startTime);
	}

	/**
	 * Prints the elapsed time for one tokenizer run.
	 *
	 * @param name   tokenizer label used in the output message
	 * @param millis elapsed wall-clock time in milliseconds
	 */
	private void report(String name, long millis) {
		// Plain double division replaces the deprecated new Float(...) boxing.
		System.out.println(name + "分词耗时" + millis / 1000.0 + "秒!");
	}

	/**
	 * Tokenizes the whole input with IKSegmenter, discarding every lexeme
	 * (only throughput is measured).
	 *
	 * @param reader text to tokenize
	 */
	public void ikanalyzer(Reader reader) {
		IKSegmenter segmenter = new IKSegmenter(reader, true);
		try {
			for (Lexeme lexeme = segmenter.next(); lexeme != null; lexeme = segmenter.next()) {
				// lexeme intentionally discarded
			}
		} catch (IOException e) {
			// Previously swallowed silently, which would make the benchmark report
			// a bogus (too fast) time; surface the failure instead.
			throw new RuntimeException("IK tokenization failed", e);
		}
	}

	/**
	 * Tokenizes the whole input with mmseg4j's ComplexAnalyzer, discarding tokens.
	 *
	 * @param reader text to tokenize
	 */
	public void mmseg4j(Reader reader) {
		drain(new ComplexAnalyzer(), reader);
	}

	/**
	 * Tokenizes the whole input with Lucene's SmartChineseAnalyzer, discarding tokens.
	 *
	 * @param reader text to tokenize
	 */
	public void smartcn(Reader reader) {
		drain(new SmartChineseAnalyzer(Version.LUCENE_31), reader);
	}

	/**
	 * Tokenizes the whole input with PaodingAnalyzer, discarding tokens.
	 *
	 * @param reader text to tokenize
	 */
	public void paoding(Reader reader) {
		drain(new PaodingAnalyzer(), reader);
	}

	/**
	 * Drains every token the given analyzer produces for {@code reader}.
	 * Shared by the three Lucene-Analyzer-based benchmarks (they previously
	 * duplicated this loop verbatim).
	 *
	 * @param analyzer analyzer under test
	 * @param reader   text to tokenize
	 */
	private void drain(Analyzer analyzer, Reader reader) {
		TokenStream ts = analyzer.tokenStream("", reader);
		try {
			for (Token t = new Token(); (t = TokenUtils.nextToken(ts, t)) != null;) {
				// token intentionally discarded
			}
		} catch (IOException e) {
			// Do not swallow: a failed run must not report a misleading timing.
			throw new RuntimeException("tokenization failed", e);
		}
	}
}
