/**
 * Exercises Lucene's StandardTokenizer against Chinese raw-text sample files
 * for comparison with the project's CASChineseTokenizer.
 */
package chineseAnalyzerTest;

import static org.junit.Assert.*;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;

import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;


import chineseanalyser.CASChineseTokenizer;

/**
 * @author FuLingting
 *
 */
public class TestStandardAnalyser {

	/** Tokenizer under test; created fresh for each input file. */
	private Tokenizer tokenizer = null;

	/**
	 * No per-test setup needed; the tokenizer is built inside the test itself.
	 *
	 * @throws java.lang.Exception
	 */
	@Before
	public void setUp() throws Exception {
	}

	/**
	 * Releases any tokenizer a test left open so file handles are not leaked
	 * across tests.
	 *
	 * @throws java.lang.Exception
	 */
	@After
	public void tearDown() throws Exception {
		if (tokenizer != null) {
			tokenizer.close();
			tokenizer = null;
		}
	}

	/**
	 * Test method for {@link org.apache.lucene.analysis.standard.StandardAnalyzer#tokenStream(java.lang.String, java.io.Reader)}.
	 *
	 * Tokenizes every file in {@link #szFilePaths} with {@link StandardTokenizer}
	 * and prints each token via {@code TokenTestUtil.printTokenInfor}. Unlike the
	 * original version, an I/O problem now fails the test instead of being
	 * silently swallowed, and the reader/tokenizer are closed per file.
	 */
	@Test
	public final void testTokenStream() {
		for (int i = 0; i < szFilePaths.length; i++) {
			Reader reader = null;
			try {
				File file = new File(szFilePaths[i]);
				reader = new FileReader(file);
				tokenizer = new StandardTokenizer(reader);
				Token reusableToken = new Token();
				// Old Lucene reusable-token iteration: next() returns null at EOF.
				while (tokenizer.next(reusableToken) != null) {
					TokenTestUtil.printTokenInfor(reusableToken);
				}
			} catch (IOException ioe) {
				// FileNotFoundException is an IOException, so one catch covers both.
				// Surface the failure rather than letting the test pass vacuously.
				ioe.printStackTrace();
				fail("Failed to tokenize " + szFilePaths[i] + ": " + ioe.getMessage());
			} finally {
				// Close per-file resources; the original leaked both handles.
				try {
					if (tokenizer != null) {
						tokenizer.close();
						tokenizer = null;
					}
					if (reader != null) {
						reader.close();
					}
				} catch (IOException ignored) {
					// Best-effort cleanup; nothing useful to do if close() fails.
				}
			}
		}
	}

	// Input files for the tokenizer run.
	// NOTE(review): the comments on the disabled entries below were corrupted by
	// a charset mismatch in the original source; they appear to distinguish files
	// smaller/larger than some 256-unit threshold — confirm against test_doc.
	private String[]  szFilePaths = {
		"../../../test_doc/docs/raw_text_analysis/电话号码.txt",	
//		"..\\..\\..\\test_doc\\docs\\raw_text_analysis\\С256.txt",
//		"..\\..\\..\\test_doc\\docs\\raw_text_analysis\\256.txt"
	};

}
