package tfidf;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.util.Version;

public class testan {

	/**
	 * Reads the whole file at {@code filePath} as UTF-8 and returns its
	 * contents as a single string, with each line break replaced by one
	 * space (matching the original line-joining behavior).
	 *
	 * @param filePath path of the file to read
	 * @return the file contents joined with spaces (trailing space included
	 *         when the file has at least one line)
	 * @throws FileNotFoundException if the file does not exist
	 * @throws IOException if reading fails
	 */
	public static String getText(String filePath) throws FileNotFoundException,
			IOException {
		BufferedReader reader = new BufferedReader(new InputStreamReader(
				new FileInputStream(filePath), "UTF-8"));
		try {
			StringBuilder sb = new StringBuilder();
			String aline;
			while ((aline = reader.readLine()) != null) {
				sb.append(aline).append(' ');
			}
			return sb.toString();
		} finally {
			// Closing the BufferedReader also closes the wrapped
			// InputStreamReader and FileInputStream.
			reader.close();
		}
	}

	/**
	 * Segments the UTF-8 text file at {@code file} into words using
	 * Lucene's {@link SmartChineseAnalyzer}.
	 *
	 * @param file path of the file to segment
	 * @return the segmented words in order; empty array if the file
	 *         produces no tokens
	 * @throws IOException if the file cannot be read or tokenization fails
	 */
	public static String[] cutWord(String file) throws IOException {
		Analyzer analyzer = new SmartChineseAnalyzer(Version.LUCENE_36);
		TokenStream ts = analyzer.tokenStream("",
				new StringReader(getText(file)));
		// The attribute must be obtained once, before consuming the stream;
		// incrementToken() then refills this same instance for each token.
		CharTermAttribute ta = ts.addAttribute(CharTermAttribute.class);
		List<String> words = new ArrayList<String>();
		try {
			// Lucene's TokenStream contract: reset() before the first
			// incrementToken(), end() after the last, close() when done.
			ts.reset();
			while (ts.incrementToken()) {
				words.add(ta.toString());
			}
			ts.end();
		} finally {
			ts.close();
		}
		return words.toArray(new String[words.size()]);
	}

	/** Demo entry point: prints each segmented word of the sample file. */
	public static void main(String[] argc) throws IOException {
		for (String x : cutWord("files/test.txt")) {
			System.out.println(x);
		}
	}
}
