package ICTCLAS.I3S.AC;

import java.io.FileWriter;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.HashMap;
import java.util.Enumeration;
import Utils.SortUtils;
import Utils.POSTool;
import Utils.Printers;
import ICTCLAS.I3S.AC.Stopwords;
import JDBC.DatabaseDao;

/**
 * Word segmentation, stopword removal, and word-frequency counting
 * over Sina Weibo status content.
 * 
 * @author Liang Guo
 * 
 *         2013-11-26
 */
public class Test {

	public static void main(String[] srg) {

		// word -> occurrence count across all fetched statuses
		Hashtable<String, Integer> word_count = new Hashtable<String, Integer>();
		String table = "style"; // table name from Sinaweibo

		try {
			int pageSize = 1000;
			// Page through the table 1000 rows at a time, 26 pages max,
			// using the classic SQL Server "top N not in (top M)" paging idiom.
			// NOTE(review): the SQL is built by concatenation; the interpolated
			// values are int literals and a local constant, so there is no
			// injection path here, but a PreparedStatement would be safer.
			for (int curPage = 0; curPage < 26; curPage++) {
				String sql = "select top "
						+ pageSize
						+ "* from " + table + " where (status_id not in (select top "
						+ (pageSize * curPage)
						+ " status_id from " + table + " order by status_id)"
						+ " and category_lable=3)" + " order by status_id";
				Statement stmt = DatabaseDao.getConnection().createStatement();
				try {
					ResultSet rs = stmt.executeQuery(sql);
					try {
						while (rs.next()) {
							String content = rs.getString("content");
							try {
								// ICTCLAS segmentation: returns tokens separated by spaces
								String segment = TestMain
										.testICTCLAS_ParagraphProcess(content);
								System.out.println("seg:" + segment);
								countWords(segment.split(" "), word_count);
							} catch (Exception e) {
								// best-effort: a bad row should not abort the whole run
								e.printStackTrace();
							}
						}
					} finally {
						rs.close(); // was leaked once per page in the original
					}
				} finally {
					stmt.close(); // was leaked once per page in the original
				}
			}

		} catch (Exception e) {
			e.printStackTrace();
		}
		System.out.println(word_count.size());

		writeSortedCounts(word_count, "D:\\" + table + "_ordinary.txt");
	}

	/**
	 * Adds every non-empty, non-stopword token to the frequency table.
	 *
	 * @param words      tokens from the segmenter (may contain empty strings
	 *                   when the input held consecutive separators)
	 * @param word_count frequency table to update in place
	 */
	private static void countWords(String[] words, Hashtable<String, Integer> word_count) {
		for (int i = 0; i < words.length; i++) {
			String w = words[i];
			// skip empty tokens produced by consecutive spaces, and stopwords
			if (w.length() == 0 || Stopwords.isStopword(w)) {
				continue;
			}
			Integer old = word_count.get(w);
			word_count.put(w, old == null ? 1 : old + 1);
		}
	}

	/**
	 * Appends "word:count" lines, sorted by count descending, to the given file,
	 * echoing each line to stdout.
	 *
	 * @param word_count word frequency table
	 * @param path       output file path (opened in append mode)
	 */
	private static void writeSortedCounts(Hashtable<String, Integer> word_count, String path) {
		FileWriter fileWriter = null;
		try {
			fileWriter = new FileWriter(path, true);
			// sort entries by value; -1 = descending, +1 = ascending
			Map.Entry[] entries = SortUtils.sortedHashtableByValue(word_count, -1);
			for (int i = 0; i < entries.length; i++) {
				String key = (String) entries[i].getKey();
				int value = (Integer) entries[i].getValue();

				System.out.println(key + ":" + value);
				fileWriter.write(key + ":" + value + "\n");
			}
			fileWriter.flush();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			// close in finally — the original leaked the writer if a write threw
			if (fileWriter != null) {
				try {
					fileWriter.close();
				} catch (IOException ignored) {
					// nothing useful to do if close itself fails
				}
			}
		}
	}

}