package sogou.sample;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.dictionary.CustomDictionary;
import com.hankcs.hanlp.seg.common.Term;
import com.smart.util.CollectionUtil;
import com.smart.util.IOUtil;


//java -jar THUCTC_java_v1_run.jar -c d:/eclipse_mars/workspace/THUCTC/type.txt -train d:/eclipse_mars/workspace/THUCTC/data -s d:/eclipse_mars/workspace/THUCTC/model

public class MergeDoc {

	/**
	 * Stop-word set consulted by {@link #delStopWords(List)}. Populated by
	 * {@link #loadStopWords()}; it is {@code null} until that method runs.
	 */
	public static HashSet<String> stopWords;

	// Domain dictionary files whose entries are registered with HanLP's
	// custom dictionary so the segmenter keeps them as single tokens.
	private static final String[] DICT_FILES = {
			"d:/workspace/LDA4j/data/dict/car.txt",
			"d:/workspace/LDA4j/data/dict/game.txt",
			"d:/workspace/LDA4j/data/dict/pop.txt",
			"d:/workspace/LDA4j/data/dict/finance.txt",
			"d:/workspace/LDA4j/data/dict/life.txt",
			"d:/workspace/LDA4j/data/dict/bizjoin.txt"
	};

	/**
	 * Reads all lines of a UTF-8 encoded file.
	 *
	 * @param file the file to read
	 * @return the file's lines, in order
	 * @throws Exception if the file cannot be opened or read
	 */
	private static List<String> readFileLines(File file) throws Exception {
		List<String> lines = new ArrayList<String>();
		BufferedReader reader = new BufferedReader(
				new InputStreamReader(new FileInputStream(file), "utf-8"));
		try {
			for (String line = reader.readLine(); line != null; line = reader.readLine()) {
				lines.add(line);
			}
		} finally {
			// Close even when readLine throws — the original code leaked the
			// stream on any read failure.
			reader.close();
		}
		return lines;
	}

	/**
	 * Registers every word of the domain dictionary files with HanLP's
	 * custom dictionary.
	 *
	 * @throws Exception if any dictionary file cannot be read
	 */
	private static void loadCustomDictionary() throws Exception {
		for (String path : DICT_FILES) {
			for (String word : readFileLines(new File(path))) {
				CustomDictionary.add(word);
			}
		}
	}

	/**
	 * Loads the Chinese and English stop-word lists into {@link #stopWords}.
	 * Must be called before {@link #delStopWords(List)} is used.
	 *
	 * @throws Exception if a stop-word file cannot be read
	 */
	public static void loadStopWords() throws Exception {
		stopWords = new HashSet<String>();
		stopWords.addAll(readFileLines(new File("D:/workspace/LDA4j/data/stopwords/cn.txt")));
		stopWords.addAll(readFileLines(new File("D:/workspace/LDA4j/data/stopwords/en.txt")));
	}

	/**
	 * Removes every stop word from the given segmentation result, in place.
	 *
	 * @param termList list of segmented terms; modified in place
	 * @return the same (now filtered) list, for call chaining
	 * @throws Exception declared for source compatibility with existing callers
	 */
	public static List<Term> delStopWords(List<Term> termList) throws Exception {
		// An explicit Iterator is required: removing elements during a
		// for-each traversal would throw ConcurrentModificationException.
		Iterator<Term> itr = termList.iterator();
		while (itr.hasNext()) {
			if (stopWords.contains(itr.next().word)) {
				itr.remove();
			}
		}
		return termList;
	}

	/**
	 * Splits every raw sample file into one file per line, laid out as
	 * {@code THUCTC/data/<category>/<lineIndex>.txt} for THUCTC training
	 * (see the usage comment at the top of this file).
	 */
	public static void main(String[] args) throws Exception {
		File f = new File("D:/eclipse_mars/workspace/LDA4j-hg/data/rawSample");
		File[] files = f.listFiles();
		if (files == null) {
			// listFiles() returns null when the path is missing or not a directory.
			System.err.println("Not a readable directory: " + f);
			return;
		}
		for (int i = 0; i < files.length; i++) {
			List<String> lines = IOUtil.readLines(new FileInputStream(files[i]));
			// One output directory per input file (category); create it once,
			// not once per line as the original did.
			File parent = new File("D:/eclipse_mars/workspace/THUCTC/data/" + files[i].getName() + "/");
			if (!parent.exists()) {
				parent.mkdirs();
			}
			for (int j = 0; j < lines.size(); j++) {
				PrintWriter writer = new PrintWriter(new OutputStreamWriter(
						new FileOutputStream(new File(parent, j + ".txt")), "UTF-8"));
				try {
					writer.println(lines.get(j));
				} finally {
					writer.close();
				}
			}
		}
	}

	/**
	 * Merges raw sample lines into documents of roughly 30 lines each,
	 * segments each document with HanLP, strips stop words, and writes one
	 * token per line to {@code LDA4j/data/sample/<name><k>.txt}.
	 *
	 * <p>Note: a trailing group smaller than a full batch is discarded,
	 * matching the original behaviour.
	 */
	public static void main1(String[] args) throws Exception {
		// The static initializer that used to load these was disabled, which
		// left stopWords null and made delStopWords throw NPE; load explicitly.
		loadStopWords();
		loadCustomDictionary();

		File f = new File("d:/workspace/LDA4j/data/rawSample");
		File[] files = f.listFiles();
		if (files == null) {
			System.err.println("Not a readable directory: " + f);
			return;
		}
		for (int i = 0; i < files.length; i++) {
			List<String> lines = IOUtil.readLines(new FileInputStream(files[i]));
			System.out.println(files[i].getName() + " : " + lines.size());
			List<String> single = new ArrayList<String>();
			int k = 0;
			for (int j = 0; j < lines.size(); j++) {
				single.add(lines.get(j));
				// Flush a merged document at every 30th line (never at j == 0),
				// exactly as the original modulo test did.
				if (j != 0 && j % 30 == 0) {
					List<Term> termList = delStopWords(HanLP.segment(CollectionUtil.join(single, "。")));
					PrintWriter writer = new PrintWriter(new OutputStreamWriter(
							new FileOutputStream("D:/workspace/LDA4j/data/sample/" + files[i].getName() + k + ".txt"), "UTF-8"));
					try {
						for (Term term : termList) {
							writer.println(term.word);
						}
					} finally {
						writer.close();
					}
					single = new ArrayList<String>();
					k++;
				}
			}
		}
	}

}
