package TAIC.util;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.PrintStream;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Scanner;
import java.util.TreeMap;
import java.util.Vector;

import TAIC.Google.PicDegree;
import TAIC.LM.KeyWord;

import edu.stanford.nlp.ling.Sentence;
import edu.stanford.nlp.process.DocumentPreprocessor;
import edu.stanford.nlp.tagger.maxent.MaxentTagger;

public class POSText {

	/** Shared POS tagger; loaded once by the static initializer below. */
	static MaxentTagger  tagger ;

	/**
	 * Entry point. Expects a single argument: the path of a config file
	 * listing the training directories (one whitespace-separated token each).
	 */
	public static void main(String[] str) {
		if ( str.length < 1 ) {
			System.out.println( "please input config file");
			return;
		}

		train( str [ 0 ] ) ;
	}

	/**
	 * POS-tags every document listed in each training directory's index.txt
	 * and caches the tagged output next to the document as "&lt;name&gt;.pos".
	 * Documents whose cache file already exists are skipped.
	 *
	 * @param fn path to a config file whose tokens name the training
	 *           directories (each token is truncated at its first '.')
	 */
	public static void train ( String fn ) {
		Scanner dirScanner = null ;
		try {
			dirScanner = new Scanner ( new File ( fn ) ) ;
			while ( dirScanner.hasNext () ) {
				String dirName = dirScanner.next();
				// Strip the extension only when present; the original code
				// threw StringIndexOutOfBoundsException for dot-less tokens.
				int dot = dirName.indexOf('.');
				if ( dot >= 0 ) {
					dirName = dirName.substring(0, dot);
				}
				processDirectory ( dirName ) ;
			}
		} catch ( Exception e) {
			e.printStackTrace() ;
		} finally {
			if ( dirScanner != null ) dirScanner.close();  // was never closed before
		}
	}

	/** Tags every document listed in dirName/index.txt, skipping cached ones. */
	private static void processDirectory ( String dirName ) throws Exception {
		// File.separator is identical to the original "\\" on Windows and
		// makes the tool usable on other platforms as well.
		Scanner configScanner = new Scanner ( new File ( dirName + File.separator + "index.txt" ) ) ;
		try {
			while ( configScanner.hasNextLine() ) {   // Iterate all the documents in the training set
				String filename = configScanner.nextLine().trim() ;
				if ( filename.length() == 0 ) continue ;  // tolerate blank lines in index.txt
				System.out.println ( "Processing Document: " + dirName + File.separator + filename ) ;
				File cacheFile = new File ( dirName + File.separator + filename + ".pos" );
				if ( cacheFile.exists () ) continue ;  // already tagged on a previous run
				tagDocument ( dirName + File.separator + filename, cacheFile ) ;
			}
		} finally {
			configScanner.close();
		}
	}

	/** Tags one document sentence-by-sentence, writing the result to cacheFile. */
	private static void tagDocument ( String docPath, File cacheFile ) throws Exception {
		PrintStream fout = new PrintStream ( cacheFile ) ;  // cache for processed words
		BufferedReader fin = null ;
		try {
			fin = new BufferedReader( new FileReader( docPath ));	 // buffer for POS tagger
			List l = tokenizeText( fin );
			for ( Iterator senIt = l.listIterator () ; senIt.hasNext (); ) {   //  Iterate all the sentences in the document
				Sentence sentence = (Sentence) senIt.next() ;
				fout.println ( MaxentTagger.tagSentence(sentence).toString(false) );
			}
		} finally {
			if ( fin != null ) fin.close() ;  // the original leaked this reader
			fout.close() ;                    // the original leaked this on exception
		}
	}

	// Load the tagger model once at class-initialization time.
	// NOTE(review): hard-coded absolute Windows path — this should come from
	// configuration; confirm the model location before running elsewhere.
	static {
		try {
			tagger = new MaxentTagger ( "E:\\users\\yuqiangchen\\POS\\train-wsj-0-18.holder" ) ;
		}
		catch ( Exception e ) {
			e.printStackTrace () ;
		}
	}

	/**
	 * Splits raw text from the given reader into Stanford Sentence objects.
	 *
	 * @param r source of the raw document text
	 * @return a List of Sentence instances, one per detected sentence
	 */
	public static List tokenizeText( Reader r ) {
		DocumentPreprocessor documentPreprocessor = new DocumentPreprocessor() ;
		return documentPreprocessor.getSentencesFromText(r);
	}

}
