package TAIC.LM ;

import TAIC.util.* ;
import TAIC.Google.* ;
import edu.stanford.nlp.tagger.maxent.* ;
import edu.stanford.nlp.process.* ; 
import edu.stanford.nlp.ling.Sentence ;
import java.util.regex.Pattern ;
import java.util.* ; 
import java.io.* ;

/**
 * Naive Bayes keyword extractor.  train() builds per-class word statistics
 * from POS-tagged training documents listed in "temp.txt"; getKey() then
 * returns the KeyNumber best keywords of every class, down-weighting words
 * whose "pictoriability" (PicDegree) falls below Pic_Min.
 *
 * NOTE(review): KeyWord, PicDegree and ThreadPool come from the TAIC.util /
 * TAIC.Google packages, which are not visible in this file -- their exact
 * semantics are assumed from usage and should be confirmed against those
 * classes.
 */
public class Bayes {
	static double Pic_Min = 0.6 ;  // Threshold for minimum pictoriability
	static MaxentTagger  tagger ;  // shared POS tagger, loaded once in the static block below
	double P_c [] ;      // P( c )  -- class priors, filled by train()
	double P_c_w [][] ;  // P(  w | c ) 
	double P_w_c [][] ;  // P(  c | w ) 
	int classes = 2;     // overwritten in train() by the first int read from temp.txt
	int KeyNumber ;      // number of keywords requested per class
	TreeMap < String , int [] > map = new TreeMap < String , int [] > () ;   //  word -> int[ classes + 1 ]: per-class counts; slot [classes] stores the id( an int ) of a new added word
	TreeMap < Integer , String > inverseMap = new TreeMap < Integer , String > () ;  // inverseMap storing the corresponding string of a key(int)

	/** @param size number of keywords to extract per class */
	public Bayes ( int size ) {   
		KeyNumber = size ; 
	}
	
	// Ad-hoc driver: extract 5 keywords per class.
	static public void main ( String s [] ) {
		(new Bayes (5)).getKey ();
	}
	
	/**
	 * Trains the model, gathers the keywords of every class into one list
	 * and dumps them (word + probability) to "KeyWords.txt".
	 *
	 * FIXME(review): the catch block below silently swallows any I/O error,
	 * and the PrintStream is never closed.
	 *
	 * @return keywords of all classes concatenated (class 0 first)
	 */
	public ArrayList < KeyWord > getKey () {
		ArrayList < KeyWord > result = null ;
		int i , j ; 
 
		train () ; 		
//		for ( i = 0 ; i < classes ; i ++ ) {
//			double total = 0 ;
//			for ( j = 0 ; j < map.size() ; j ++ )
//				total += P_c_w [ i ] [ j ] ;
//			System.out.println ( "class " + i + ": " + total ) ; 
//		}
//		
//		if ( 0 == 0 ) System.exit( 0 ) ;
		
		// Class 0 seeds the result list; the keywords of the remaining
		// classes are appended to it.
		for ( i = 0 ; i < classes ; i ++ ) 
			if ( i == 0 ) result = getKeyInClass ( i ) ;
			else {
				ArrayList < KeyWord > temp = getKeyInClass ( i ) ;
				for ( j = 0 ; j < temp.size(); j ++ ) result.add ( temp.get ( j ) ) ;
			}	
	
		try {
			PrintStream fout = new PrintStream ( new File ( "KeyWords.txt" ) );
			for ( i = 0 ; i < result.size() ; i ++ ) fout.println ( result.get ( i ).word + " " + result.get(i).prob );
		}catch ( Exception e ) {
		}

		return result ; 
	}
	
	/**
	 * Returns (at most) the KeyNumber best keywords of one class.
	 *
	 * Steps: sort all words by P(w|c) descending; take the top
	 * KeyNumber * 3 as candidates; query their pictoriability concurrently
	 * and multiply scores below Pic_Min by 0.1; re-sort; then drop
	 * candidates subsumed by an already-chosen phrase (regex containment)
	 * and normalise the probabilities of the survivors.
	 *
	 * @param desiredClass class index in [0, classes)
	 */
	public ArrayList < KeyWord > getKeyInClass ( int desiredClass ) {

		if ( KeyNumber > map.size () ) KeyNumber = map.size() - 1 ;  // clamp the request to the vocabulary size
		int length = Math.min ( KeyNumber * 3 , map.size () );       // size of the candidate pool
		int i , j;
		String s ;
		
		Node arr [ ] = new Node [ map.size() ] ;
		for ( i = 0 ; i < map.size() ; i ++ ) 
			arr [ i ] = new Node ( i , P_c_w [ desiredClass ][ i ] ) ;
		Arrays.sort ( arr ) ;                   //  sort the arr (descending by score) to get a small query set
		//for (i = 0 ; i < KeyNumber ; i ++ ) System.out.println ( inverseMap.get (arr [ i ].key ));

		// Query the pictoriability of every candidate in parallel.
		// NOTE(review): ThreadPool / PicDegree are project classes -- the
		// assumption is that join() blocks until all getValue() results are
		// ready; confirm against TAIC.util.
		ThreadPool tp = new ThreadPool ( 20 ) ;
		PicDegree pd [] = new PicDegree [ length ] ; 
		for ( i = 0 ; i < length ; i ++ ) pd [ i ] = new PicDegree ( inverseMap.get ( arr[ i ].key ) ) ; 
		for ( i = 0 ; i < length ; i ++ ) tp.addThread ( pd[ i ] ) ;
		tp.join () ;
		
		// Penalise candidates that are not "pictorial" enough, then re-rank
		// only the candidate prefix of the array.
		for ( i = 0 ; i < length ; i ++ ) arr [ i ].max *= ( pd[ i ].getValue () > Pic_Min ? 1.0 : 0.1 ) ;
		Arrays.sort ( arr , 0 , length ) ;
		
		ArrayList < KeyWord > result = new ArrayList < KeyWord > () ; 
		ArrayList < Pattern > p = new ArrayList < Pattern > () ;
		
		// Deduplicate: a multi-word candidate replaces a shorter phrase it
		// contains; a candidate already covered by a chosen phrase is skipped.
		for ( i = 0 ; i < length ; i ++ ) {
			s = inverseMap.get ( arr[ i ].key ) ;
			Pattern temp = Pattern.compile( ".*"+ s.replace (" " , " .*" ) + ".*" );
			for ( j = 0; j < result.size () ; j ++ ) {
				if ( p.get(j).matcher ( s ).matches() && s.indexOf ( ' ' ) != -1 ) {
					result.set( j , new KeyWord ( s , 0.0 , desiredClass ) );  
					p.set( j ,temp );
					break ;
				}
				if ( temp.matcher ( result.get(j).word ).matches () ) break ;
			}
			if ( j >= result.size () ) {  // no subsumption found: keep as a new keyword
				result.add ( new KeyWord ( s , 0.0 , desiredClass ) ) ;
				p.add ( temp ) ;
			}
			if ( result.size () >= KeyNumber ) break ; 
		}

		// Re-normalise P(w|c) over the chosen keywords so the reported
		// probabilities sum to 1.  map.get(word)[classes] is the word id.
		double totalProb = 0.0 ; 
		for ( i = 0 ;i < result.size (); i ++ ) totalProb += P_c_w[desiredClass][map.get(result.get(i).word)[classes]] ;
		for ( i = 0 ; i < result.size(); i ++ ) 
			result.set ( i , new KeyWord ( result.get ( i ).word ,  
				(P_c_w[desiredClass][map.get(result.get(i).word)[classes]]/totalProb)  , desiredClass ));
		return result ; 
	}
	
	
	/**
	 * Reads "temp.txt" (format: number of classes, then repeated pairs of
	 * "classNo filename") and builds the count map plus the probability
	 * tables: P_c (class priors), P_c_w (word-given-class with add-one
	 * smoothing) and P_w_c (class-given-word via Bayes' rule).
	 *
	 * Each document is POS-tagged on first use and the tagged text is cached
	 * in "&lt;filename&gt;.pos" so later runs skip the tagger.  A keyword is a
	 * noun ("nn"), optionally prefixed by the adjectives ("jj") collected
	 * since the previous noun.
	 *
	 * FIXME(review): fout (the .pos cache writer) is never closed, and all
	 * failures collapse into one broad catch that only prints the trace.
	 */
	private void train () {
		int docs = 0 , i , j , classNo ; 
		String s ; 
		String temp, word , attr; 
		int tempClass [ ] = null ;     // per-class document counts
		int classHasWord [] = null ;   // per-class total word (token) counts
		int arr [ ] , pos ; 
		double sum ; 
		LinkedList < String > list = new LinkedList < String > () ;  // adjectives pending attachment to the next noun
		boolean cached = false ; 
		List l = null ; 
		Scanner scanner ;
		PrintStream fout = null ; // To cache processed words
		
		try {
			Scanner configScanner = new Scanner ( new File ( "temp.txt" ) ) ;
			classes = configScanner.nextInt () ; 
			tempClass = new int [ classes ];
			classHasWord = new int [ classes ] ; 
			P_c = new double [ classes ] ;
						
			while ( configScanner.hasNextInt() ) {   // Iterate all the documents in the training set
				docs ++ ;
				System.out.println ( "Processing Document " + docs ) ; 
				classNo = configScanner.nextInt () ;
				tempClass [ classNo ] ++ ; 
				
				// Use the cached tagged text if present, otherwise tokenize
				// the raw file and open a cache writer for this document.
				String filename = configScanner.nextLine().trim() ;
				File cacheFile = new File ( filename + ".pos" ); 
				if ( cacheFile.exists () ) {
					l = new Vector < String > () ;
					Scanner tempScanner = new Scanner ( cacheFile ) ; 
					while ( tempScanner.hasNextLine () )  l.add ( tempScanner.nextLine () ) ;
					cached = true ; 
				} else {
					fout = new PrintStream ( cacheFile ) ;
					BufferedReader fin = new BufferedReader( new FileReader( filename ));	 // buffer for POS tagger 
					l = tokenizeText( fin );
					cached = false ; 
				}
				
				for ( Iterator senIt = l.listIterator () ; senIt.hasNext (); ) {   //  Iterate all the sentences in the document
					if ( cached ) scanner = new Scanner ( (String) senIt.next() ) ;
					else {
						// NOTE(review): static call -- presumably tagSentence
						// uses the model loaded into `tagger`; confirm.
						Sentence sentence = (Sentence) senIt.next() ;
						scanner = new Scanner ( MaxentTagger.tagSentence(sentence).toString(false) );
					}
//					System.out.println ( "Part of Speech of a sentence Analyze Done. " ) ;
					while ( scanner.hasNext () ) {    //   Iterate all the tagged words ("word/TAG") in a sentence
						temp = scanner.next () ;
						if ( ! cached ) fout.print ( temp + " " ) ; 
						pos = temp.lastIndexOf ( "/" ) ;
						if ( pos == -1 ) continue ;  // not a tagged token
						word = changeForm ( temp.substring ( 0 , pos ).toLowerCase() ) ;
						attr = temp.substring ( pos + 1 , Math.min (pos + 3, temp.length() ) ).toLowerCase () ;  // first two letters of the POS tag
						if ( word.length () <= 2 ) continue ;
						if ( attr.equals ( "jj" ) ) list.addFirst ( word ) ; 	// adjective: queue it for the next noun
						else if ( attr.equals ( "nn" ) ) {
							// Noun: count it alone, then count every queued
							// adjective both as a growing phrase ("a b noun")
							// and as an individual pair ("a noun").
							s = word ; 
							addWord ( word , classNo ) ; 
							classHasWord [ classNo ] ++ ;
							for ( Iterator < String > it = list.listIterator ( 0 ) ; it.hasNext () ; ) {
								temp = it.next () ; 
								s = temp + " " + s ;
								addWord ( s , classNo ) ;
								addWord ( temp + " " + word , classNo ) ;
								classHasWord [ classNo ] += 2 ; 
							}
							list.clear () ; 
						}
					}
					if ( ! cached ) fout.println (); 
					
					// Sentence ended with adjectives still queued: treat the
					// most recent one as the head word and flush the rest.
					if ( ! list.isEmpty () ) {
						word = list.removeLast ( ) ;
						s = word ; 
						for ( Iterator < String > it = list.listIterator ( 0 ) ; it.hasNext () ; ) {
							temp = it.next () ; 
							s = temp + " " + s ;
							addWord ( s , classNo ) ;
							addWord ( temp + " " + word , classNo ) ;
							classHasWord [ classNo ] += 2 ; 
						}
						list.clear () ; 
					}
				}
			}
			
		// Priors, then smoothed P(w|c) using the id stored at arr[classes].
		int vocabulary = map.size () ; 
		P_c_w = new double [ classes ][ vocabulary ] ; 
		P_w_c = new double [ vocabulary ][ classes ] ; 
		for ( i = 0 ; i < classes ; i ++ ) P_c [ i ]= ( double )( tempClass [ i ] ) / docs ; 
		for ( Iterator<String> it = map.keySet ().iterator() ; it.hasNext() ; ) {
			s = it.next () ; 
			arr = map.get ( s ) ;
			if ( arr != null )  {
				for ( i = 0 ; i < classes ; i ++ )  
					P_c_w [ i ][ arr[ classes ] ] = (double)( (map.get ( s ))[i] +  1 )/( classHasWord [ i ] + vocabulary ) ;
			}
		}
		
		// Bayes' rule: P(c|w) proportional to P(w|c) * P(c); the tiny epsilon in
		// sum guards against division by zero.
		//PrintStream fout = new PrintStream ( new File ( "words.txt" ) );
		for ( i = 0 ; i < vocabulary ; i ++ ) {
			sum = 1e-9 ; 
			for ( j = 0 ; j < classes ; j ++ ) {
				P_w_c [ i ][ j ] = P_c_w [ j ][ i ] * P_c [ j ] ;
				sum += P_w_c [ i ][ j ] ;
			}
			for ( j = 0 ; j < classes ; j ++ ) P_w_c [ i ][ j ] /= sum ; 
	//		fout.println ( P_w_c [ i ][ 0 ] + "|" + P_w_c [ i ][ 1 ] + "|" + inverseMap.get ( i ) ) ; 
		}
		
	//	fout.close () ; 

		}catch ( Exception e ) { 
			e.printStackTrace () ; 
		}
	}
	
	/**
	 * Strips non-alphanumeric characters from both ends of a token and
	 * lower-cases the remainder; returns null for markup tokens (&lt;...&gt;)
	 * or tokens that shrink to one character or less.
	 * NOTE(review): private and never called in this class -- dead code?
	 */
	private String process ( String s ) {
		String temp ; 
		int end = s.length () , start = 0 ;
		if ( s.charAt ( 0 ) == '<' && s.charAt ( s.length() -1 ) == '>' ) return null ;
		while ( start < end && !Character.isLetterOrDigit ( s.charAt ( start ) ) ) start ++ ;
		end -- ; 
		while ( end > start && !Character.isLetterOrDigit ( s.charAt ( end ) ) ) end -- ;
		if ( end > start ) return s.substring ( start , end + 1 ).toLowerCase () ;
		return null ; 
	}
	
	/**
	 * Counts one occurrence of s in classNo; an unseen word additionally
	 * gets a fresh id (the current map size) stored in slot [classes] and
	 * registered in inverseMap.
	 */
	private void addWord ( String s , int classNo ) {
		int [] arr = map.get ( s ) ;
		if ( arr == null ) {
			arr = new int [ classes + 1 ] ; 
			arr [ classNo ] = 1 ; 
			arr [ classes ] = map.size () ; 
			inverseMap.put ( arr[ classes ] , s ) ;
			map.put ( s , arr ) ;
		}
		else {
			arr [ classNo ] ++ ; 
		}
	}

	/**
	 * Crude English de-pluralisation: "ies" -> "y", strip "es" after
	 * sh/ch/s/x, otherwise strip a trailing "s".  "mars" is whitelisted
	 * so the planet name survives intact.
	 */
	String changeForm ( String s ) {
		if ( s.equals ( "mars" ) ) return s; 
		if ( s.endsWith ( "ies" ) ) return (s.substring ( 0 , s.length () - 3 ) + "y" ) ;
		else if ( s.endsWith("shes") || s.endsWith("ches") || s.endsWith("ses") || s.endsWith ( "xes" )) 
				return ( s.substring(0, s.length() - 2 ) ) ;
		else if ( s.endsWith ( "s" ) ) return s.substring ( 0, s.length() -1 ) ;
		return s ;
		
	}
	

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////          Used for Part of Speech Tagger          ///////////////////////////////////////////////////	
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

	// Load the tagger model once per JVM.  Windows-style path: the model
	// file must exist under model\ relative to the working directory.
	static {
		try {
			tagger = new MaxentTagger ( "model\\train-wsj-0-18.holder" ) ; 
		}
		catch ( Exception e ) {
			e.printStackTrace () ; 
		}
	}
	/**
	 * Splits the reader's text into Stanford-NLP Sentence objects.
	 * Raw List because the Stanford API of this vintage is not generic.
	 */
	public static List tokenizeText( Reader r ) {
		DocumentPreprocessor documentPreprocessor = new DocumentPreprocessor() ; 
		return documentPreprocessor.getSentencesFromText(r);
	}

	
}

/**
 * Pairing of a word id with its score, ordered so that Arrays.sort places
 * the highest-scoring nodes first (descending by {@code max}).
 *
 * Fixes over the previous version: the raw {@code Comparable} is now
 * {@code Comparable<Node>} (no unchecked warnings, no instanceof/cast), and
 * the double comparison uses {@link Double#compare} instead of relying on
 * {@code ==}/{@code >}, which mishandles NaN.
 */
class Node implements Comparable<Node> {
	int key ;     // word id (index into Bayes.inverseMap)
	double max ;  // score, e.g. P(w|c), possibly scaled by pictoriability

	/**
	 * @param a word id
	 * @param b score
	 */
	public Node ( int a , double b ) {
		key = a ;
		max = b ; 
	}

	/** Descending order: a larger {@code max} sorts earlier. */
	@Override
	public int compareTo ( Node o ) {
		return Double.compare ( o.max , max ) ;
	}
}