package TAIC.LM ;

import TAIC.util.* ;
import TAIC.Google.* ;
import edu.stanford.nlp.tagger.maxent.* ;
import edu.stanford.nlp.process.* ; 
import edu.stanford.nlp.ling.Sentence ;
import java.util.regex.Pattern ;
import java.util.* ; 
import java.io.* ;

public class BayesInDoc {
	static double Pic_Min = 0.4 ;  // Threshold for min pictoriability; candidates at or below it have their IG scaled x10 in getKeyInClass
	static MaxentTagger  tagger ;  // shared POS tagger, loaded once by the static initializer at the bottom of the class
	double IG [] ;       // information-gain score per word id, filled by caleIG()
	double P_c [] ;      // P(c): class priors, estimated from document counts
	double P_w [] ;      // P(w): Laplace-smoothed word probabilities
	double P_c_w [][] ;  // P( w | c ), indexed [class][word id]
	double P_w_c [][] ;  // P( c | w ), indexed [word id][class]
	double NP_w_c [][] ; // P( c | not w ), indexed [word id][class]
	int classes = 2;     // number of classes; overwritten by the first int read from temp.txt in train()
	int KeyNumber ;      // number of keywords to extract per class
	// word -> int [ classes + 1 ]: per-class occurrence counts; slot [classes] stores the id (an int) of a new added word
	TreeMap < String , int [] > map = new TreeMap < String , int [] > () ;
	// inverseMap storing the correspondent string of a key(int)
	TreeMap < Integer , String > inverseMap = new TreeMap < Integer , String > () ;

	/**
	 * @param size number of keywords to extract for each class
	 */
	public BayesInDoc ( int size ) {   
		KeyNumber = size ; 
	}
	
	/**
	 * Trains the model on the corpus listed in temp.txt, computes information
	 * gain, extracts the keywords of every class and returns them concatenated
	 * (class 0 first).  The result is also dumped to KeyWords.txt as one
	 * "classNo word prob" line per keyword.
	 */
	public ArrayList < KeyWord > getKey () {
		ArrayList < KeyWord > result = null ;
		int i , j ; 
 
		train () ; 		
		caleIG () ;
		
		for ( i = 0 ; i < classes ; i ++ ) 
			if ( i == 0 ) result = getKeyInClass ( i ) ;
			else {
				ArrayList < KeyWord > temp = getKeyInClass ( i ) ;
				for ( j = 0 ; j < temp.size(); j ++ ) result.add ( temp.get ( j ) ) ;
			}	
	
		PrintStream fout = null ;
		try {
			fout = new PrintStream ( new File ( "KeyWords.txt" ) );
			for ( i = 0 ; i < result.size() ; i ++ ) fout.println ( result.get(i).classNo+ " " + result.get ( i ).word + " " + result.get(i).prob );
		}catch ( Exception e ) {
			e.printStackTrace () ;
		} finally {
			if ( fout != null ) fout.close () ;  // FIX: the stream was never closed, so buffered output could be lost
		}

		return result ; 
	}
	
	/**
	 * Extracts up to KeyNumber keywords for one class: ranks every word by IG,
	 * queries the pictoriability of the best KeyNumber*8 candidates in parallel,
	 * penalises low-pictoriability candidates, then keeps the top words whose
	 * most probable class is desiredClass, sorted by P(w|c) descending.
	 */
	public ArrayList < KeyWord > getKeyInClass ( int desiredClass ) {

		if ( KeyNumber > map.size () ) KeyNumber = map.size() - 1 ;
		int length = Math.min ( KeyNumber * 8  , map.size () );  // size of the candidate pool sent to the picture query
		int i , j;
		
		Node arr [ ] = new Node [ map.size() ] ;
		for ( i = 0 ; i < map.size() ; i ++ ) 
			arr [ i ] = new Node ( i , P_c_w [ desiredClass ][ i ], IG[ i ] ) ;
		
		Arrays.sort ( arr ) ;                   //  sort the arr (descending IG) to get a small query set
		// Query the pictoriability of the candidates concurrently.
		ThreadPool tp = new ThreadPool ( 20 ) ;
		PicDegree pd [] = new PicDegree [ length ] ; 
		for ( i = 0 ; i < length ; i ++ ) pd [ i ] = new PicDegree ( inverseMap.get ( arr[ i ].key ) ) ; 
		for ( i = 0 ; i < length ; i ++ ) tp.addThread ( pd[ i ] ) ;
		tp.join () ;
		
		// IG values are sums of p*log(p) terms and therefore non-positive, so the
		// x10 factor makes low-pictoriability words MORE negative, pushing them
		// towards the end of the descending sort below.
		for ( i = 0 ; i < length ; i ++ ) arr [ i ].IG *= ( pd[ i ].getValue () > Pic_Min ? 1.0 : 10.0 ) ;
		Arrays.sort ( arr , 0 , length ) ;
		
		ArrayList < KeyWord > result = new ArrayList < KeyWord > () ; 
		
		// Debug trace of the re-ranked candidate pool.
		for ( i = 0 ; i < length ; i ++ ) System.out.println( inverseMap.get( arr [ i ].key ) + " " + arr [ i ].IG );
		
		// Keep only candidates whose most probable class is the desired one.
		for ( i = 0 ; i < length ; i ++ ) {
			if ( belongsTo ( arr [ i ].key ) ==desiredClass )
				result.add ( new KeyWord ( inverseMap.get(arr[i].key) , arr[ i ].prob , desiredClass ) ) ;
			if ( result.size () >= KeyNumber ) break ; 
		}
		
		// Sort the selected keywords by probability, descending (simple exchange
		// sort -- fine for a KeyNumber-sized list).
		for ( i = 0 ;i < result.size() ; i ++ ) {
			for ( j = i + 1 ; j < result.size(); j ++ )
				if ( result.get(i).prob < result.get(j).prob ) {
					KeyWord kw = result.get(i );
					result.set(i , result.get(j));
					result.set(j, kw) ;
				}
		}

		return result ; 
	}
	
	
	/**
	 * Reads temp.txt ("classes" followed by repeated "classNo filename" lines),
	 * POS-tags every training document (re-using a <filename>.pos cache when it
	 * exists, writing one otherwise), counts adjective/noun tokens per class,
	 * and fills in the Laplace-smoothed probability tables P_c, P_w, P_c_w,
	 * P_w_c and NP_w_c.
	 */
	private void train () {
		int docs = 0 , i , j , classNo , totalWords ; 
		String s ; 
		String temp, word , attr; 
		int tempClass [ ] = null ;    // documents seen per class
		int classHasWord [] = null ;  // counted (jj/nn) tokens per class
		int arr [ ] , pos ; 
		double sum , nsum; 
		boolean cached = false ; 
		List l = null ;               // sentences: String lines when cached, Sentence objects otherwise
		Scanner scanner ;
		PrintStream fout = null ; // To cache processed words
		
		totalWords = 0 ; 
		try {
			Scanner configScanner = new Scanner ( new File ( "temp.txt" ) ) ;
			classes = configScanner.nextInt () ; 
			tempClass = new int [ classes ];
			classHasWord = new int [ classes ] ; 
			P_c = new double [ classes ] ;
						
			while ( configScanner.hasNextInt() ) {   // Iterate all the document in the training set
				docs ++ ;
				System.out.println ( "Processing Document " + docs ) ; 
				classNo = configScanner.nextInt () ;
				tempClass [ classNo ] ++ ; 
				
				String filename = configScanner.nextLine().trim() ;
				File cacheFile = new File ( filename + ".pos" ); 
				if ( cacheFile.exists () ) {
					// A tagged version of this document already exists: read it line by line.
					l = new Vector < String > () ;
					Scanner tempScanner = new Scanner ( cacheFile ) ; 
					while ( tempScanner.hasNextLine () )  l.add ( tempScanner.nextLine () ) ;
					tempScanner.close () ;  // FIX: release the cache file handle
					cached = true ; 
				} else {
					fout = new PrintStream ( cacheFile ) ;
					BufferedReader fin = new BufferedReader( new FileReader( filename ));	 // buffer for POS tagger 
					l = tokenizeText( fin );
					fin.close () ;  // FIX: the preprocessor has consumed the reader by now; release it
					cached = false ; 
				}
				
				for ( Iterator senIt = l.listIterator () ; senIt.hasNext (); ) {   //  Iterate all the sentence in the document
					if ( cached ) scanner = new Scanner ( (String) senIt.next() ) ;
					else {
						Sentence sentence = (Sentence) senIt.next() ;
						scanner = new Scanner ( MaxentTagger.tagSentence(sentence).toString(false) );
					}
					while ( scanner.hasNext () ) {    //   Iterate all the tagged words in a document 
						temp = scanner.next () ;
						if ( ! cached ) fout.print ( temp + " " ) ; 
						pos = temp.lastIndexOf ( "/" ) ;
						if ( pos == -1 ) continue ;   // token carries no POS tag
						word = changeForm ( temp.substring ( 0 , pos ).toLowerCase() ) ;
						// Task-specific stop words.
						if ( word.equals("cat") ||  word.equals("kitten" ) || word.equals("display" )) continue ;  
						attr = temp.substring ( pos + 1 , Math.min (pos + 3, temp.length() ) ).toLowerCase () ;   // first two letters of the POS tag
						if ( word.length () <= 2 ) continue ;
						if ( attr.equals ( "jj" ) || attr.equals ( "nn" ) ) {  // adjectives and nouns only
							addWord ( word , classNo ) ; 
							classHasWord [ classNo ] ++ ;
							totalWords ++ ; 
						}
					}
					if ( ! cached ) fout.println (); 
				}
				if ( ! cached && fout != null ) {
					fout.close () ;  // FIX: flush the .pos cache; it was left open before, so cached tag data could be lost
					fout = null ;
				}
			}
			configScanner.close () ;  // FIX: release temp.txt
			
		int vocabulary = map.size () ; 
		P_w = new double [ vocabulary ] ;
		P_c_w = new double [ classes ][ vocabulary ] ; 
		P_w_c = new double [ vocabulary ][ classes ] ; 
		NP_w_c = new double [ vocabulary ][ classes ] ;
		IG = new double [ vocabulary ] ; 
		// Class priors from document counts.
		for ( i = 0 ; i < classes ; i ++ ) P_c [ i ]= ( double )( tempClass [ i ] ) / docs ; 
		// Laplace-smoothed P(w|c).
		for ( Iterator<String> it = map.keySet ().iterator() ; it.hasNext() ; ) {
			s = it.next () ; 
			arr = map.get ( s ) ;
			if ( arr != null )  {
				for ( i = 0 ; i < classes ; i ++ )  
					P_c_w [ i ][ arr[ classes ] ] = (double)( (map.get ( s ))[i] +  1 )/( classHasWord [ i ] + vocabulary ) ;
			}
		}
		
		// Smoothed P(w) over the whole corpus.
		for ( i = 0 ; i < vocabulary ; i ++ ) {
			int [] number = map.get( inverseMap.get(i));
			int total = 0 ; 
			for ( j = 0 ; j < classes ; j ++)  total += number [ j ] ; 
			P_w [ i ] = (double)( total + 1 ) / ( vocabulary + totalWords ) ;
		}
		
		// Bayes inversion: P(c|w) and P(c|not w), renormalised per word.
		for ( i = 0 ; i < vocabulary ; i ++ ) {
			sum = 1e-9 ;   // tiny epsilon guards the normalising division against an all-zero row
			nsum = 1e-9 ; 
			for ( j = 0 ; j < classes ; j ++ ) {
				P_w_c [ i ][ j ] = P_c_w [ j ][ i ] * P_c [ j ] / P_w [ i ];
				NP_w_c [ i ][ j ] = ( 1 - P_c_w [ j ][ i ] ) * P_c [ j ] / (1 - P_w[ i ] ); 
				sum += P_w_c [ i ][ j ] ;
				nsum += NP_w_c[ i ][ j ] ;
			}
			for ( j = 0 ; j < classes ; j ++ ) {
				P_w_c [ i ][ j ] /= sum ;
				NP_w_c [ i ][ j ] /= nsum ; 
			}
		}

		}catch ( Exception e ) { 
			e.printStackTrace () ; 
		} finally {
			if ( fout != null ) fout.close () ;  // FIX: close the cache stream on the exception path as well
		}
	}
	
	/**
	 * Strips leading/trailing non-alphanumerics from a token and lower-cases
	 * the remainder; returns null for markup tokens like "<br>" or tokens with
	 * no alphanumeric core.  NOTE(review): not referenced anywhere in this
	 * file; also note a single-character token such as "a" returns null
	 * because the final check is strict (end > start).
	 */
	private String process ( String s ) {
		int end = s.length () , start = 0 ;
		if ( s.charAt ( 0 ) == '<' && s.charAt ( s.length() -1 ) == '>' ) return null ;
		while ( start < end && !Character.isLetterOrDigit ( s.charAt ( start ) ) ) start ++ ;
		end -- ; 
		while ( end > start && !Character.isLetterOrDigit ( s.charAt ( end ) ) ) end -- ;
		if ( end > start ) return s.substring ( start , end + 1 ).toLowerCase () ;
		return null ; 
	}
	
	
	/**
	 * Computes an information-gain style score for every word:
	 *   IG(w) = P(w) * sum_c P(c|w) ln P(c|w)  +  (1-P(w)) * sum_c P(c|!w) ln P(c|!w)
	 * FIX: zero-probability terms are skipped (the 0 * ln 0 = 0 convention)
	 * instead of producing NaN via Math.log(0), which could happen when a
	 * class has a zero prior.
	 */
	void caleIG () {
		for ( int i = 0  ; i < map.size() ; i ++ ) {
			double t1 = 0 ;
			double t2 = 0 ; 
			for ( int j = 0 ; j < classes ; j ++ ) {
				if ( P_w_c [i][j] > 0 )  t1 += P_w_c [i][j] * Math.log( P_w_c [i][j]) ;
				if ( NP_w_c [i][j] > 0 ) t2 += NP_w_c [i][j] * Math.log( NP_w_c [i][j]) ;
			}
			t1 *= P_w [ i ];
			t2 *= ( 1 - P_w [ i ]) ;
			IG [ i ] = t1 + t2 ; 
		}
	}
	
	/** Returns the index of the class maximising P(c|w) for the given word id. */
	int belongsTo ( int word ){
		int result = 0 ;
		for ( int i = 1 ; i < classes ;i ++  ) 
			if ( P_w_c [ word ][ i ] > P_w_c [ word ][ result ]) result = i; 
		return result ;
	}
	
	/** Records one occurrence of word s in class classNo, assigning a fresh id on first sight. */
	private void addWord ( String s , int classNo ) {
		int [] arr = map.get ( s ) ;
		if ( arr == null ) {
			arr = new int [ classes + 1 ] ; 
			arr [ classNo ] = 1 ; 
			arr [ classes ] = map.size () ;        // next free id
			inverseMap.put ( arr[ classes ] , s ) ;
			map.put ( s , arr ) ;
		}
		else {
			arr [ classNo ] ++ ; 
		}
	}

	/**
	 * Crude normalisation: collapses URL/markup tokens into a unique
	 * "httpNNNNN" token (presumably so they never aggregate counts -- confirm
	 * with callers), and singularises common English plural endings, with a
	 * short exception list of words that legitimately end in "s".
	 */
	String changeForm ( String s ) {		
		if ( s.indexOf( "://" ) != -1 || s.indexOf( "www." ) != -1 || s.indexOf( "<" ) != -1 || s.indexOf( ">" ) != -1 ) 
			return ("http" + random.nextInt ( 100000 ) ) ;
		if ( s.equals ( "mars" ) || s.equals( "chess") || s.equals( "jesus") || s.equals( "glass")
				|| s.equals ( "swiss" ) ) return s; 
		if ( s.equals( "horse") || s.equals( "horses")) return "horse" ;
		if ( s.endsWith ( "ies" ) ) return (s.substring ( 0 , s.length () - 3 ) + "y" ) ;
		else if ( s.endsWith("shes") || s.endsWith("ches") || s.endsWith("ses") || s.endsWith ( "xes" )) 
				return ( s.substring(0, s.length() - 2 ) ) ;
		else if ( s.endsWith ( "s" ) ) return s.substring ( 0, s.length() -1 ) ;
		return s ;
		
	}
	

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////          Used for Part of Speech Tagger          ///////////////////////////////////////////////////	
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

	// Load the tagger model once; on failure the error is logged and tagger
	// stays null (subsequent tagging would then fail at use time).
	static {
		try {
			tagger = new MaxentTagger ( "model\\train-wsj-0-18.holder" ) ; 
		}
		catch ( Exception e ) {
			e.printStackTrace () ; 
		}
	}

	/** Splits the text behind the reader into a list of Sentence objects. */
	public static List tokenizeText( Reader r ) {
		DocumentPreprocessor documentPreprocessor = new DocumentPreprocessor() ; 
		return documentPreprocessor.getSentencesFromText(r);
	}

	static Random random = new Random () ;  // used by changeForm to mint unique URL placeholders
	
}

/**
 * Sortable (word id, probability, information gain) triple used by
 * BayesInDoc.getKeyInClass.  Natural order is DESCENDING by information gain,
 * so Arrays.sort places the most informative words first.
 */
class Node implements Comparable < Node > {
	int key  ;       // word id (index into BayesInDoc.inverseMap)
	double prob ;    // P(w|c) for the class currently being processed
	double IG = 0 ;  // information gain; may be re-weighted after sorting
	
	public Node ( int a , double b ) {
		key = a ;
		prob = b ; 
	}
	
	public Node ( int a , double b , double IG ){
		key =a ;
		prob = b ;
		this.IG = IG ; 
	}
	
	/**
	 * Descending comparison on IG.
	 * FIX: uses Double.compare (arguments reversed for descending order)
	 * instead of raw == / > comparisons, so NaN and -0.0 are ordered
	 * consistently and the Comparable anti-symmetry contract holds; the
	 * generic type parameter removes the unchecked cast the raw
	 * Comparable required.
	 */
	public int compareTo ( Node o ) {
		return Double.compare ( o.IG , IG ) ;
	}
	
}