package TAIC.Classifier;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.util.Map;
import java.util.Scanner;
import java.util.Vector;

import TAIC.text.Dict;
import TAIC.text.LanguageVec;
import TAIC.text.TextBayes;
import TAIC.text.Translation;
import TAIC.text.WordInClass;
import TAIC.text.WordProb;

/**
 Estimate P(f|c), P(f|v) and then calculate KL-divergence between them 
 P(f|c)=P(f|v)P(v|w)P(w|c) 
 P(f|v)= [ n(f)+1 ]  /  [ n(v)+|f| ]
 P(v|w) = 1 
 P(w|c) = n(image of word w) / |total images|
 */
public class TextRiskMin extends Classifier {
	/** Dimension of the language feature vector; set from the language file in the constructor. */
	static int VECSIZE;
	int classes = 2, totalPic = 0;
	/** Per-class training vectors: p_f_v[class].get(pic)[feature], each normalized. */
	Vector<double[]>[] p_f_v;
	/** Per-class centroid: normalized sum of that class's training feature vectors. */
	double[][] center;
	int n_w_c[][];
	static LanguageVec lang;

	public static void main(String str[]) {
		TextRiskMin rm = new TextRiskMin("german.txt");
		rm.train("trainset");
		System.out.println(rm.test("testset"));
	}

	public TextRiskMin() {
		this("german.txt");
	}

	/**
	 * Builds the classifier for the language vector defined in {@code LangFile}.
	 *
	 * @param LangFile path of the language-vector definition file
	 */
	public TextRiskMin(String LangFile) {
		lang = new LanguageVec(LangFile);
		VECSIZE = lang.size();
	}

	/**
	 * Trains the per-class centroids from labeled sparse feature lines of the form
	 * {@code <label> <idx>:<count> <idx>:<count> ...}.
	 * Input comes from {@code trainFile}, or from the inherited {@code trainPipe}
	 * buffer when {@link #isPipe()} is true.
	 *
	 * @param trainFile path of the training file (ignored in pipe mode)
	 */
	@SuppressWarnings("unchecked")
	public void train(String trainFile) {
		center = new double[classes][VECSIZE];
		// generic array creation is unavoidable here; elements are filled immediately below
		p_f_v = new Vector[classes];
		for (int i = 0; i < classes; i++) p_f_v[i] = new Vector<double[]>();

		try (Scanner scanner = isPipe()
				? new Scanner(new ByteArrayInputStream(trainPipe))
				: new Scanner(new File(trainFile))) {
			while (scanner.hasNext()) {
				String str = scanner.next();
				int className = translateClass(str);
				// BUG FIX: allocate a fresh vector per sample. The original reused one
				// buffer for every line, so all entries of p_f_v aliased the same
				// (last-read) array.
				double[] p = new double[VECSIZE];
				get_f(scanner.nextLine(), p);
				// translateClass returns -1 for unknown labels; skip instead of
				// indexing p_f_v[-1] (which aborted training via the catch below).
				if (className < 0) continue;
				p_f_v[className].add(p);
				for (int k = 0; k < VECSIZE; k++)
					center[className][k] += p[k];
			}
		} catch (Exception e) {
			e.printStackTrace();
		}

		for (int i = 0; i < classes; i++) divergence.normalize(center[i]);
	}

	/**
	 * Folds an auxiliary text model into the centroids: the existing centroids are
	 * down-weighted by lambda^4, then each source word's per-class probability is
	 * distributed over its translations' vector positions. Centroids are
	 * re-normalized afterwards.
	 *
	 * @param model  auxiliary Bayes model providing per-word class probabilities
	 * @param dict   translation dictionary mapping source word ids to target vector entries
	 * @param lambda mixing weight; applied as lambda^4 to the existing centroids
	 */
	public void addInAuxilary(TextBayes model, Dict dict, double lambda) {
		lambda *= lambda; // lambda^2
		lambda *= lambda; // lambda^4 — deliberately sharpened decay of the old centroids
		for (int i = 0; i < classes; i++)
			for (int j = 0; j < VECSIZE; j++)
				center[i][j] *= lambda;

		for (Map.Entry<Integer, WordInClass> iter : model.wordCount.entrySet()) {
			int oriKey = iter.getKey();
			double[] oriProb = iter.getValue().prob;
			Translation tr = dict.getTranslation(oriKey);
			if (tr == null) continue; // word has no translation — contributes nothing
			for (int i = 0; i < classes; i++)
				for (WordProb word : tr.vec)
					center[i][word.vec] += oriProb[i] * word.prob;
		}
		for (int i = 0; i < classes; i++) divergence.normalize(center[i]);
	}

	/**
	 * Classifies every line of the test input and reports accuracy.
	 * Input comes from {@code testFile}, or from the inherited {@code testPipe}
	 * buffer when {@link #isPipe()} is true.
	 *
	 * @param testFile path of the test file (ignored in pipe mode)
	 * @return fraction of correctly classified samples, or 0.0 if none were read
	 */
	public double test(String testFile) {
		int total = 0, correct = 0;
		try (Scanner scanner = isPipe()
				? new Scanner(new ByteArrayInputStream(testPipe))
				: new Scanner(new File(testFile))) {
			while (scanner.hasNext()) {
				String str = scanner.next();
				int thisClass = translateClass(str);
				double p[] = new double[VECSIZE];
				get_f(scanner.nextLine(), p);
				int judge = findMin(p);
				if (judge == thisClass) correct++;
				total++;
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
		if (total == 0) return 0.0;
		else return (double) correct / total;
	}

	/**
	 * Returns the class whose centroid has the smallest divergence from {@code p}.
	 * Ties and empty input default to class 0.
	 */
	private int findMin(double[] p) {
		// BUG FIX: use a true "infinity" sentinel; the original 1e5 silently fell
		// back to class 0 whenever every divergence exceeded it.
		double min = Double.MAX_VALUE;
		int best = 0;

		for (int i = 0; i < classes; i++) {
			// alternative measures available: Cosine.cale, KL.cale, Pearson.cale
			double temp = divergence.cale(center[i], p);
			if (temp < min) {
				min = temp;
				best = i;
			}
		}
		return best;
	}

	/**
	 * Parses a sparse feature line of {@code <idx>:<count>} tokens into {@code p}
	 * (zeroed first), then normalizes {@code p} in place.
	 *
	 * @param buffer one line of sparse features
	 * @param p      output vector of length VECSIZE, overwritten
	 */
	private void get_f(String buffer, double[] p) {
		for (int i = 0; i < VECSIZE; i++) p[i] = 0;
		try (Scanner line = new Scanner(buffer)) {
			while (line.hasNext()) {
				String str = line.next();
				int colon = str.indexOf(':');
				int vecNo = Integer.valueOf(str.substring(0, colon));
				int value = Integer.valueOf(str.substring(colon + 1));
				p[vecNo] += value;
			}
		}
		divergence.normalize(p);
	}

	/**
	 * Maps a class label token to its index: '0' → 0, '1' → 1, anything else → -1.
	 */
	protected int translateClass(String str) {
		if (str.charAt(0) == '0') return 0;
		else if (str.charAt(0) == '1') return 1;
		else return -1;
	}

	/** Whether input is read from the inherited pipe buffers instead of files. */
	public boolean isPipe() {
		return true;
	}

	static Divergence divergence = cFact.getDivergence(cConfig.get("divergence"));
	static Divergence Cosine = cFact.getDivergence("TAIC.Classifier.Cosine");
	static Divergence KL = cFact.getDivergence("TAIC.Classifier.KL_Divergence");
	static Divergence Pearson = cFact.getDivergence("TAIC.Classifier.Pearson");
}
