package geppetto.phraseTable;

import geppetto.cat.corpus.BilingualCorpus;
import geppetto.cat.models.M1;
import geppetto.phraseHMM.WordTrie;
import geppetto.phraseHMM.lexicalWeighting.UniformLexicalWeightingCalculator;
import geppetto.phraseHMM.phraseExtraction.GeneralPhraseExtraction;
import geppetto.phraseHMM.phraseExtraction.HistogramGlobalPrunning;
import geppetto.phraseHMM.phraseExtraction.phraseAccepter.AlwaysAccept;
import geppetto.phraseScorer.DirectionalModelPosterior;
import geppetto.phraseTable.phrase.Phrase;
import gnu.trove.TIntDoubleHashMap;
import gnu.trove.TIntObjectHashMap;

import java.io.IOException;



public class PhraseTableStats {

	/**
	 * Computes the entropy of the target-phrase distribution for every source
	 * phrase in the table, treating P(targetPhrase | sourcePhrase) as a simple
	 * probability distribution. Results are in bits (log base 2).
	 *
	 * NOTE(review): the accumulation lines were previously commented out, which
	 * made every entry 0.0 and left the inner loops as dead code; they are
	 * restored here using {@code Phrase._prob}, with a guard so that a zero
	 * probability does not produce NaN via log(0).
	 *
	 * @param pt the phrase table to analyse
	 * @return map from source-phrase key to entropy; the sentinel key -1 holds
	 *         the null-phrase entropy (note: it is overwritten on each target
	 *         length iteration, so it reflects the last target length only —
	 *         this mirrors the original behavior; confirm whether per-length
	 *         keys were intended)
	 */
	public TIntDoubleHashMap entropy(PhraseTable pt){
		TIntDoubleHashMap entropyByPhrase = new TIntDoubleHashMap();
		//Normal phrases
		for(int targetLen = 0; targetLen < pt._maxTargetPhraseLen; targetLen++){
			for(int sourceLen = 0; sourceLen < pt._maxSourcePhraseLen; sourceLen++){
				TIntObjectHashMap<TIntObjectHashMap<Phrase>> phrasesByLen = pt._phraseTable[sourceLen][targetLen];
				//Hoist keys(): Trove allocates a fresh array on every keys() call,
				//so calling it inside the loop was accidental O(n^2) allocation.
				int[] sourceKeys = phrasesByLen.keys();
				for(int s = 0; s < sourceKeys.length; s++){
					TIntObjectHashMap<Phrase> phrases = phrasesByLen.get(sourceKeys[s]);

					double entropy = 0;
					int[] targetKeys = phrases.keys();
					for(int t = 0; t < targetKeys.length; t++){
						Phrase p = phrases.get(targetKeys[t]);
						if(p._prob > 0){ //log(0) would yield NaN; a zero-probability term contributes 0 entropy
							entropy += -p._prob*Math.log(p._prob);
						}
					}
					//Convert nats to bits.
					entropyByPhrase.put(sourceKeys[s], entropy/Math.log(2));
				}
			}
			//Null phrases: their aggregate entropy is stored under sentinel key -1.
			double entropy = 0;
			int[] nullKeys = pt._nullPhrases[targetLen].keys();
			for(int n = 0; n < nullKeys.length; n++){
				Phrase p = pt._nullPhrases[targetLen].get(nullKeys[n]);
				if(p._prob > 0){
					entropy += -p._prob*Math.log(p._prob);
				}
			}
			entropyByPhrase.put(-1, entropy/Math.log(2));

		}
		return entropyByPhrase;
	}


	/**
	 * Prints summary statistics (mean, variance, min, max) of the per-phrase
	 * entropies of the given phrase table to standard out.
	 *
	 * Fixes: min/max were initialized to the magic values 1000/-1, which would
	 * report wrong extremes if all entropies fell outside [−1, 1000]; an empty
	 * table would have divided by zero and printed NaN.
	 *
	 * @param pt the phrase table to summarise
	 */
	public void printPhraseTableStats(PhraseTable pt){
		TIntDoubleHashMap entropy = entropy(pt);
		int keys[] = entropy.keys();
		int size = keys.length;
		if(size == 0){ //avoid division by zero / NaN output on an empty table
			System.out.println(" Entropy mean 0.0 variance 0.0 min 0.0 max 0.0");
			return;
		}
		double mean = 0;
		double min = Double.POSITIVE_INFINITY;
		double max = Double.NEGATIVE_INFINITY;
		for(int i = 0; i < size; i++){
			double value = entropy.get(keys[i]);
			mean += value;
			min = Math.min(min, value);
			max = Math.max(max, value);
		}
		mean = mean/size;
		//Second pass: population variance around the mean.
		double variance = 0;
		for(int i = 0; i < size; i++){
			double diff = entropy.get(keys[i]) - mean;
			variance += diff*diff;
		}
		variance = variance/size;
		System.out.println(" Entropy mean " + mean + " variance " + variance + " min " + min + " max " + max);
	}

	/**
	 * Command-line entry point.
	 *
	 * NOTE(review): the entire body is commented out, so this currently does
	 * nothing. The dead code is kept verbatim for reference; it built a phrase
	 * table from a bilingual corpus and printed per-phrase entropies. Either
	 * re-enable it deliberately or delete it — confirm with the author.
	 *
	 * @param args unused while the body is disabled
	 * @throws IOException declared for the disabled corpus-loading code
	 */
	public static void main(String[] args) throws IOException {
		/*String corpusDescription = args[0];
		int size = Integer.parseInt(args[1]); 
		int maxSentenceSize = Integer.parseInt(args[2]); 
		int maxSourcePhraseSize = Integer.parseInt(args[3]); 
		int maxTargetPhraseSize = Integer.parseInt(args[4]); 
		int threshold = Integer.parseInt(args[5]); 
		System.out.println("Corpus " + corpusDescription);
		System.out.println("Size " + size);
		System.out.println("Max Sentence size " + maxSentenceSize);
		System.out.println("Max Source Phrase Size " + maxSourcePhraseSize);
		System.out.println("Max Target Phrase Size " + maxTargetPhraseSize);
		System.out.println("Threshold " + threshold);
		
		
		System.out.println("Load bilingual corpus");
		BilingualCorpus corpus = BilingualCorpus.getCorpusFromFileDescription(corpusDescription, size, maxSentenceSize);
		System.out.println("Load Phrases vocab");
		WordTrie sourcePhrasesVocab = new WordTrie(maxSourcePhraseSize);
		sourcePhrasesVocab.addPhrasesAllSentences(corpus._trainSourceSentences);
		sourcePhrasesVocab.compactTrie();
		System.out.println("source vocab built with " + sourcePhrasesVocab._phraseCounter);
		WordTrie foreignPhrasesVocab = new WordTrie(maxTargetPhraseSize);
		foreignPhrasesVocab.addPhrasesAllSentences(corpus._trainForeignSentences);
		foreignPhrasesVocab.compactTrie();
		System.out.println("foreing vocab built with " + foreignPhrasesVocab._phraseCounter);
		System.out.println("Starting init of phrase table");
	
		M1 model =  new M1(corpus);
		model.train(5,false,"");
		
		PhraseTable pt = GeneralPhraseExtraction.build(corpus, BilingualCorpus.TRAIN_CORPUS,sourcePhrasesVocab, foreignPhrasesVocab, maxSourcePhraseSize, maxTargetPhraseSize, 
				new GlobalProbabilityCalculator(), new DirectionalModelPosterior(model), new AlwaysAccept(), new HistogramGlobalPrunning(0), new UniformLexicalWeightingCalculator());
		//pt.print(System.out);	
		
		PhraseTableStats pts = new PhraseTableStats();
		TIntDoubleHashMap resuls = pts.entropy(pt);
		int[] keys = resuls.keys();
		for(int i = 0; i < keys.length; i++){
			System.out.println("Source Phrase " + keys[i] + " entropy " + resuls.get(keys[i]));
		}
		pts.printPhraseTableStats(pt);
*/
	}

}
