package geppetto.lexical;

import java.io.PrintStream;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Arrays;

import geppetto.cat.alignments.Alignment;
import geppetto.cat.alignments.AlignmentsSet;
import geppetto.cat.common.Pair;
import geppetto.cat.corpus.BilingualCorpus;
import geppetto.phraseHMM.WordTrie;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.ExtractedPhrasePair;
import geppetto.phraseScorer.AbstractPhraseScorer;
import geppetto.phraseScorer.One;
import geppetto.phraseTable.PhraseTable;
import gnu.trove.TIntDoubleHashMap;
import gnu.trove.TIntIntHashMap;
import gnu.trove.TIntObjectHashMap;
import gnu.trove.TObjectDoubleHashMap;

/**
 * Word-level lexical translation table built from a word-aligned bilingual
 * corpus. {@link #init} accumulates (optionally scorer-weighted)
 * co-occurrence counts for every aligned word pair — counting unaligned
 * words against {@link #NULL} — in both translation directions, from which
 * relative-frequency translation probabilities and Moses-style lexical
 * tables are produced.
 */
public class LexicalT {
	/** Sentinel word id for the empty ("null") side of an unaligned word. */
	public static int NULL = -1;
	/** Corpus the counts are collected from. */
	public BilingualCorpus _corpus;
	/** Per-source-word score totals; normalizer for the FS direction. */
	public TIntDoubleHashMap _sourceWordSums;
	/** Per-target-word score totals; normalizer for the SF direction. */
	public TIntDoubleHashMap _targetWordSums;
	/** sourceId -> (targetId -> accumulated score). */
	public TIntObjectHashMap<TIntDoubleHashMap> _wordTranslationSF;
	/** targetId -> (sourceId -> accumulated score). */
	public TIntObjectHashMap<TIntDoubleHashMap> _wordTranslationFS;
	/** Weights each extracted word pair; defaults to a constant score of one. */
	public AbstractPhraseScorer _scorer = new One();

	/** Creates a table over {@code corpus} with the default uniform scorer. */
	public LexicalT(BilingualCorpus corpus) {
		super();
		_corpus = corpus;
	}

	/** Creates a table over {@code corpus}, weighting pairs with {@code scorer}. */
	public LexicalT(BilingualCorpus corpus, AbstractPhraseScorer scorer) {
		super();
		_corpus = corpus;
		_scorer = scorer;
	}

	/**
	 * Collects weighted lexical co-occurrence counts from every sentence pair
	 * in the corpus partition. Each alignment link contributes its score to
	 * both directional tables and both normalizer sums; a word with no link
	 * on either side is counted as translating to {@link #NULL}.
	 *
	 * @param sentenceSource corpus partition selector, passed through to the
	 *        corpus accessors
	 * @param alignments one word alignment per sentence pair, indexed by
	 *        sentence number
	 */
	public void init(byte sentenceSource, final AlignmentsSet alignments){
		_sourceWordSums = new TIntDoubleHashMap();
		_targetWordSums = new TIntDoubleHashMap();
		_wordTranslationSF = new TIntObjectHashMap<TIntDoubleHashMap>();
		_wordTranslationFS = new TIntObjectHashMap<TIntDoubleHashMap>();
		
		for (int i = 0; i < _corpus.getNumSentences(sentenceSource); i++){
			Alignment alignment = alignments.get(i);
			int[] foreignSentence = _corpus.getForeignSentence(i, sentenceSource);
			int[] sourceSentence = _corpus.getSourceSentence(i, sentenceSource);
			// Tracks which target positions were covered by at least one link.
			// (Java zero-initializes boolean arrays, so no explicit fill is needed.)
			boolean[] hasAlignmentT = new boolean[foreignSentence.length];
			for(int s = 0; s < sourceSentence.length; s++){
				int sourceWordId = sourceSentence[s];
				boolean hasAlignmentS = false;
				for(int f = 0; f < foreignSentence.length; f++){
					if(alignment.hasPosition(s, f)){
						hasAlignmentS = true;
						hasAlignmentT[f] = true;
						int targetWordId = foreignSentence[f];
						double score = _scorer.getPhraseScore(new ExtractedPhrasePair(new int[]{sourceWordId}, new int[]{targetWordId}, i, sentenceSource, foreignSentence, sourceSentence, s, s, f, f));
						incrementTranslationTable(_wordTranslationSF, sourceWordId, targetWordId, score);
						incrementTranslationTable(_wordTranslationFS, targetWordId, sourceWordId, score);
						incrementWordCount(_sourceWordSums, sourceWordId, score);
						incrementWordCount(_targetWordSums, targetWordId, score);
					}
				}
				if (!hasAlignmentS){
					// Source word with no link: count it as translating to NULL.
					double score = _scorer.getPhraseScore(new ExtractedPhrasePair(new int[]{sourceWordId}, new int[]{}, i, sentenceSource, foreignSentence, sourceSentence, s, s, -1, -1));
					incrementTranslationTable(_wordTranslationSF, sourceWordId, NULL, score);
					incrementTranslationTable(_wordTranslationFS, NULL, sourceWordId, score);
					incrementWordCount(_sourceWordSums, sourceWordId, score);
					incrementWordCount(_targetWordSums, NULL, score);
				}
			}
			
			for(int f = 0; f < hasAlignmentT.length; f++){
				if(!hasAlignmentT[f]){
					// Target word with no link: count it as translating to NULL.
					int targetWordId = foreignSentence[f];
					double score = _scorer.getPhraseScore(new ExtractedPhrasePair(new int[]{}, new int[]{targetWordId}, i, sentenceSource, foreignSentence, sourceSentence, -1, -1, f, f));
					incrementTranslationTable(_wordTranslationSF, NULL, targetWordId, score);
					incrementTranslationTable(_wordTranslationFS, targetWordId, NULL, score);
					incrementWordCount(_targetWordSums, targetWordId, score);
					incrementWordCount(_sourceWordSums, NULL, score);
				}
			}
		}
		// Trim every hash table down to its live contents to reduce memory use.
		_sourceWordSums.compact();
		_targetWordSums.compact();
		_wordTranslationFS.compact();
		_wordTranslationSF.compact();
		for(TIntDoubleHashMap hash : _wordTranslationFS.getValues(new TIntDoubleHashMap[0])){
			hash.compact();
		}
		for(TIntDoubleHashMap hash : _wordTranslationSF.getValues(new TIntDoubleHashMap[0])){
			hash.compact();
		}
	}
	
	/** Adds {@code score} to the running total for {@code wordId}. */
	private static void incrementWordCount(TIntDoubleHashMap counter, int wordId, double score){
		// getWordCount already treats a missing key as 0, so the old
		// contains()/put(0) priming step was redundant.
		counter.put(wordId, getWordCount(counter, wordId) + score);
	}
	
	/** Returns the accumulated score for {@code wordId}, or 0 if unseen. */
	private static double getWordCount(TIntDoubleHashMap counter, int wordId){
		if(counter.contains(wordId)){
			return counter.get(wordId);
		}
		return 0;
	}

	/** Adds {@code score} to cell (sourceId, targetId), creating the row on demand. */
	private static void incrementTranslationTable(TIntObjectHashMap<TIntDoubleHashMap> table, int sourceId, int targetId, double score){
		if(!table.contains(sourceId)){
			table.put(sourceId, new TIntDoubleHashMap());
		}
		incrementWordCount(table.get(sourceId), targetId, score);
	}
	
	/** Returns the row for {@code sourceId}, or an empty map if unseen. */
	private static TIntDoubleHashMap getTranslationTableEntry(TIntObjectHashMap<TIntDoubleHashMap> table, int sourceId){
		if(table.contains(sourceId)){
			return table.get(sourceId);
		}
		return new TIntDoubleHashMap();
	}
	
	/** Prints the source-to-foreign table in Moses lexical-table format. */
	public void printTableSF(BilingualCorpus corpus, PrintStream out){
		printTable(corpus, _wordTranslationSF, _targetWordSums, out);
	}
	
	/** Prints the foreign-to-source table in Moses lexical-table format. */
	public void printTableFS(BilingualCorpus corpus, PrintStream out){
		printTable(corpus.reverse(), _wordTranslationFS, _sourceWordSums, out);
	}
	
	/**
	 * Prints one "source target probability" line per table cell, where the
	 * probability is the cell count normalized by the target word's total in
	 * {@code sums}. Every targetId present in {@code table} was also summed
	 * into {@code sums} by {@link #init}, so the divisor is non-zero here.
	 */
	public static void printTable(BilingualCorpus corpus, TIntObjectHashMap<TIntDoubleHashMap> table, TIntDoubleHashMap sums, PrintStream out){
		for (int sourceId : table.keys()){
			TIntDoubleHashMap targets = getTranslationTableEntry(table, sourceId);
			for (int targetId : targets.keys()){
				double totalCount = getWordCount(sums, targetId);
				double translationCount = getWordCount(targets, targetId);
				printLine(out, corpus, sourceId, targetId, translationCount / totalCount);
			}
		}
	}
	
	/** Lexical probability in the source-to-foreign direction. */
	public double getProbabilitySF(int sourceId, int targetId){
		return getProbability(sourceId, targetId, _wordTranslationSF, _targetWordSums);
	}
	
	/** Lexical probability in the foreign-to-source direction. */
	public double getProbabilityFS(int targetId, int sourceId){
		return getProbability(targetId, sourceId, _wordTranslationFS, _sourceWordSums);
	}
	
	/**
	 * Relative-frequency probability count(sourceId, targetId) / count(targetId).
	 * An unseen {@code targetId} now yields 0 instead of the 0/0 = NaN the
	 * unguarded division used to produce.
	 */
	public double getProbability(int sourceId, int targetId, TIntObjectHashMap<TIntDoubleHashMap> table, TIntDoubleHashMap sums){
		double totalCount = getWordCount(sums, targetId);
		if (totalCount == 0) {
			return 0;
		}
		TIntDoubleHashMap targets = getTranslationTableEntry(table, sourceId);
		return getWordCount(targets, targetId) / totalCount;
	}
	
	/** Writes a single "source foreign probability" table line. */
	private static void printLine(PrintStream out, BilingualCorpus corpus, int sourceId, int foreignId, double probability){
		out.println(getSourceWordFromCorpus(corpus, sourceId) + " " + getForeignWordFromCorpus(corpus, foreignId) + " " + getProbabilityInMosesFormat(probability));
	}
	
	/** Source word id to surface form; the NULL sentinel prints as "null". */
	private static String getSourceWordFromCorpus(BilingualCorpus corpus, int sourceId){
		if(sourceId == NULL){
			return "null";
		}
		return corpus.getSourceWordById(sourceId);
	}
	
	/** Foreign word id to surface form; the NULL sentinel prints as "null". */
	private static String getForeignWordFromCorpus(BilingualCorpus corpus, int targetId){
		if(targetId == NULL){
			return "null";
		}
		return corpus.getForeignWordById(targetId);
	}
	
	/** Formats a probability with 7 decimal places (Moses lexical-table style). */
	private static String getProbabilityInMosesFormat(double prob){
		// BigDecimal.valueOf rounds the double's canonical decimal form;
		// new BigDecimal(double) would round its raw binary expansion instead.
		// RoundingMode.HALF_UP replaces the deprecated ROUND_HALF_UP constant.
		return BigDecimal.valueOf(prob).setScale(7, RoundingMode.HALF_UP).toString();
	}
	
	/** Swaps the two translation directions (tables and normalizers) in place. */
	public void reverse(){
		TIntDoubleHashMap temp = _sourceWordSums;
		_sourceWordSums = _targetWordSums;
		_targetWordSums = temp;
		
		TIntObjectHashMap<TIntDoubleHashMap> temp2 = _wordTranslationSF;
		_wordTranslationSF = _wordTranslationFS;
		_wordTranslationFS = temp2;		
	}
}
