package geppetto.cat.alignments.phrasal;

import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.TreeMap;

import model.distribution.AbstractMultinomial;
import model.distribution.Multinomial;
import model.distribution.SparseMultinomial;

import geppetto.cat.corpus.BilingualCorpus;
import geppetto.phraseHMM.WordTrie;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.ExtractedPhrasePair;
import geppetto.phraseTable.phrase.Phrase;
import gnu.trove.TIntArrayList;
import gnu.trove.TIntDoubleHashMap;
import gnu.trove.TIntObjectHashMap;

/**
 * Phrase-level translation table used during EM training of a phrasal alignment
 * model. Holds two parallel structures: probability tables ({@code phraseScores},
 * {@code nullTranslationScores}) and expected-count tables accumulated during the
 * E-step ({@code phraseCountTable}, {@code nullTranslationCount}). Phrases are
 * identified by integer ids assigned by the source/target {@link WordTrie}s.
 *
 * Not thread-safe: all tables are mutated without synchronization.
 */
public class PhraseTranslationTable {
	/** source-phrase id -> (target-phrase id -> translation probability). */
	public TIntObjectHashMap<TIntDoubleHashMap> phraseScores;
	/** source-phrase id -> (target-phrase id -> expected count from the E-step). */
	public TIntObjectHashMap<TIntDoubleHashMap> phraseCountTable;
	/** target-phrase id -> probability of being generated by the null (empty) source. */
	public TIntDoubleHashMap nullTranslationScores;
	/** target-phrase id -> expected count for the null translation. */
	public TIntDoubleHashMap nullTranslationCount;
	/** Default probability assigned to a phrase pair the first time it is queried. */
	public double initialScore = 0.01;

	BilingualCorpus corpus;
	WordTrie sourceTrie;
	WordTrie targetTrie;
	int sourceMaxSize; // longest source phrase (in words) accepted when loading a table
	int targetMaxSize; // longest target phrase (in words) accepted when loading a table

	public PhraseTranslationTable(BilingualCorpus corpus, WordTrie sourceTrie,
			WordTrie targetTrie, int sourceMaxSize, int targetMaxSize) {
		this.corpus = corpus;
		this.sourceTrie = sourceTrie;
		this.targetTrie = targetTrie;
		this.sourceMaxSize = sourceMaxSize;
		this.targetMaxSize = targetMaxSize;
		phraseScores = new TIntObjectHashMap<TIntDoubleHashMap>();
		nullTranslationScores = new TIntDoubleHashMap();
		phraseCountTable = new TIntObjectHashMap<TIntDoubleHashMap>();
		nullTranslationCount = new TIntDoubleHashMap();
	}

	/**
	 * Returns the translation score p(target | source), first inserting
	 * {@code initialScore} if the pair has never been seen (so the table
	 * self-populates on lookup).
	 */
	public double getEmission(int source, int target){
		if(!phraseScores.contains(source) || !phraseScores.get(source).contains(target)){
			insertPhraseScore(source, target, initialScore);
		}
		return phraseScores.get(source).get(target);
	}

	/**
	 * Returns the null-translation score of {@code target}, first inserting
	 * {@code initialScore} if the phrase has never been seen.
	 */
	public double getEmission(int target){
		if(!nullTranslationScores.contains(target)){
			insertPhraseScore(target, initialScore);
		}
		return nullTranslationScores.get(target);
	}

	/** Sets the score of the (source, target) pair, creating the inner map on demand. */
	public void insertPhraseScore(int source, int target, double score){
		if(!phraseScores.contains(source)){
			phraseScores.put(source, new TIntDoubleHashMap());
		}
		phraseScores.get(source).put(target, score);
	}

	/** Sets the null-translation score of {@code target}. */
	public void insertPhraseScore(int target, double score){
		nullTranslationScores.put(target, score);
	}

	/**
	 * Loads a Moses-style phrase table ("source ||| target ||| features") into
	 * {@link #phraseScores}, skipping entries whose phrase length exceeds the
	 * configured maxima. The reader is closed even when a line fails to parse
	 * (the original leaked it on exceptions).
	 *
	 * @throws IOException if the file cannot be read
	 */
	public void loadFromPhraseTableFile(String filename) throws IOException{
		BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(filename), "UTF8"));
		try {
			String strLine;
			while ((strLine = br.readLine()) != null) {
				String[] args = strLine.split(" \\|\\|\\| ");
				String source = args[0];
				String target = args[1];
				String features = args[2];
				if (source.split("\\s+").length > sourceMaxSize
						|| target.split("\\s+").length > targetMaxSize) {
					continue;
				}
				insertPhraseScore(getSourceId(source), getTargetId(target), getFeaturesScore(features));
			}
		} finally {
			br.close();
		}
	}

	/**
	 * Loads a null-translation table ("target ||| features") into
	 * {@link #nullTranslationScores}, skipping over-long target phrases.
	 * The reader is closed even when parsing fails.
	 *
	 * @throws IOException if the file cannot be read
	 */
	public void loadFromNullPhraseTable(String filename) throws IOException{
		BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(filename), "UTF8"));
		try {
			String strLine;
			while ((strLine = br.readLine()) != null) {
				String[] args = strLine.split(" \\|\\|\\| ");
				String target = args[0];
				String features = args[1];
				if (target.split("\\s+").length > targetMaxSize) {
					continue;
				}
				insertPhraseScore(getTargetId(target), getFeaturesScore(features));
			}
		} finally {
			br.close();
		}
	}

	/**
	 * Same as {@link #loadFromPhraseTableFile(String)} but for a table stored in
	 * the reverse direction: column 0 is the target phrase and column 1 the source.
	 *
	 * @throws IOException if the file cannot be read
	 */
	public void loadFromPhraseTableFileReverse(String filename) throws IOException{
		BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(filename), "UTF8"));
		try {
			String strLine;
			while ((strLine = br.readLine()) != null) {
				String[] args = strLine.split(" \\|\\|\\| ");
				String source = args[1];
				String target = args[0];
				String features = args[2];
				if (source.split("\\s+").length > sourceMaxSize
						|| target.split("\\s+").length > targetMaxSize) {
					continue;
				}
				insertPhraseScore(getSourceId(source), getTargetId(target), getFeaturesScore(features));
			}
		} finally {
			br.close();
		}
	}

	/** Maps a whitespace-separated source phrase string to its trie id. */
	public int getSourceId(String source){
		String[] words = source.split("\\s+");
		int[] wordIds = new int[words.length];
		for(int i = 0; i < words.length; i++){
			wordIds[i] = corpus.getSourceWordIndex(words[i]);
		}
		return sourceTrie.getPhraseId(wordIds);
	}

	/** Maps a whitespace-separated target phrase string to its trie id. */
	public int getTargetId(String target){
		String[] words = target.split("\\s+");
		int[] wordIds = new int[words.length];
		for(int i = 0; i < words.length; i++){
			wordIds[i] = corpus.getForeignWordIndex(words[i]);
		}
		return targetTrie.getPhraseId(wordIds);
	}

	/** Returns the first feature column of a phrase-table line as the score. */
	public double getFeaturesScore(String features){
		return Double.parseDouble(features.split("\\s+")[0]);
	}

	/** Returns the third feature column (reverse-direction score). */
	public double getFeaturesScoreRev(String features){
		return Double.parseDouble(features.split("\\s+")[2]);
	}

	/** Total number of (source, target) entries currently in {@link #phraseScores}. */
	public int getTableSize() {
		int n = 0;
		for(int source : phraseScores.keys()){
			n += phraseScores.get(source).size();
		}
		return n;
	}

	/** Returns the expected count of the pair, initializing missing entries to 0. */
	public double getExpectedCount(int source, int target){
		if(!phraseCountTable.contains(source) || !phraseCountTable.get(source).contains(target)){
			addToCountTableEntry(source, target, 0);
		}
		return phraseCountTable.get(source).get(target);
	}

	/**
	 * Returns the expected null-translation count of {@code target}.
	 * NOTE(review): unlike the pair overload, a missing entry is seeded with
	 * {@code initialScore} rather than 0 — behavior preserved from the original,
	 * but confirm the asymmetry is intentional.
	 */
	public double getExpectedCount(int target){
		if(!nullTranslationCount.contains(target)){
			addToCountTableEntry(target, initialScore);
		}
		return nullTranslationCount.get(target);
	}

	/** Accumulates {@code score} onto the pair's expected count (creates entries on demand). */
	public void addToCountTableEntry(int source, int target, double score){
		if(!phraseCountTable.contains(source)){
			phraseCountTable.put(source, new TIntDoubleHashMap());
		}
		TIntDoubleHashMap targets = phraseCountTable.get(source);
		if(!targets.contains(target)){
			targets.put(target, 0);
		}
		targets.put(target, targets.get(target) + score);
	}

	/** Word-id-array convenience overload: resolves trie ids, then accumulates. */
	public void addToCountTableEntry(int[] source, int[] target, double score){
		addToCountTableEntry(sourceTrie.getPhraseId(source), targetTrie.getPhraseId(target), score);
	}

	/** Accumulates {@code score} onto the null-translation expected count. */
	public void addToCountTableEntry(int target, double score){
		if(!nullTranslationCount.contains(target)){
			nullTranslationCount.put(target, 0);
		}
		nullTranslationCount.put(target, nullTranslationCount.get(target) + score);
	}

	/** Word-id-array convenience overload for the null-translation count. */
	public void addToCountTableEntry(int target[], double score){
		addToCountTableEntry(targetTrie.getPhraseId(target), score);
	}

	/** Resets both expected-count tables (call at the start of a new E-step). */
	public void clearCountTables(){
		nullTranslationCount = new TIntDoubleHashMap();
		phraseCountTable = new TIntObjectHashMap<TIntDoubleHashMap>();
	}

	/**
	 * M-step: normalizes the accumulated expected counts into fresh probability
	 * tables via a {@link SparseMultinomial}. Each source phrase is one multinomial
	 * variable over its observed targets; one extra variable (the last index)
	 * models the null translation. Both score tables are rebuilt from scratch.
	 */
	public void updatePhraseTableScores(){
		int[] sources = phraseCountTable.keys();
		int numberOfVariables = sources.length + 1; // sources + null translations
		int null_word_index = sources.length;       // last variable models the null word
		TIntArrayList[] pattern = new TIntArrayList[numberOfVariables];
		for(int i = 0; i < sources.length; i++){
			pattern[i] = new TIntArrayList();
			pattern[i].add(phraseCountTable.get(sources[i]).keys());
		}
		pattern[null_word_index] = new TIntArrayList();
		pattern[null_word_index].add(nullTranslationCount.keys());

		SparseMultinomial multinomial = new SparseMultinomial(numberOfVariables, numberOfVariables, pattern);
		for(int i = 0; i < sources.length; i++){
			TIntDoubleHashMap targets = phraseCountTable.get(sources[i]);
			for(int t : targets.keys()){
				multinomial.setCounts(i, t, targets.get(t));
			}
		}
		for(int t : nullTranslationCount.keys()){
			multinomial.setCounts(null_word_index, t, nullTranslationCount.get(t));
		}

		normalize(multinomial);

		// Rebuild the probability tables from the normalized counts.
		phraseScores = new TIntObjectHashMap<TIntDoubleHashMap>();
		for(int i = 0; i < sources.length; i++){
			TIntDoubleHashMap targets = phraseCountTable.get(sources[i]);
			for(int t : targets.keys()){
				insertPhraseScore(sources[i], t, multinomial.getCounts(i, t));
			}
		}

		nullTranslationScores = new TIntDoubleHashMap();
		for(int t : nullTranslationCount.keys()){
			insertPhraseScore(t, multinomial.getCounts(null_word_index, t));
		}
	}

	/** Hook so subclasses can replace the normalization (e.g. to add smoothing). */
	protected void normalize(AbstractMultinomial multinomial){
		multinomial.normalize();
	}

	public TIntObjectHashMap<TIntDoubleHashMap> getPhraseScores() {
		return phraseScores;
	}

	public void setPhraseScores(TIntObjectHashMap<TIntDoubleHashMap> phraseScores) {
		this.phraseScores = phraseScores;
	}

	public TIntDoubleHashMap getNullTranslationScores() {
		return nullTranslationScores;
	}

	public void setNullTranslationScores(TIntDoubleHashMap nullTranslationScores) {
		this.nullTranslationScores = nullTranslationScores;
	}

	/**
	 * Builds a descending-by-count ordered view of {@code targets}. Ties are broken
	 * by target id: the original comparator returned 0 for distinct targets with
	 * equal counts, which made TreeMap treat them as duplicate keys and silently
	 * drop all but one of them.
	 */
	private static TreeMap<Integer, Double> sortTargetsByCount(final TIntDoubleHashMap targets) {
		TreeMap<Integer, Double> sorted = new TreeMap<Integer, Double>(new Comparator<Integer>() {
			@Override
			public int compare(Integer a, Integer b) {
				int byCount = Double.compare(targets.get(b), targets.get(a));
				return byCount != 0 ? byCount : a.compareTo(b);
			}
		});
		// keys() and getValues() iterate the backing storage in the same order,
		// so keys[i] pairs with values[i] (same assumption as the original code).
		int[] keys = targets.keys();
		double[] values = targets.getValues();
		for (int i = 0; i < keys.length; i++) {
			sorted.put(keys[i], values[i]);
		}
		return sorted;
	}

	/**
	 * Shared printer for both printExtractionTable overloads: for each source
	 * phrase emits at most {@code fertility} targets (highest count first) as
	 * "source ||| target ||| count", then all null-translation counts.
	 */
	private void printCountTables(PrintStream phrasePairStream, PrintStream nullPhraseStream,
			TIntObjectHashMap<TIntDoubleHashMap> pairCounts, TIntDoubleHashMap nullCounts, int fertility) {
		for(int s : pairCounts.keys()){
			TreeMap<Integer, Double> sortedTargets = sortTargetsByCount(pairCounts.get(s));
			int numberOfOptions = 0;
			for(Integer t : sortedTargets.keySet()){
				phrasePairStream.println(corpus.getSourceString(sourceTrie.getPhraseIndexesById(s)) + " ||| " + corpus.getForeignString(targetTrie.getPhraseIndexesById(t)) + " ||| " + sortedTargets.get(t));
				numberOfOptions++;
				if(numberOfOptions >= fertility){
					break;
				}
			}
		}
		for(int t : nullCounts.keys()){
			nullPhraseStream.println(corpus.getForeignString(targetTrie.getPhraseIndexesById(t)) + " ||| " + nullCounts.get(t));
		}
	}

	/**
	 * Prints the counts of phrase pairs extracted from a single sentence during
	 * the E-step. The pairs are accumulated into a scratch table first so this
	 * table's own counts are left untouched.
	 */
	public void printExtractionTable(PrintStream phrasePairStream,
			PrintStream nullPhraseStream,
			ExtractedPhrasePair[] extractedPhrasesFromSentenceInEstep, int fertility) {
		PhraseTranslationTable emptyTable = new PhraseTranslationTable(corpus, sourceTrie, targetTrie, sourceMaxSize, targetMaxSize);
		for(ExtractedPhrasePair extractedPP : extractedPhrasesFromSentenceInEstep){
			if(extractedPP.isNullPhrase()){
				emptyTable.addToCountTableEntry(extractedPP.getTargetPhrase(), extractedPP.getScore());
			}
			else{
				emptyTable.addToCountTableEntry(extractedPP.getSourcePhrase(), extractedPP.getTargetPhrase(), extractedPP.getScore());
			}
		}
		printCountTables(phrasePairStream, nullPhraseStream, emptyTable.phraseCountTable, emptyTable.nullTranslationCount, fertility);
	}

	/** Prints this table's own accumulated expected counts; see the sentence-level overload. */
	public void printExtractionTable(PrintStream phrasePairStream,
			PrintStream nullPhraseStream, int fertility) {
		printCountTables(phrasePairStream, nullPhraseStream, phraseCountTable, nullTranslationCount, fertility);
	}

	/**
	 * Re-reads extraction files previously written by printExtractionTable and
	 * accumulates the posterior counts into the count tables. Fixes two defects
	 * in the original: the readers were never closed, and end-of-file was tested
	 * with ready(), which only reports buffered availability — readLine() == null
	 * is the reliable EOF signal.
	 *
	 * @throws IOException if either file cannot be read
	 */
	public void loadCountsFromExtraction(String phraseExtractionFile, String nullPhraseExtractionFile) throws IOException {
		BufferedReader phraseExtractionReader = new BufferedReader(
				new InputStreamReader(new FileInputStream(phraseExtractionFile), "UTF8"));
		try {
			String line;
			while ((line = phraseExtractionReader.readLine()) != null) {
				String[] lineArray = line.split("\\s+\\|\\|\\|\\s+");
				String source = lineArray[0];
				String target = lineArray[1];
				double posteriorCounts = Double.parseDouble(lineArray[2]);
				int[] sourceIds = corpus.getSourceIds(source.split("\\s+"));
				int[] targetIds = corpus.getTargetIds(target.split("\\s+"));
				addToCountTableEntry(sourceIds, targetIds, posteriorCounts);
			}
		} finally {
			phraseExtractionReader.close();
		}

		BufferedReader nullPhraseExtractionReader = new BufferedReader(
				new InputStreamReader(new FileInputStream(nullPhraseExtractionFile), "UTF8"));
		try {
			String line;
			while ((line = nullPhraseExtractionReader.readLine()) != null) {
				String[] lineArray = line.split("\\s+\\|\\|\\|\\s+");
				String target = lineArray[0];
				double posteriorCounts = Double.parseDouble(lineArray[1]);
				int[] targetIds = corpus.getTargetIds(target.split("\\s+"));
				addToCountTableEntry(targetIds, posteriorCounts);
			}
		} finally {
			nullPhraseExtractionReader.close();
		}
	}

	/**
	 * Prints the current probability tables, omitting entries whose score is not
	 * above {@code initialScore} (i.e. pairs that were never updated past the
	 * lazy-initialization default).
	 */
	public void printPhraseTable(PrintStream out, PrintStream nullPhraseOutput) {
		for(int s : phraseScores.keys()){
			TIntDoubleHashMap directTargetPhrases = phraseScores.get(s);
			for(int t : directTargetPhrases.keys()){
				double score = directTargetPhrases.get(t);
				if(score > initialScore){
					out.println(corpus.getSourceString(sourceTrie.getPhraseIndexesById(s)) + " ||| " + corpus.getForeignString(targetTrie.getPhraseIndexesById(t)) + " ||| " + score);
				}
			}
		}
		for(int t : nullTranslationScores.keys()){
			double score = nullTranslationScores.get(t);
			if(score > initialScore){
				nullPhraseOutput.println(corpus.getForeignString(targetTrie.getPhraseIndexesById(t)) + " ||| " + score);
			}
		}
	}
}

	