package geppetto.cat.alignments.phrasal;

import java.io.PrintStream;
import java.util.ArrayList;

import geppetto.cat.alignments.Alignment;
import geppetto.cat.alignments.AlignmentsSet;
import geppetto.cat.constrains.ConstrainedProjectionStats;
import geppetto.cat.corpus.BilingualCorpus;
import geppetto.cat.models.AbstractModel;
import geppetto.cat.models.DistortionTable;
import geppetto.cat.models.stats.EStepStats;
import geppetto.phraseHMM.DoubleDistortionTable;
import geppetto.phraseHMM.WordTrie;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.ExtractedPhrasePair;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.SentenceInfo;
import geppetto.phraseHMM.phraseExtraction.phraseAccepter.AbstractPhraseAccepter;
import geppetto.phraseHMM.phraseExtraction.phraseAccepter.AlwaysAccept;
import geppetto.phraseHMM.phraseExtraction.phraseAccepter.NoSurePointConflictAccepter;
import geppetto.phraseHMM.phraseExtraction.sentencePair.PhrasePairCandidate;
import geppetto.phraseHMM.phraseExtraction.sentencePair.SizeBasedPhrasePairCandidateExtractor;
import geppetto.phraseTable.phrase.Phrase;
import geppetto.reordering.graph.GraphWeightScorer;
import geppetto.reordering.graph.ReorderingGraph;
import geppetto.reordering.graph.ReorderingGraphEdge;

/**
 * Phrase-based HMM alignment model trained with EM.
 *
 * <p>Each E-step iteration extracts phrase-pair candidates for a sentence,
 * scores them with the phrase translation table, builds a reordering graph
 * over the accepted pairs, runs forward/backward (alpha/beta) to obtain
 * node and edge posteriors, and accumulates them as expected counts.
 * The M-step renormalizes the translation and distortion tables.
 */
public class PhrasalHmmModel extends AbstractModel{	
	
	WordTrie sourceTrie;                      // phrase-id lookup for source-side phrases
	WordTrie targetTrie;                      // phrase-id lookup for target/foreign-side phrases
	AlignmentsSet alignmentSet;               // sure alignment points used to filter candidates and find unaligned words
	PhraseTranslationTable translationTable;  // emission probabilities + expected-count accumulator
	PhraseDistortionTable distortionTable;    // transition probabilities + expected-count accumulator
	int sourceMaxSize;                        // maximum source phrase length considered
	int targetMaxSize;                        // maximum target phrase length considered
	
	// Per-sentence scratch state: written by generatePosteriors() and read by
	// updateExpectedCounts() for the same sentence within eStep().
	ExtractedPhrasePair[] extractedPhrasesFromSentenceInEstep;
	ReorderingGraphEdge[] edgesFromEstep;

	/**
	 * @param corpus           bilingual training corpus (passed to AbstractModel)
	 * @param sourceTrie       trie mapping source phrases to ids
	 * @param targetTrie       trie mapping target phrases to ids
	 * @param alignmentSet     known alignments used for candidate filtering
	 * @param translationTable phrase emission model
	 * @param distortionTable  phrase transition (reordering) model
	 * @param sourceMaxSize    maximum source phrase length to extract
	 * @param targetMaxSize    maximum target phrase length to extract
	 */
	public PhrasalHmmModel(BilingualCorpus corpus, WordTrie sourceTrie,
			WordTrie targetTrie, AlignmentsSet alignmentSet, PhraseTranslationTable translationTable, PhraseDistortionTable distortionTable, int sourceMaxSize, int targetMaxSize) {
		super(corpus);
		this.sourceTrie = sourceTrie;
		this.targetTrie = targetTrie;
		this.alignmentSet = alignmentSet;
		this.translationTable = translationTable;
		this.distortionTable = distortionTable;
		this.sourceMaxSize = sourceMaxSize;
		this.targetMaxSize = targetMaxSize;
	}

	/**
	 * One EM expectation step over the whole training corpus: clears the
	 * count tables, then for each sentence computes phrase/edge posteriors
	 * and folds them into the expected-count tables.
	 *
	 * @return per-step statistics object (currently unpopulated by this model)
	 */
	@Override
	public EStepStats eStep() {
		clearTables();
		EStepStats stats = createModelStats();
		byte sentenceSource = BilingualCorpus.TRAIN_CORPUS;
		int numberOfSentences = _corpus.getNumSentences(sentenceSource);
		for (int i = 0; i < numberOfSentences; i++) {
			System.err.println("processing sentence " + i); // fixed typo: was "sentencce"
			generatePosteriors(i, sentenceSource);
			updateExpectedCounts();
		}
		return stats;
	}
	
	/** Resets the expected-count accumulators of both model tables. */
	public void clearTables(){
		translationTable.clearCountTables();
		distortionTable.clearCountTables();
	}
	
	/**
	 * Computes phrase-pair and edge posteriors for one sentence pair and
	 * stores them in {@link #extractedPhrasesFromSentenceInEstep} and
	 * {@link #edgesFromEstep}. Phrase scores are overwritten in place:
	 * first with the emission probability, then with the posterior
	 * alpha*beta / (emission * Z).
	 *
	 * @param sentenceNumber index of the sentence in the corpus
	 * @param sentenceSource corpus partition (e.g. TRAIN_CORPUS)
	 */
	public void generatePosteriors(int sentenceNumber, byte sentenceSource){
		int[] foreingSentence = _corpus.getForeignSentence(sentenceNumber,sentenceSource);
		int[] sourceSentence = _corpus.getSourceSentence(sentenceNumber,sentenceSource);			
		PhrasePairCandidate[] candidatePairs = new SizeBasedPhrasePairCandidateExtractor(sourceMaxSize, targetMaxSize).extractSentencePairs(foreingSentence, sourceSentence, sentenceNumber);
		ArrayList<ExtractedPhrasePair> extractedPhrasePairs = new ArrayList<ExtractedPhrasePair>();
		// The accepter depends only on the alignment set, so build it once per
		// sentence rather than once per candidate (was constructed inside the loop).
		AbstractPhraseAccepter accepter = new NoSurePointConflictAccepter(alignmentSet);
		for(PhrasePairCandidate candidatePair : candidatePairs){
			int foreingSize = candidatePair.getTargetEndIndex() - candidatePair.getTargetStartIndex() + 1;
			int sourceSize = candidatePair.getSourceEndIndex() - candidatePair.getSourceStartIndex() + 1;
			// Copy the candidate spans out of the full sentences.
			int[] foreing = new int[foreingSize];
			int[] source = new int[sourceSize];
			for (int f = 0; f < foreingSize; f++) {
				foreing[f] = foreingSentence[candidatePair.getTargetStartIndex() + f];
			}
			for (int s = 0; s < sourceSize; s++) {
				source[s] = sourceSentence[candidatePair.getSourceStartIndex() + s];
			}

			// Keep only candidates that do not conflict with sure alignment points;
			// score survivors with their current emission probability.
			ExtractedPhrasePair epp = new ExtractedPhrasePair(source, foreing, sentenceNumber, sentenceSource, foreingSentence, sourceSentence,candidatePair.getSourceStartIndex(), candidatePair.getSourceEndIndex(), candidatePair.getTargetStartIndex(), candidatePair.getTargetEndIndex());
			if(accepter.acceptPhrase(epp)){
				epp.setScore(translationTable.getEmission(sourceTrie.getPhraseId(source), targetTrie.getPhraseId(foreing)));
				extractedPhrasePairs.add(epp);
			}
		}
		// Add null-translation phrases for target words with no sure alignment.
		ExtractedPhrasePair[] nullPhrasePairs = getNullTranslationPhrasePairs(foreingSentence, sourceSentence, sentenceNumber, 1);
		for(ExtractedPhrasePair nullPhrase : nullPhrasePairs){
			nullPhrase.setScore(translationTable.getEmission(targetTrie.getPhraseId(nullPhrase.getTargetPhrase())));
			extractedPhrasePairs.add(nullPhrase);				
		}
		if(extractedPhrasePairs.isEmpty()){
			extractedPhrasesFromSentenceInEstep = new ExtractedPhrasePair[0];
			edgesFromEstep = new ReorderingGraphEdge[0];
			return; // no point calculating posteriors if no phrase pairs are extracted
		}
		ExtractedPhrasePair[] extractedPhrasePairsArray = extractedPhrasePairs.toArray(new ExtractedPhrasePair[0]);
		SentenceInfo info = new SentenceInfo(sentenceNumber, foreingSentence, sourceSentence, extractedPhrasePairsArray);
		ReorderingGraph graph = new ReorderingGraph(info);
		// Node weights are emission scores (1 for the virtual start/end nodes);
		// arc weights are distortion-table transition potentials.
		graph.setScorer(new GraphWeightScorer(){
			@Override
			public double scoreNode(ExtractedPhrasePair node) {
				if(ReorderingGraph.isStartNode(node) || ReorderingGraph.isEndNode(node)){
					return 1;
				}
				return node.getScore();
			}
			@Override
			public double scoreNextArc(ExtractedPhrasePair from,
					ExtractedPhrasePair to) {
				return getEdgePotential(new ReorderingGraphEdge(from, to));
			}
			@Override
			public double scorePrevArc(ExtractedPhrasePair from,
					ExtractedPhrasePair to) {
				// Backward direction: the edge in the graph runs to -> from.
				return getEdgePotential(new ReorderingGraphEdge(to, from));
			}
		});
		graph.generateWeightedGraphPaths();
		
		// Z = total path mass into the end node.
		double normalizer = graph.getPathsToNode(graph.getEndNode());
		for(ExtractedPhrasePair extractedPP : extractedPhrasePairsArray){
			double alpha = graph.getPathsToNode(extractedPP);
			double beta = graph.getPathsFromNode(extractedPP);
			// alpha and beta both include the node's own emission score, so
			// divide it out once to get the node posterior.
			double score = alpha * beta / (extractedPP.getScore() * normalizer);
			extractedPP.setScore(score);			
		}
		extractedPhrasesFromSentenceInEstep = extractedPhrasePairsArray;
		edgesFromEstep=graph.getAllEdges();
		for(ReorderingGraphEdge edge : edgesFromEstep){
			double alpha = graph.getPathsToNode(edge.getFromNode());
			double beta = graph.getPathsFromNode(edge.getToNode());
			// Edge posterior: alpha(from) * potential * beta(to) / Z.
			double score = alpha * getEdgePotential(edge) * beta / normalizer;
			edge.setPotential(score);
		}
	}
	
	/**
	 * Folds the posteriors computed by the last {@link #generatePosteriors}
	 * call into the translation and distortion expected-count tables.
	 * NaN/Infinite/zero posteriors are reported to stderr; zero-posterior
	 * non-null phrases are skipped.
	 */
	public void updateExpectedCounts(){
		for(ExtractedPhrasePair extractedPP : extractedPhrasesFromSentenceInEstep){
			if(Double.isNaN(extractedPP.getScore()) || Double.isInfinite(extractedPP.getScore())){
				System.err.println("detected state Nan in sentence " + extractedPP.getSentenceNumber()); 
			}
			if(extractedPP.isNullPhrase()){
				if(extractedPP.getScore() != 0){
					// Null phrases are keyed by target phrase id only.
					translationTable.addToCountTableEntry(targetTrie.getPhraseId(extractedPP.getTargetPhrase()), extractedPP.getScore());
				}
			}
			else{
				if(extractedPP.getScore() == 0){
					System.err.println("detected 0 in sentence " + extractedPP.getSentenceNumber() + " phrase: " + Phrase.getSourceString(_corpus, extractedPP.getSourcePhrase()) + "("+ sourceTrie.getPhraseId(extractedPP.getSourcePhrase())+ ")" + "->" + Phrase.getTargetString(_corpus, extractedPP.getTargetPhrase()) + "(" + targetTrie.getPhraseId(extractedPP.getTargetPhrase()) + ")");
				}
				else{
					translationTable.addToCountTableEntry(sourceTrie.getPhraseId(extractedPP.getSourcePhrase()), targetTrie.getPhraseId(extractedPP.getTargetPhrase()), extractedPP.getScore());
				}
			}
		}
		for(ReorderingGraphEdge edge : edgesFromEstep){
			if(Double.isNaN(edge.getPotential()) || Double.isInfinite(edge.getPotential())){
				System.err.println("detected edge Nan in sentence " + edge.getToNode().getSentenceNumber()); 
			}			
			distortionTable.addTransitionExpectedCount(edge, edge.getPotential());
		}
	}
	
	/** Prints the phrase-pair expected counts (top 10 per entry) to the given streams. */
	public void printPhrasePairExpectations(PrintStream phrasePairStream, PrintStream nullPhraseStream){
		translationTable.printExtractionTable(phrasePairStream, nullPhraseStream, 10);
	}
	
	/** Prints the reordering (distortion) expected counts. */
	public void printReorderingExpectations(PrintStream reorderingStream){
		distortionTable.printExtractionTable(reorderingStream);
	}
	
	/** @return the current distortion-table transition potential for the edge. */
	public double getEdgePotential(ReorderingGraphEdge edge){
		return distortionTable.getTransition(edge);
	}
	
	@Override
	public void finalizeTrain() {
		// Nothing to finalize for this model.
	}
	
	/**
	 * Prints the last sentence's phrase-pair posteriors as
	 * "source target score" lines.
	 */
	public void printExpectactions(PrintStream out) {
		for(ExtractedPhrasePair p : extractedPhrasesFromSentenceInEstep){
			out.println(Phrase.getSourceString(_corpus,p.getSourcePhrase()) + " " +
					Phrase.getTargetString(_corpus, p.getTargetPhrase()) + " " +
					p.getScore());
		}		
	}

	@Override
	public String getName() {
		return "phrasal hmm alignments";
	}

	@Override
	public double getNullPhrasePosterior(int sentenceNumber,
			byte sentenceSource, int[] foreingSentence, int[] sourceSentence,
			int startForeignIndex, int endForeignIndex) {
		// TODO not implemented for this model
		return 0;
	}

	@Override
	public double getPhrasePosterior(int sentenceNumber, byte sentenceSource,
			int[] foreingSentence, int[] sourceSentence, int startSourceIndex,
			int endSourceIndex, int startForeignIndex, int endForeignIndex) {
		// TODO not implemented for this model
		return 0;
	}

	@Override
	public void initializeTrain() {
		// No per-training initialization needed; tables are injected via constructor.
	}

	/** M-step: renormalizes both tables from their accumulated expected counts. */
	@Override
	public void mStep() {
		translationTable.updatePhraseTableScores();
		distortionTable.updateDistortionTableScores();
	}

	@Override
	public Alignment posteriorDecodingAlignment(int sentenceNumber,
			byte sentenceSource, float treshhold, boolean projectPosteriors,
			ConstrainedProjectionStats stats) {
		// TODO not implemented for this model
		return null;
	}

	@Override
	public Alignment viterbiAlignment(int sentenceNumber, byte sentenceSource,
			boolean projectPosteriors, ConstrainedProjectionStats stats) {
		// TODO not implemented for this model
		return null;
	}
	
	/** Factory hook so subclasses can supply richer E-step statistics. */
	public EStepStats createModelStats(){
		return new EStepStats();
	}
	
	/**
	 * Builds single-word null-translation phrase pairs for every target word
	 * that has no sure alignment point, one per source position.
	 *
	 * <p>NOTE(review): the source indices are offset by the source sentence
	 * length ({@code sourceStartPos + sSize}) — presumably a convention
	 * marking null-aligned positions past the real sentence; confirm against
	 * ExtractedPhrasePair. {@code maxPhraseSize} is currently unused.
	 *
	 * @param foreingSentence target-side word ids
	 * @param sourceSentence  source-side word ids
	 * @param sentenceNumber  sentence index (used to fetch its alignment)
	 * @param maxPhraseSize   unused; kept for interface compatibility
	 * @return one null phrase pair per (unaligned target word, source position)
	 */
	public ExtractedPhrasePair[] getNullTranslationPhrasePairs(int[] foreingSentence,
			int[] sourceSentence, int sentenceNumber, int maxPhraseSize){
		int tSize = foreingSentence.length;
		int sSize = sourceSentence.length;
		ArrayList<ExtractedPhrasePair> nullPhrases = new ArrayList<ExtractedPhrasePair>();
		Alignment al = alignmentSet.get(sentenceNumber);
		for (int targetStartPos = 0; targetStartPos < tSize; targetStartPos++){
			boolean hasSourceAlignment = false;
			for(int sourceStartPos = 0; sourceStartPos < sSize; sourceStartPos++){
				if(al.isSure(sourceStartPos, targetStartPos)){				
					hasSourceAlignment = true;
					break; // one sure point is enough; no need to keep scanning
				}
			}
			if(!hasSourceAlignment){
				for(int sourceStartPos = 0; sourceStartPos < sSize; sourceStartPos++){
					nullPhrases.add(new ExtractedPhrasePair(new int[]{foreingSentence[targetStartPos]}, sentenceNumber, BilingualCorpus.TRAIN_CORPUS, sourceSentence, sourceStartPos + sSize, sourceStartPos + sSize,targetStartPos, targetStartPos));
				}
			}
		}
		return nullPhrases.toArray(new ExtractedPhrasePair[0]);
	}
	
	/** Prints the current (normalized) phrase table. */
	public void printPhraseTable(PrintStream output, PrintStream nullPhrasesOutput){
		translationTable.printPhraseTable(output, nullPhrasesOutput);
	}
	
	/** Prints the current (normalized) reordering table. */
	public void printReorderingTable(PrintStream output){
		distortionTable.printReorderingTable(output);
	}
}
