package geppetto.main;

import geppetto.cat.corpus.BilingualCorpus;
import geppetto.phraseHMM.WordTrie;
import geppetto.phraseHMM.lexicalWeighting.AbstractLexicalWeightingCalculator;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.ExtractedPhrasePairDataManager;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.AdaptationTypeCalc;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.ExtractedSentenceDataCalc;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.LexicalWeightDataCalc;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.ReorderingDataCalc;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.ReorderingGraphDataCalc;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.ScoreDataCalc;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.SubPathsDataCalc;
import geppetto.phraseProbability.PhraseProbabilityCalculator;
import geppetto.phraseProbability.smoothing.DiscountSmoothingProbabilityCalc;
import geppetto.phraseProbability.smoothing.FixedDiscount;
import geppetto.phraseProbability.smoothing.KneserNeyDiscount;
import geppetto.phraseProbability.smoothing.KneserNeyDistribution;
import geppetto.phraseProbability.smoothing.ModifiedKneserNeyDiscount;
import geppetto.phraseTable.ChineseSplitedPrinter;
import geppetto.phraseTable.NormalPrinter;
import geppetto.phraseTable.PhrasePrinter;
import geppetto.phraseTable.PhraseTable;
import geppetto.phraseTable.PrintSpecification;
import geppetto.phraseTable.builder.ExtractionFileReader;
import geppetto.phraseTable.builder.MemoryPhraseWriter;
import geppetto.phraseTable.builder.PhraseTableBuilder;
import geppetto.phraseTable.phrase.feature.calc.local.LexicalWeightingFeatureCalc;
import geppetto.phraseTable.phrase.feature.calc.local.PenaltyFeatureCalc;
import geppetto.phraseTable.phrase.feature.calc.local.ReorderingFeatureCalc;
import geppetto.phraseTable.phrase.feature.calc.local.WeightedReorderingFeatureCalc;
import geppetto.phraseTable.phrase.feature.global.AbstractGlobalFeatureCalc;
import geppetto.phraseTable.phrase.feature.global.LocalFeatureAdapter;
import geppetto.phraseTable.phrase.feature.global.PhraseBasedLexicalWeightingAverageCalc;
import geppetto.phraseTable.phrase.feature.global.PhraseBasedLexicalWeightingLongestPathCalc;
import geppetto.phraseTable.phrase.feature.global.PhraseBasedLexicalWeightingWeightedAverageCalc;
import geppetto.phraseTable.phrase.feature.global.ProbabilityGlobalFeatureCalc;
import geppetto.phraseTable.prunning.global.LocalPrunnerAdaptor;
import geppetto.phraseTable.prunning.global.LossBasedPrunner;
import geppetto.phraseTable.prunning.local.SubPhrasesPrunner;
import geppetto.phraseTable.resource.SourceTargetIndexResource;
import geppetto.phraseTable.resource.TranslationGraphResource;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;

public class CreatePhraseTableInMemory {
	// Merged run configuration (defaults < property file < command-line args).
	protected static GeppettoParameters _parameters = new GeppettoParameters();
	// Built-in fallback values, installed before any user configuration is read.
	protected static HashMap<String, String> _defaultParameters = new HashMap<String, String>();
	// Parameters that must be present; main() aborts when any is missing.
	protected static String[] REQUIRED = new String[] {"extract_file", "output"};
	// Set by getPhraseProbabilityCalculator() when the chosen smoothing scheme
	// requires normalized phrase counts.
	protected static boolean requirePhraseCountNormalization = false;
	// Training corpus plus the source/target vocabulary tries loaded from disk.
	protected static BilingualCorpus corpus;
	protected static WordTrie sourceVocab;
	protected static WordTrie targetVocab;
	// Serialized trie paths, read from the extraction-info properties file.
	protected static String sourceTrieFile;
	protected static String targetTrieFile;
	
	static{
		// Default parameter values; each may be overridden by the property file
		// (args[0]) or by command-line arguments — see main().
		_defaultParameters.put("encoding", "UTF-8");
//		_properties.put("output", "");		
		_defaultParameters.put("targetPhrasePrinter", "normal");
		_defaultParameters.put("sourcePhrasePrinter", "normal");
		// '-'-separated feature list; addBasicFeatures() lists the known names.
		_defaultParameters.put("features", "probability-lexical_weighting-reverse_lexical_weighting-penalty");
		_defaultParameters.put("useAdditionalInformation", "false");
		// '+'-separated prunner specs; this default is a loss prunner with
		// pre-tuned weight vectors.
		_defaultParameters.put("prunners", "loss:0.1;[0.0117595,0.0910752,0.0692457,0.111499,0.0155662];[-0.00454431,0.0509002,0.103889,0.0732293,0.054077,-0.0286458,0.0497597]");
		//_defaultParameters.put("prunners", "noprune");
		// Corpus size limit and per-sentence length cap — presumably sentence
		// counts/lengths; TODO confirm units against BilingualCorpus.
		_defaultParameters.put("size", "22");
		_defaultParameters.put("maxSentenceSize", "999");
	}
	
	// Character encoding used for the extraction reader and the output streams.
	protected static String encoding;
	// Path of the Moses-format phrase table to write (a ".headers" sidecar is
	// written next to it).
	protected static String output;

	// NOTE(review): never assigned anywhere in this class, so the calculator
	// handed to LexicalWeightDataCalc in main() is always null — confirm whether
	// a subclass is expected to set it.
	protected static AbstractLexicalWeightingCalculator lexicalWeightCalc; 
	
	// Printer names ("normal" or "chinese_split") per side; see getPrintSpec().
	protected static String targetTextPrinter;
	protected static String sourceTextPrinter;
		
	// Input files produced by the phrase-extraction step.
	protected static String phraseTableFile;
	protected static String extractionFile;
	protected static String extractionHeadersFile;
	protected static String extractionInfoFile;
	// Parsed "features" ('-'-separated) and "prunners" ('+'-separated) options.
	protected static List<String> features;
	protected static List<String> prunners;
	protected static boolean useAdditionalInformation;
	
	// Corpus description file and corpus size limits (see static defaults).
	protected static String corpusDescription;
	protected static int size; 
	protected static int maxSentenceSize; 
	
	//extraction info parameters: maximum phrase lengths recorded at extraction time
	protected static int maxSourceSize=0;
	protected static int maxTargetSize=0;
	
	//resources shared between the global feature calculators
	protected static SourceTargetIndexResource sourceTargetCountResource;
	protected static TranslationGraphResource translationGraphResource;
	
	/**
	 * Copies all run-time settings out of the merged Properties object into the
	 * typed static fields used throughout the build.
	 */
	protected static void initArguments(){
		Properties props = _parameters.getProperties();

		corpusDescription = props.getProperty("corpusDescription");
		size = Integer.parseInt(props.getProperty("size"));
		maxSentenceSize = Integer.parseInt(props.getProperty("maxSentenceSize"));
		encoding = props.getProperty("encoding");
		output = props.getProperty("output");

		targetTextPrinter = props.getProperty("targetPhrasePrinter");
		sourceTextPrinter = props.getProperty("sourcePhrasePrinter");

		extractionFile = props.getProperty("extract_file");
		extractionHeadersFile = props.getProperty("headers");
		extractionInfoFile = props.getProperty("info");

		// "features" is '-'-separated, "prunners" is '+'-separated ('+' must be
		// escaped for the regex-based split).
		features = Arrays.asList(props.getProperty("features").split("-"));
		prunners = Arrays.asList(props.getProperty("prunners").split("\\+"));

		useAdditionalInformation = Boolean.parseBoolean(props.getProperty("useAdditionalInformation"));
	}
	
	/** Logs the general parameters of this run to stdout. */
	protected static void printArguments(){
		final String[] reportLines = {
				"----General Parameters----",
				"outputDir: " + output,
				"encoding: " + encoding,
		};
		for (String reportLine : reportLines) {
			System.out.println(reportLine);
		}
	}
		
	/**
	 * Maps a smoothing-scheme name to its probability calculator, or null when
	 * the name is unknown. All supported schemes require phrase-count
	 * normalization, so the corresponding flag is raised as a side effect.
	 *
	 * @param name "fixed_discount-&lt;d&gt;", "kneser_ney_discount" or
	 *             "modified_kneser_ney_discount"
	 */
	public static PhraseProbabilityCalculator getPhraseProbabilityCalculator(String name){
		// Parameterized form: "fixed_discount-<d>" carries the discount value.
		if (name.startsWith("fixed_discount-")) {
			requirePhraseCountNormalization = true;
			double discount = Double.parseDouble(name.split("-")[1]);
			return new DiscountSmoothingProbabilityCalc(new FixedDiscount(discount), new KneserNeyDistribution());
		}
		switch (name) {
			case "kneser_ney_discount":
				requirePhraseCountNormalization = true;
				return new DiscountSmoothingProbabilityCalc(new KneserNeyDiscount(), new KneserNeyDistribution());
			case "modified_kneser_ney_discount":
				requirePhraseCountNormalization = true;
				return new DiscountSmoothingProbabilityCalc(new ModifiedKneserNeyDiscount(), new KneserNeyDistribution());
			default:
				return null;
		}
	}
	
	/**
	 * Builds a print specification from the configured printer names for the
	 * source and target side.
	 */
	public static PrintSpecification getPrintSpec(String sourcePrinterName,String targetPrinterName){
		return new PrintSpecification(createPrinter(sourcePrinterName), createPrinter(targetPrinterName));
	}

	/**
	 * Maps a printer name to its implementation: "normal" or "chinese_split";
	 * null for any other name (matching the original behavior).
	 */
	private static PhrasePrinter createPrinter(String printerName){
		if (printerName.equals("normal")) {
			return new NormalPrinter();
		}
		if (printerName.equals("chinese_split")) {
			return new ChineseSplitedPrinter();
		}
		return null;
	}
	
	public static void main(String[] args) throws Exception{
		// Parameter precedence: built-in defaults < property file (args[0]) < CLI args.
		_parameters.loadParametersFromMap(_defaultParameters);
		_parameters.loadFromPropertyFile(args[0]);
		_parameters.loadParametersFromCommandLineArgs(args);
		_parameters.setRequiredParameters(REQUIRED);
		String[] missingProps = _parameters.getMissingParamters();

		// Abort with one message listing every missing required parameter.
		if(missingProps.length > 0){
			String msg = "";
			for (String prop : missingProps){
				msg += prop + " ";
			}			
			throw new Exception("Missing Properties: " + msg);			
		}
		
		initArguments();
		printArguments();
		loadExtractionProperties();
		loadCorpus();
		loadWordTries();
				
		// Register the per-phrase-pair data calculators consumed while reading
		// the extraction file.
		//PhraseProbabilityCalculator probCalc = getPhraseProbabilityCalculator(probCalcType);		
		//ExtractedPhrasePairDataManager.addData("adaptation_type", new AdaptationTypeCalc(null));
		ExtractedPhrasePairDataManager.addData("reordering", new ReorderingDataCalc(null));
		ExtractedPhrasePairDataManager.addData("score", new ScoreDataCalc(null));
		// NOTE(review): the forward lexical weight gets a null calculator while
		// the reverse one below receives lexicalWeightCalc (itself never
		// initialized in this class) — confirm this asymmetry is intended.
		ExtractedPhrasePairDataManager.addData("lexical_weight", new LexicalWeightDataCalc(null));
		LexicalWeightDataCalc calc = new LexicalWeightDataCalc(lexicalWeightCalc);
		calc.setReverse(true);
		ExtractedPhrasePairDataManager.addData("lexical_weight_reverse", calc);
		ExtractedPhrasePairDataManager.addData("reordering_graph", new ReorderingGraphDataCalc(sourceVocab, targetVocab));
		ExtractedPhrasePairDataManager.addData("sub_paths", new SubPathsDataCalc());
		ExtractedPhrasePairDataManager.addData("extracted_sentence_counts", new ExtractedSentenceDataCalc());
		
		// The reader stops (yields a batch) whenever the newest buffered source
		// or target line differs from the previous one, i.e. at group boundaries
		// in the sorted extraction file.
		ExtractionFileReader reader = new ExtractionFileReader(extractionHeadersFile, extractionFile, encoding) {
			@Override
			public boolean stopReading() {
				return sourceReadBuffer.size()>1 && 
				(!sourceReadBuffer.get(sourceReadBuffer.size()-1).equals(sourceReadBuffer.get(sourceReadBuffer.size()-2)) || 
				!targetReadBuffer.get(targetReadBuffer.size()-1).equals(targetReadBuffer.get(targetReadBuffer.size()-2)));
			}
		};
		
		// Build the in-memory phrase table, then run two feature passes: basic
		// features first, composite features after compaction.
		PhraseTableBuilder phraseTableBuilder = new PhraseTableBuilder(reader, sourceVocab, targetVocab);
		phraseTableBuilder.setUseAdditionalInformation(useAdditionalInformation);
		PhraseTable phraseTable = new PhraseTable(corpus,maxSourceSize,maxTargetSize,sourceVocab,targetVocab);
		initResources(phraseTable);
		addBasicFeatures(phraseTable);
		addRequiredFeaturesForCompositeFeatures(phraseTable);
		phraseTableBuilder.write(new MemoryPhraseWriter(phraseTable));
		// Repeated System.gc() calls: best-effort attempt to stabilize the
		// used-memory figure logged below (GC is only a hint to the JVM).
		System.gc(); System.gc(); System.gc(); System.gc();
	    System.gc(); System.gc(); System.gc(); System.gc();
	    System.gc(); System.gc(); System.gc(); System.gc();
	    System.gc(); System.gc(); System.gc(); System.gc();
	    System.err.println("used memory 1 = " + (Runtime.getRuntime().totalMemory() -
	      Runtime.getRuntime().freeMemory()));
		phraseTable.compact();
		phraseTable.calculateFeatures();
		phraseTable.removeAllFeatureListeners();
		// Second pass: composite features and prunners, each run only when
		// something was actually registered.
		initCompositeResources(phraseTable);
		addCompositeFeatures(phraseTable);
		if(phraseTable.getFeatureListenners().size() != 0){
			phraseTable.calculateFeatures();
		}
		addCompositePrunners(phraseTable);
		if(phraseTable.getPrunnerListenners().size() != 0){
			phraseTable.prunePhrasePairs();
		}
		System.gc(); System.gc(); System.gc(); System.gc();
	    System.gc(); System.gc(); System.gc(); System.gc();
	    System.gc(); System.gc(); System.gc(); System.gc();
	    System.gc(); System.gc(); System.gc(); System.gc();
	    System.err.println("used memory last = " + (Runtime.getRuntime().totalMemory() -
	      Runtime.getRuntime().freeMemory()));

		// Emit the header sidecar and the Moses-format phrase table.
		phraseTable.printHeaders(new PrintStream(new File(output+".headers"), encoding));
		phraseTable.printMosesFormat(new PrintStream(new File(output),encoding), new PrintSpecification());
	}
	
	/**
	 * Creates the source/target index-count resource and registers it on the
	 * table so it is populated while phrase pairs are added.
	 */
	protected static void initResources(PhraseTable table){
		table.addListenner(sourceTargetCountResource = new SourceTargetIndexResource());
	}
	
	// Hook for resources needed by the composite feature pass. The translation
	// graph resource is currently disabled, so this is intentionally a no-op
	// (which is also why addCompositeFeatures receives a null resource).
	protected static void initCompositeResources(PhraseTable table){
		//translationGraphResource = new TranslationGraphResource();
		//table.addListenner(translationGraphResource);
	}

	/**
	 * Registers one feature calculator per configured feature name; a feature's
	 * column index is its position in the "features" list. Unknown names are
	 * silently skipped (but still consume an index).
	 */
	protected static void addBasicFeatures(PhraseTable table){
		int featureIndex = 0;
		for (String feature : features) {
			if (feature.equals("probability")) {
				table.addListenner(new ProbabilityGlobalFeatureCalc(featureIndex, sourceTargetCountResource));
			} else if (feature.equals("reverse_probability")) {
				table.addListenner(new ProbabilityGlobalFeatureCalc(featureIndex, sourceTargetCountResource, true, true));
			} else if (feature.startsWith("adapted_probability")) {
				// Adapted probability is currently disabled; the original
				// implementation parsed per-type "type=weight" pairs from the
				// feature spec and added an AdaptedPhraseProbabilityFeatureCalc.
			} else if (feature.equals("lexical_weighting")) {
				table.addListenner(new LocalFeatureAdapter(new LexicalWeightingFeatureCalc(featureIndex, false)));
			} else if (feature.equals("reverse_lexical_weighting")) {
				LexicalWeightingFeatureCalc reverseCalc = new LexicalWeightingFeatureCalc(featureIndex, true);
				reverseCalc.setReverse(true);
				table.addListenner(new LocalFeatureAdapter(reverseCalc));
			} else if (feature.equals("penalty")) {
				table.addListenner(new LocalFeatureAdapter(new PenaltyFeatureCalc(featureIndex, false)));
			} else if (feature.startsWith("reordering")) {
				// Expected form: "reordering_<smoothing>".
				double smoothing = Double.parseDouble(feature.split("_")[1]);
				table.addListenner(new LocalFeatureAdapter(new ReorderingFeatureCalc(featureIndex, smoothing)));
			} else if (feature.equals("weighted_reordering")) {
				table.addListenner(new LocalFeatureAdapter(new WeightedReorderingFeatureCalc(featureIndex)));
			}
			featureIndex++;
		}
	}
	
	// Registers "hidden" features (indices starting after the user-configured
	// ones) that composite features and the loss prunner need as inputs, but
	// only when the user did not already request them explicitly.
	protected static void addRequiredFeaturesForCompositeFeatures(PhraseTable table){
		int i = features.size();
		for(String f : features){
			if(f.equals("phrase_weighting_longest") || f.equals("phrase_weighting_average") || f.equals("phrase_weighting_average_weighted")){				
				// Both probability directions are inputs to the phrase-weighting calcs.
				if(!features.contains("probability")){
					table.addListenner(new ProbabilityGlobalFeatureCalc(i ,sourceTargetCountResource, false, false));					
				}
				if(!features.contains("reverse_probability")){
					table.addListenner(new ProbabilityGlobalFeatureCalc(i ,sourceTargetCountResource, true, false));										
				}
				// NOTE(review): i advances once per phrase-weighting feature even when
				// two calcs were registered above — confirm the index bookkeeping.
				i++;
			}
		}
		for(String p : prunners){
			if(p.startsWith("loss")){
				// The loss prunner scores pairs from the full basic feature set, so
				// every feature the user omitted is added here (the extra boolean
				// argument apparently marks them as non-printed — TODO confirm).
				if(!features.contains("probability")){
					table.addListenner(new ProbabilityGlobalFeatureCalc(i ,sourceTargetCountResource, false, false));					
				}
				if(!features.contains("reverse_probability")){
					table.addListenner(new ProbabilityGlobalFeatureCalc(i ,sourceTargetCountResource, true, false));										
				}
				if(!features.contains("lexical_weighting")){
					table.addListenner(new LocalFeatureAdapter(new LexicalWeightingFeatureCalc(i, false, false)));
				}
				if(!features.contains("reverse_lexical_weighting")){
					LexicalWeightingFeatureCalc calc = new LexicalWeightingFeatureCalc(i, true, false);
					calc.setReverse(true);
					table.addListenner(new LocalFeatureAdapter(calc));
				}
				if(!features.contains("penalty")){
					table.addListenner(new LocalFeatureAdapter(new PenaltyFeatureCalc(i)));
				}
				// A "reordering_<smoothing>" entry anywhere in the list counts.
				boolean hasReordering = false;
				for(String f : features){
					if(f.startsWith("reordering")){
						hasReordering = true;
					}
				}
				if(!hasReordering){
					double smoothing = 0.5; //should be changed to a given value
					table.addListenner(new LocalFeatureAdapter(new ReorderingFeatureCalc(i, smoothing, false)));
				}
			}
			// NOTE(review): i increments for every prunner spec, matched or not,
			// and only once even when several calcs were added — confirm intended.
			i++;
		}
	}
	
	/**
	 * Registers the phrase-based lexical-weighting composite features that use
	 * the translation graph resource. The feature index i is the position of
	 * the feature name in the configured "features" list.
	 */
	protected static void addCompositeFeatures(PhraseTable table){
		int i = 0;
		for(String f : features){			
			if(f.equals("phrase_weighting_longest")){
				table.addListenner(new PhraseBasedLexicalWeightingLongestPathCalc(i,translationGraphResource, false));
			}
			else if(f.equals("reverse_phrase_weighting_longest")){
				table.addListenner(new PhraseBasedLexicalWeightingLongestPathCalc(i,translationGraphResource, true));
			}
			else if(f.equals("phrase_weighting_average")){
				table.addListenner(new PhraseBasedLexicalWeightingAverageCalc(i,translationGraphResource, false));
			}
			else if(f.equals("reverse_phrase_weighting_average")){
				table.addListenner(new PhraseBasedLexicalWeightingAverageCalc(i,translationGraphResource, true));
			}
			else if(f.equals("phrase_weighting_average_weighted")){
				table.addListenner(new PhraseBasedLexicalWeightingWeightedAverageCalc(i,translationGraphResource, false));
			}
			// BUG FIX: this branch previously repeated "phrase_weighting_average_weighted",
			// so it was unreachable and the reverse variant could never be registered.
			// Renamed to follow the longest/average "reverse_" naming pattern above.
			else if(f.equals("reverse_phrase_weighting_average_weighted")){
				table.addListenner(new PhraseBasedLexicalWeightingWeightedAverageCalc(i,translationGraphResource, true));
			}
			i++;
		}
	}
	
	/**
	 * Registers build-time prunners; only "subpaths:&lt;threshold&gt;" specs are
	 * handled here, all other entries are ignored.
	 */
	protected static void addPrunners(PhraseTableBuilder phraseTableBuilder){
		for (String prunnerSpec : prunners) {
			if (!prunnerSpec.startsWith("subpaths")) {
				continue;
			}
			double threshold = Double.parseDouble(prunnerSpec.split(":")[1]);
			phraseTableBuilder.addPrunner(new SubPhrasesPrunner(threshold));
		}
	}
	
	/**
	 * Registers post-build (composite) prunners parsed from the "prunners"
	 * option. Loss-based pruning is currently disabled.
	 */
	protected static void addCompositePrunners(PhraseTable phraseTable){
		for (String prunnerSpec : prunners) {
			if (prunnerSpec.startsWith("subpaths")) {
				double threshold = Double.parseDouble(prunnerSpec.split(":")[1]);
				phraseTable.addListenner(new LocalPrunnerAdaptor(new SubPhrasesPrunner(threshold)));
			}
			if (prunnerSpec.startsWith("loss")) {
				// Disabled in the original source:
				// phraseTable.addListenner(LossBasedPrunner.parseFromString(prunnerSpec.split(":")[1], translationGraphResource));
			}
		}
	}
	
	/**
	 * Loads the extraction-info properties file and initializes the maximum
	 * phrase sizes and the source/target trie file paths.
	 *
	 * FIX: the original leaked the FileInputStream and, on a load failure, only
	 * logged the exception and then crashed with a NullPointerException at
	 * parseInt below; it now fails fast with the underlying cause attached.
	 *
	 * @throws IllegalStateException if the file is missing or unreadable
	 */
	private static void loadExtractionProperties() {
		Properties properties = new Properties();
		try (FileInputStream in = new FileInputStream(extractionInfoFile)) {
			properties.load(in);
		} catch (FileNotFoundException e) {
			throw new IllegalStateException("could not find properties file: " + extractionInfoFile, e);
		} catch (IOException e) {
			throw new IllegalStateException("properties file: " + extractionInfoFile + " is invalid", e);
		}
		maxSourceSize = Integer.parseInt(properties.getProperty("maxSourceSize"));
		maxTargetSize = Integer.parseInt(properties.getProperty("maxTargetSize"));
		sourceTrieFile = properties.getProperty("sourceTrieFile");
		targetTrieFile = properties.getProperty("targetTrieFile");
	}
	
	/**
	 * Validates the corpus description file and loads the bilingual corpus with
	 * the configured size limits.
	 *
	 * FIX: the original never closed the validation FileInputStream and dropped
	 * the causing exception; try-with-resources closes the stream and the cause
	 * is attached to the thrown AssertionError (same exception type as before).
	 *
	 * @throws IOException if the corpus itself fails to load
	 */
	protected static void loadCorpus() throws IOException{
		// Pre-flight check that the description file exists and parses as a
		// properties file before handing the path to the corpus loader.
		Properties properties = new Properties();
		try (FileInputStream in = new FileInputStream(corpusDescription)) {
			properties.load(in);
		} catch (IOException e) {
			AssertionError error = new AssertionError("Wrong properties file " + corpusDescription);
			error.initCause(e);
			throw error;
		}
		corpus = new BilingualCorpus();
		corpus.loadFromFileDescription(corpusDescription, size, maxSentenceSize);
		corpus.initialize();
	}
	
	/**
	 * Loads and compacts the source and target word tries from the paths read
	 * by loadExtractionProperties().
	 *
	 * FIX: the original used FileReader (platform default charset) although the
	 * class carries an explicit "encoding" setting used for all output, and it
	 * never closed the readers; now the configured encoding is applied (falling
	 * back to UTF-8, the default) and readers are closed via try-with-resources.
	 */
	public static void loadWordTries() throws FileNotFoundException, IOException{
		String charset = (encoding != null) ? encoding : "UTF-8";
		sourceVocab = new WordTrie(maxSourceSize);
		try (BufferedReader reader = new BufferedReader(
				new InputStreamReader(new FileInputStream(sourceTrieFile), charset))) {
			sourceVocab.loadFromFile(reader);
		}
		sourceVocab.compactTrie();
		targetVocab = new WordTrie(maxTargetSize);
		try (BufferedReader reader = new BufferedReader(
				new InputStreamReader(new FileInputStream(targetTrieFile), charset))) {
			targetVocab.loadFromFile(reader);
		}
		targetVocab.compactTrie();
	}
}