package geppetto.main;

import geppetto.cat.corpus.BilingualCorpus;
import geppetto.phraseHMM.WordTrie;
import geppetto.phraseHMM.lexicalWeighting.AbstractLexicalWeightingCalculator;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.ExtractedPhrasePairDataManager;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.AdaptationTypeCalc;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.ExtractedSentenceDataCalc;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.LexicalWeightDataCalc;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.ReorderingDataCalc;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.ReorderingGraphDataCalc;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.ScoreDataCalc;
import geppetto.phraseHMM.phraseExtraction.extractedphrase.data.SubPathsDataCalc;
import geppetto.phraseProbability.PhraseProbabilityCalculator;
import geppetto.phraseProbability.smoothing.DiscountSmoothingProbabilityCalc;
import geppetto.phraseProbability.smoothing.FixedDiscount;
import geppetto.phraseProbability.smoothing.KneserNeyDiscount;
import geppetto.phraseProbability.smoothing.KneserNeyDistribution;
import geppetto.phraseProbability.smoothing.ModifiedKneserNeyDiscount;
import geppetto.phraseTable.ChineseSplitedPrinter;
import geppetto.phraseTable.NormalPrinter;
import geppetto.phraseTable.PhrasePrinter;
import geppetto.phraseTable.PrintSpecification;
import geppetto.phraseTable.builder.ExtractionFileReader;
import geppetto.phraseTable.builder.MosesPhraseWriter;
import geppetto.phraseTable.builder.PhraseTableBuilder;
import geppetto.phraseTable.phrase.feature.calc.local.AdaptedPhraseProbabilityFeatureCalc;
import geppetto.phraseTable.phrase.feature.calc.local.LexicalWeightingFeatureCalc;
import geppetto.phraseTable.phrase.feature.calc.local.PenaltyFeatureCalc;
import geppetto.phraseTable.phrase.feature.calc.local.ProbabilityFeatureCalc;
import geppetto.phraseTable.phrase.feature.calc.local.ReorderingFeatureCalc;
import geppetto.phraseTable.phrase.feature.calc.local.WeightedReorderingFeatureCalc;
import geppetto.phraseTable.prunning.local.SubPhrasesPrunner;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Properties;




/**
 * Command-line entry point that builds a Moses-format phrase table from a
 * phrase-extraction file. Configuration is merged from three sources, in
 * increasing priority: built-in defaults, a property file ({@code args[0]})
 * and command-line overrides. The resulting table is written with
 * {@link MosesPhraseWriter} to the {@code output} path.
 */
public class CreatePhraseTable {
	// Merged runtime configuration (defaults < property file < CLI args).
	protected static GeppettoParameters _parameters = new GeppettoParameters();
	protected static HashMap<String, String> _defaultParameters = new HashMap<String, String>();
	// Properties that must be present after all sources are merged.
	protected static String[] REQUIRED = new String[] {"extract_file", "output"};
	// Set to true by getPhraseProbabilityCalculator() for discount-based smoothing.
	protected static boolean requirePhraseCountNormalization = false;
	protected static BilingualCorpus corpus;
	protected static WordTrie sourceVocab;
	protected static WordTrie targetVocab;
	protected static String sourceTrieFile;
	protected static String targetTrieFile;
	// When true, the corpus and the two vocabularies are swapped before building.
	protected static boolean rev;

	static{
		_defaultParameters.put("encoding", "UTF-8");
//		_properties.put("output", "");		
		_defaultParameters.put("targetPhrasePrinter", "normal");
		_defaultParameters.put("sourcePhrasePrinter", "normal");
		_defaultParameters.put("features", "probability-lexical_weighting-reverse_lexical_weighting-penalty");
		_defaultParameters.put("useAdditionalInformation", "false");
		_defaultParameters.put("prunners", "noprune");
		_defaultParameters.put("size", "22");
		_defaultParameters.put("maxSentenceSize", "999");
	}
	
	protected static String encoding;
	protected static String output;

	// NOTE(review): never assigned anywhere in this file, so the reverse
	// lexical-weight calc built in main() receives null — confirm this is
	// intended or wired up elsewhere.
	protected static AbstractLexicalWeightingCalculator lexicalWeightCalc; 
	
	protected static String targetTextPrinter;
	protected static String sourceTextPrinter;
		
	protected static String phraseTableFile;
	protected static String extractionFile;
	protected static String extractionHeadersFile;
	protected static String extractionInfoFile;
	protected static String[] features;
	protected static String[] prunners;
	protected static boolean useAdditionalInformation;
	
	protected static String corpusDescription;
	protected static int size; 
	protected static int maxSentenceSize; 
	
	//extraction info parameters
	protected static int maxSourceSize=0;
	protected static int maxTargetSize=0;
	
	/**
	 * Copies the merged properties into the typed static fields above.
	 * Note: maxSourceSize/maxTargetSize have no entry in _defaultParameters,
	 * so Integer.parseInt(null) throws NumberFormatException when they are
	 * missing — presumably callers always supply them; verify before relying on it.
	 */
	protected static void initArguments(){
		corpusDescription = _parameters.getProperties().getProperty("corpusDescription");
		size = Integer.parseInt(_parameters.getProperties().getProperty("size"));
		maxSentenceSize = Integer.parseInt(_parameters.getProperties().getProperty("maxSentenceSize"));
		encoding = _parameters.getProperties().getProperty("encoding");
		output = _parameters.getProperties().getProperty("output");	
				
		targetTextPrinter = _parameters.getProperties().getProperty("targetPhrasePrinter");
		sourceTextPrinter = _parameters.getProperties().getProperty("sourcePhrasePrinter");
		
		extractionFile = _parameters.getProperties().getProperty("extract_file");
		extractionHeadersFile = _parameters.getProperties().getProperty("headers");
		extractionInfoFile = _parameters.getProperties().getProperty("info");

		// "features" and "prunners" are dash-separated lists, e.g. "probability-penalty".
		features = _parameters.getProperties().getProperty("features").split("-");
		prunners = _parameters.getProperties().getProperty("prunners").split("-");
		useAdditionalInformation = Boolean.parseBoolean(_parameters.getProperties().getProperty("useAdditionalInformation"));
		maxSourceSize = Integer.parseInt(_parameters.getProperties().getProperty("maxSourceSize"));
		maxTargetSize = Integer.parseInt(_parameters.getProperties().getProperty("maxTargetSize"));
		sourceTrieFile = _parameters.getProperties().getProperty("sourceTrieFile");
		targetTrieFile = _parameters.getProperties().getProperty("targetTrieFile");
		rev = Boolean.parseBoolean(_parameters.getProperties().getProperty("reverse"));

	}
	
	/** Echoes the general parameters to stdout for logging purposes. */
	protected static void printArguments(){
		System.out.println("----General Parameters----");
		System.out.println("outputDir: " + output);
		System.out.println("encoding: " + encoding);

	}
		
	/**
	 * Maps a smoothing name to a probability calculator, or null if unknown.
	 * "fixed_discount-&lt;d&gt;" carries its discount value after the dash.
	 * All three known variants require phrase-count normalization, recorded
	 * in {@link #requirePhraseCountNormalization} as a side effect.
	 */
	public static PhraseProbabilityCalculator getPhraseProbabilityCalculator(String name){
		if(name.startsWith("fixed_discount-")){
			requirePhraseCountNormalization = true;
			return new DiscountSmoothingProbabilityCalc(new FixedDiscount(Double.parseDouble(name.split("-")[1])), new KneserNeyDistribution());
		}
		if(name.equals("kneser_ney_discount")){
			requirePhraseCountNormalization = true;
			return new DiscountSmoothingProbabilityCalc(new KneserNeyDiscount(), new KneserNeyDistribution());
		}
		if(name.equals("modified_kneser_ney_discount")){
			requirePhraseCountNormalization = true;
			return new DiscountSmoothingProbabilityCalc(new ModifiedKneserNeyDiscount(), new KneserNeyDistribution());
		}
		return null;
	}
	
	/**
	 * Builds a print specification from the two printer names.
	 * Recognized names: "normal" and "chinese_split"; an unrecognized name
	 * leaves the corresponding printer null (preserved from the original).
	 */
	public static PrintSpecification getPrintSpec(String sourcePrinterName,String targetPrinterName){
		PhrasePrinter sourcePrinter = null;
		PhrasePrinter targetPrinter = null;

		if(sourcePrinterName.equals("normal")){
			sourcePrinter = new NormalPrinter();
		}else if(sourcePrinterName.equals("chinese_split")){
			sourcePrinter = new ChineseSplitedPrinter();
		}	
		if(targetPrinterName.equals("normal")){
			targetPrinter = new NormalPrinter();
		}
		else if(targetPrinterName.equals("chinese_split")){
			targetPrinter = new ChineseSplitedPrinter();
		}
		
		return new PrintSpecification(sourcePrinter, targetPrinter);
	}		
	
	/**
	 * Entry point: merges configuration, loads corpus and vocabularies,
	 * registers per-phrase-pair data calculators, then streams the extraction
	 * file through a {@link PhraseTableBuilder} into a Moses phrase table.
	 *
	 * @param args args[0] is a property file; remaining args override properties
	 * @throws Exception if any REQUIRED property is missing, or on I/O failure
	 */
	public static void main(String[] args) throws Exception{
		_parameters.loadParametersFromMap(_defaultParameters);
		_parameters.loadFromPropertyFile(args[0]);
		_parameters.loadParametersFromCommandLineArgs(args);
		_parameters.setRequiredParameters(REQUIRED);
		String[] missingProps = _parameters.getMissingParamters();

		if(missingProps.length > 0){
			String msg = "";
			for (String prop : missingProps){
				msg += prop + " ";
			}			
			throw new Exception("Missing Properties: " + msg);			
		}
		
		initArguments();
		printArguments();
		loadCorpus();
		loadWordTries();
				
		//PhraseProbabilityCalculator probCalc = getPhraseProbabilityCalculator(probCalcType);		
		//ExtractedPhrasePairDataManager.addData("adaptation_type", new AdaptationTypeCalc(null));
		ExtractedPhrasePairDataManager.addData("reordering", new ReorderingDataCalc(null));
		ExtractedPhrasePairDataManager.addData("score", new ScoreDataCalc(null));
		ExtractedPhrasePairDataManager.addData("lexical_weight", new LexicalWeightDataCalc(null));
		LexicalWeightDataCalc calc = new LexicalWeightDataCalc(lexicalWeightCalc);
		calc.setReverse(true);
		ExtractedPhrasePairDataManager.addData("lexical_weight_reverse", calc);
		//ExtractedPhrasePairDataManager.addData("reordering_graph", new ReorderingGraphDataCalc(sourceVocab, targetVocab));
		ExtractedPhrasePairDataManager.addData("sub_paths", new SubPathsDataCalc());
		ExtractedPhrasePairDataManager.addData("extracted_sentence_counts", new ExtractedSentenceDataCalc());
		
		ExtractionFileReader reader = new ExtractionFileReader(extractionHeadersFile, extractionFile, encoding) {
		
			@Override
			public boolean stopReading() {
				// Stop once the newest buffered source phrase differs from the
				// previous one, i.e. a complete group of identical source
				// phrases has been read.
				return sourceReadBuffer.size()>1 && !sourceReadBuffer.get(sourceReadBuffer.size()-1).equals(sourceReadBuffer.get(sourceReadBuffer.size()-2));
			}
		
		};
		if(rev){
			// Build the table in the opposite translation direction.
			corpus = corpus.reverse();
			WordTrie temp = sourceVocab;
			sourceVocab = targetVocab;
			targetVocab = temp;
		}
		PhraseTableBuilder phraseTableBuilder = new PhraseTableBuilder(reader, sourceVocab, targetVocab);
		phraseTableBuilder.setUseAdditionalInformation(useAdditionalInformation);
		addFeatures(phraseTableBuilder);
		addPrunners(phraseTableBuilder);
		phraseTableBuilder.write(new MosesPhraseWriter(
				new PrintStream(new File(output),encoding), corpus, sourceVocab, targetVocab, new PrintSpecification()));
	}

	/**
	 * Registers one feature calculator per entry of the "features" property.
	 * The running index {@code i} is the feature's column position and is
	 * advanced for every entry, recognized or not (preserved from the original).
	 */
	protected static void addFeatures(PhraseTableBuilder phraseTableBuilder){
		int i = 0;
		for(String f : features){
			if(f.equals("probability")){				
				phraseTableBuilder.addCalc(new ProbabilityFeatureCalc(i));				
			}
			else if(f.startsWith("adapted_probability")){
				// Entry format: "adapted_probability type1=w1 type2=w2 ...".
				String[] weights = f.split("\\s+");
				HashMap<String, Double> type_weights=new HashMap<String, Double>();
				for(int j = 1; j < weights.length; j++){
					String[] type_weight = weights[j].split("=");
					type_weights.put(type_weight[0], Double.parseDouble(type_weight[1]));
				}
				phraseTableBuilder.addCalc(new AdaptedPhraseProbabilityFeatureCalc(type_weights, i));				
			}			
			else if(f.equals("lexical_weighting")){
				phraseTableBuilder.addCalc(new LexicalWeightingFeatureCalc(i, false));
			}
			/*else if(f.equals("reverse_lexical_weighting")){
				LexicalWeightingFeatureCalc calc = new LexicalWeightingFeatureCalc(i, true);
			}*/
			else if(f.equals("penalty")){
				phraseTableBuilder.addCalc(new PenaltyFeatureCalc(i));				
			}
			else if(f.startsWith("reordering")){
				// Entry format: "reordering_<smoothing>".
				double smoothing = Double.parseDouble(f.split("_")[1]);
				phraseTableBuilder.addCalc(new ReorderingFeatureCalc(i,smoothing));
			}
			else if(f.equals("weighted_reordering")){
				phraseTableBuilder.addCalc(new WeightedReorderingFeatureCalc(i));				
			}
			i++;
		}
	}
	
	/**
	 * Registers pruners named in the "prunners" property. Only
	 * "subpaths:&lt;threshold&gt;" is recognized; anything else (including the
	 * default "noprune") is silently ignored.
	 */
	protected static void addPrunners(PhraseTableBuilder phraseTableBuilder){
		for(String p : prunners){
			if(p.startsWith("subpaths")){		
				phraseTableBuilder.addPrunner(new SubPhrasesPrunner(Double.parseDouble(p.split(":")[1])));
			}
		}
	}
	
	/**
	 * Loads the bilingual corpus from {@code corpusDescription}.
	 * The description file is opened first so a bad path fails fast with a
	 * clear message before the (expensive) corpus load starts.
	 *
	 * @throws IOException propagated from the corpus loader itself
	 */
	protected static void loadCorpus() throws IOException{
		Properties properties = new Properties();
		// try-with-resources: the FileInputStream was previously leaked on
		// both the success and the failure path.
		try (FileInputStream in = new FileInputStream(corpusDescription)) {
			properties.load(in);
		} catch (IOException e) {
			// Preserve the original cause instead of discarding it.
			throw new AssertionError("Wrong properties file " + corpusDescription, e);
		}
		corpus = new BilingualCorpus();
		corpus.loadFromFileDescription(corpusDescription, size, maxSentenceSize);
		corpus.initialize();
	}
	
	/**
	 * Loads and compacts the source and target vocabulary tries.
	 * Fixes two defects of the original: the readers are now closed
	 * (try-with-resources), and the configured {@code encoding} is used to
	 * decode the trie files instead of the platform default charset that
	 * {@code FileReader} silently applied — consistent with how every other
	 * file in this class is read and written.
	 *
	 * @throws FileNotFoundException if a trie file does not exist
	 * @throws IOException on any other read failure
	 */
	public static void loadWordTries() throws FileNotFoundException, IOException{
		sourceVocab = new WordTrie(maxSourceSize);
		try (BufferedReader in = new BufferedReader(
				new InputStreamReader(new FileInputStream(sourceTrieFile), encoding))) {
			sourceVocab.loadFromFile(in);
		}
//		sourceVocab.addPhrasesAllSentences(corpus._trainSourceSentences);
		sourceVocab.compactTrie();
		targetVocab = new WordTrie(maxTargetSize);
		try (BufferedReader in = new BufferedReader(
				new InputStreamReader(new FileInputStream(targetTrieFile), encoding))) {
			targetVocab.loadFromFile(in);
		}
//		targetVocab.addPhrasesAllSentences(corpus._trainForeignSentences);
		targetVocab.compactTrie();
	}
}
