package geppetto.cat.corpus;

import geppetto.cat.alignments.Alignment;
import geppetto.cat.alignments.AlignmentsSet;
import gnu.trove.TIntIntHashMap;
import gnu.trove.TObjectIntHashMap;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Properties;




/**
 * In-memory representation of a sentence-aligned bilingual corpus: a training
 * set plus optional test and development sets, each stored as parallel lists
 * of int-encoded sentences (one int per word, ids assigned by the shared
 * per-language vocabularies).
 */
public class BilingualCorpus {

	// Debug counter tracking how many BilingualCorpus instances were created
	public static int counter = 0;
	// Tags identifying which of the three sentence collections a call refers to
	public static final byte TRAIN_CORPUS = 0;
	public static final byte TEST_CORPUS = 1;
	public static final byte DEV_CORPUS = 2;

	// Class that knows how to assign a unique ID to a (source, foreign) pair
	// TODO see where it should go
	public Reference ref;

	// Corpus name, used when saving/checking the on-disk description
	String _name;
	// Word string -> integer id for the source language
	public TObjectIntHashMap<String> _vocabSource;
	// Integer id -> word string for the source language (list index == id)
	public ArrayList<String> _vocabNamesSource;
	// Word string -> integer id for the foreign language
	public TObjectIntHashMap<String> _vocabForeign;
	// Integer id -> word string for the foreign language (list index == id)
	public ArrayList<String> _vocabNamesForeign;
	// Longest sentence (in tokens) seen so far on each side
	int _maxSourceLen, _maxForeignLen;
	// Word id -> number of occurrences, per language
	public TIntIntHashMap _sourceWordsCounts;
	public TIntIntHashMap _foreignWordsCounts;

	// Training instances for both languages (parallel lists, same indices)
	public ArrayList<int[]> _trainSourceSentences;
	public ArrayList<int[]> _trainForeignSentences;

	// Cap on training sentences to keep, and how many were actually kept
	int _maxTrainingSentences;
	int _nrTrainingSentences;
	// Sentence-length cap applied while loading files (0 = not set yet)
	int _maxSentenceSize;
	// Sentence pairs skipped because one side exceeded the length cap
	int _discardedSentences;

	// Testing data. May be null
	public ArrayList<int[]> _testSourceSentences;
	public ArrayList<int[]> _testForeignSentences;
	public AlignmentsSet _gold;

	// Development data. May be null
	public ArrayList<int[]> _devSourceSentences;
	public ArrayList<int[]> _devForeignSentences;
	public AlignmentsSet _devGold;

	// TODO maybe this is not the best place
	// Empirical fertilities created using the dev set
	double[] _foreignFertilities;

	public double[] _sourceFertilities;

	// Vocabulary sizes, snapshotted by initialize()
	int _sourceVocabSize;
	int _foreignVocabSize;

	// Suffixes of each language used when outputting corpus files for moses
	// and other tools (e.g. "en", "fr")
	String _sourceSuffix;
	String _foreignSuffix;

	/**
	 * Releases vocabularies, word counts and fertility tables to free memory.
	 * After this call only the encoded sentence lists remain usable; any
	 * method that maps ids to strings or reads counts will NPE.
	 */
	public void trim() {
		_vocabSource = null;
		_vocabNamesSource = null;
		_vocabForeign = null;
		_vocabNamesForeign = null;
		// BUG FIX: the two count maps were nulled twice; once is enough.
		_sourceWordsCounts = null;
		_foreignWordsCounts = null;
		_foreignFertilities = null;
		_sourceFertilities = null;
	}

	/**
	 * Creates an empty corpus with fresh vocabularies, sentence lists and
	 * word-count tables.
	 *
	 * @param maxNumberOfSentences cap on training sentences to keep
	 * @param name corpus name
	 * @param sourcePrefix file suffix of the source language
	 * @param foreignPrefix file suffix of the foreign language
	 */
	public BilingualCorpus(int maxNumberOfSentences, String name,
			String sourcePrefix, String foreignPrefix) {
		_name = name;
		_maxTrainingSentences = maxNumberOfSentences;
		_sourceSuffix = sourcePrefix;
		_foreignSuffix = foreignPrefix;

		// Source-side structures
		_vocabSource = new TObjectIntHashMap<String>();
		_vocabNamesSource = new ArrayList<String>();
		_sourceWordsCounts = new TIntIntHashMap();
		_trainSourceSentences = new ArrayList<int[]>();
		_testSourceSentences = new ArrayList<int[]>();
		_devSourceSentences = new ArrayList<int[]>();

		// Foreign-side structures
		_vocabForeign = new TObjectIntHashMap<String>();
		_vocabNamesForeign = new ArrayList<String>();
		_foreignWordsCounts = new TIntIntHashMap();
		_trainForeignSentences = new ArrayList<int[]>();
		_testForeignSentences = new ArrayList<int[]>();
		_devForeignSentences = new ArrayList<int[]>();
	}

	/**
	 * Bare constructor used by reverse() and the static factories; fields
	 * are expected to be filled in afterwards. Logs the instance count for
	 * debugging.
	 */
	public BilingualCorpus() {
		counter++;
		System.out.println();
		System.out.println("BilingualCorpus instance nr " + counter);
		System.out.println();
	}

	/**
	 * Returns a new corpus with the source and foreign sides swapped.
	 * The returned corpus SHARES the underlying lists and maps with this
	 * one (a shallow swap, not a deep copy); gold alignment sets are
	 * reversed via their own reverse() method.
	 */
	public BilingualCorpus reverse() {
		BilingualCorpus res = new BilingualCorpus();
		res._name = _name;
		res._vocabForeign = _vocabSource;
		res._vocabSource = _vocabForeign;
		res._vocabNamesForeign = _vocabNamesSource;
		res._vocabNamesSource = _vocabNamesForeign;
		// BUG FIX: _maxTrainingSentences was assigned twice; kept once.
		res._maxTrainingSentences = _maxTrainingSentences;
		res._trainSourceSentences = _trainForeignSentences;
		res._trainForeignSentences = _trainSourceSentences;
		res._testSourceSentences = _testForeignSentences;
		res._testForeignSentences = _testSourceSentences;
		res._devSourceSentences = _devForeignSentences;
		res._devForeignSentences = _devSourceSentences;
		res._nrTrainingSentences = _nrTrainingSentences;
		res._maxSentenceSize = _maxSentenceSize;
		res._discardedSentences = _discardedSentences;
		if (_gold != null)
			res._gold = _gold.reverse();
		if (_devGold != null)
			res._devGold = _devGold.reverse();
		res._maxForeignLen = _maxSourceLen;
		res._maxSourceLen = _maxForeignLen;
		res._sourceFertilities = _foreignFertilities;
		res._foreignFertilities = _sourceFertilities;
		res._sourceSuffix = _foreignSuffix;
		res._foreignSuffix = _sourceSuffix;
		res._foreignWordsCounts = _sourceWordsCounts;
		res._sourceWordsCounts = _foreignWordsCounts;
		res._foreignVocabSize = _sourceVocabSize;
		res._sourceVocabSize = _foreignVocabSize;
		res.ref = new Reference(res);
		return res;
	}

	/** @return the corpus name. */
	public String getName() {
		return _name;
	}

	/** Prints the corpus description fields to stdout (debug helper). */
	public void printCorpusDescription() {
		System.out.println(_name);
		System.out.println(_maxSentenceSize);
		System.out.println(_maxTrainingSentences);
		System.out.println(_nrTrainingSentences);
	}

	/**
	 * Writes a small description file (name, max sentence size, max and used
	 * training sentences) into the given directory; checkDescription reads
	 * it back to validate cached data.
	 */
	public void saveDescription(String directory) {
		PrintStream f = null;
		try {
			// BUG FIX: the file was written as directory + "corpus-description"
			// (no separator) while checkDescription reads
			// directory + "/corpus-description"; use File(parent, child) so
			// both resolve to the same path. Also close the stream (it was
			// previously leaked).
			f = new PrintStream(new File(directory, "corpus-description"));
			f.println(_name);
			f.println(_maxSentenceSize);
			f.println(_maxTrainingSentences);
			f.println(_nrTrainingSentences);
		} catch (FileNotFoundException e) {
			e.printStackTrace();
			System.out.println("ERROR SAVING CORPUS DESCRIPTION");
			System.exit(1);
		} finally {
			if (f != null)
				f.close();
		}
	}

	/**
	 * Verifies that a previously saved corpus description (see
	 * saveDescription) matches this corpus' name, max sentence size and max
	 * number of training sentences.
	 *
	 * @return true if the description matches, false otherwise; exits the
	 *         JVM if the description file cannot be read
	 */
	public boolean checkDescription(String directory) {
		BufferedReader in = null;
		try {
			in = new BufferedReader(new FileReader(directory
					+ "/corpus-description"));

			String name = in.readLine();
			int maxSentences = Integer.parseInt(in.readLine());
			int maxTrainingSentences = Integer.parseInt(in.readLine());
			if (!name.equalsIgnoreCase(_name)) {
				System.out.println("Corpus name is not the same, got:" + name
						+ " have " + _name);
				return false;
			}
			if ((maxSentences != _maxSentenceSize)) {
				System.out
						.println("Corpus max sentence size is not the same got:"
								+ maxSentences + " have " + _maxSentenceSize);
				return false;
			}
			if (maxTrainingSentences != _maxTrainingSentences) {
				System.out.println("Corpus nr Training sentences is not the same got:" + maxTrainingSentences + " have " + _maxTrainingSentences);
				return false;
			}
		} catch (FileNotFoundException e) {
			e.printStackTrace();
			System.out.println("ERROR LOADING CORPUS DESCRIPTION");
			System.exit(1);
		} catch (IOException e) {
			e.printStackTrace();
			System.out.println("ERROR LOADING CORPUS DESCRIPTION");
			System.exit(1);
		} finally {
			// BUG FIX: the reader was previously never closed (resource leak).
			if (in != null) {
				try {
					in.close();
				} catch (IOException ignored) {
					// best-effort close
				}
			}
		}
		return true;
	}

	/**
	 * Factory: builds a corpus from a properties-file description with no
	 * sentence-length cap.
	 */
	public static BilingualCorpus getCorpusFromFileDescription(String file,
			int maxTrainingSentences) throws IOException {
		return getCorpusFromFileDescription(file, maxTrainingSentences,
				Integer.MAX_VALUE);
	}

	/**
	 * Factory: builds a corpus from a properties-file description, loads all
	 * referenced files and calls initialize() so the corpus is ready to use.
	 */
	public static BilingualCorpus getCorpusFromFileDescription(String file,
			int maxTrainingSentences, int maxSentenceSize) throws IOException {
		BilingualCorpus corpus = new BilingualCorpus();
		corpus.loadFromFileDescription(file, maxTrainingSentences, maxSentenceSize);
		corpus.initialize();
		return corpus;
	}
	
	/**
	 * Loads this corpus from a Java properties file with keys: name,
	 * training_file, source_suffix, target_suffix, and optionally
	 * wa_dev_file, wa_dev_gold_file, wa_test_file, wa_test_gold_file.
	 * Actual file names are built as &lt;prefix&gt;.&lt;suffix&gt;.
	 *
	 * All field (re)initialization is delegated to load(), which performs
	 * the same resets this method previously duplicated.
	 *
	 * @throws AssertionError if the properties file cannot be read
	 */
	public void loadFromFileDescription(String file,
			int maxTrainingSentences, int maxSentenceSize) throws IOException {
		Properties properties = new Properties();
		// Properties.load does not close the stream; close it ourselves.
		FileInputStream propStream = null;
		try {
			propStream = new FileInputStream(file);
			properties.load(propStream);
		} catch (IOException e) {
			throw new AssertionError("Wrong properties file " + file);
		} finally {
			if (propStream != null) {
				try {
					propStream.close();
				} catch (IOException ignored) {
					// best-effort close
				}
			}
		}
		String name = properties.getProperty("name");
		String training = properties.getProperty("training_file");
		String source = properties.getProperty("source_suffix");
		String target = properties.getProperty("target_suffix");

		String dev = properties.getProperty("wa_dev_file");
		String devGold = properties.getProperty("wa_dev_gold_file");
		String test = properties.getProperty("wa_test_file");
		String testGold = properties.getProperty("wa_test_gold_file");

		String trainingSource = training + "." + source;
		String trainingTarget = training + "." + target;

		String devSource = null;
		String devTarget = null;
		if (dev != null) {
			devSource = dev + "." + source;
			devTarget = dev + "." + target;
		}

		String testSource = null;
		String testTarget = null;
		if (test != null) {
			testSource = test + "." + source;
			testTarget = test + "." + target;
		}

		// BUG FIX: load(...) declares (devGold, testGold) BEFORE
		// (testSource, testTarget); the previous call passed the test file
		// names into the gold-file slots and vice versa, so the gold
		// alignments were read from the wrong files.
		load(name, source, target, trainingSource, trainingTarget, devSource,
				devTarget, devGold, testGold, testSource, testTarget,
				maxTrainingSentences, maxSentenceSize);
	}
	
	/**
	 * Loads the corpus from explicit file names, resetting all internal
	 * structures first. Dev and test sentences are also appended to the
	 * TRAINING data (without a length cap) so their vocabulary is covered;
	 * they are additionally stored in the dev/test collections only when a
	 * gold alignment file is given.
	 *
	 * NOTE(review): the parameter order puts (devGold, testGold) BEFORE
	 * (testSource, testTarget); callers must match this exact order.
	 *
	 * NOTE(review): the || tests below mean that if only one of
	 * devSource/devTarget (or testSource/testTarget) is null, addTrainFile
	 * is handed a null file name — presumably both are always set together;
	 * verify against callers.
	 */
	public void load(String name, String source, String target, String trainingSource, String trainingTarget, String devSource, String devTarget, String devGold, String testGold, String testSource, String testTarget, 
			int maxTrainingSentences, int maxSentenceSize) throws IOException {
		_name = name;
		_vocabForeign = new TObjectIntHashMap<String>();
		_vocabSource = new TObjectIntHashMap<String>();
		_vocabNamesForeign = new ArrayList<String>();
		_vocabNamesSource = new ArrayList<String>();
		_trainSourceSentences = new ArrayList<int[]>();
		_trainForeignSentences = new ArrayList<int[]>();
		_testSourceSentences = new ArrayList<int[]>();
		_testForeignSentences = new ArrayList<int[]>();
		_devSourceSentences = new ArrayList<int[]>();
		_devForeignSentences = new ArrayList<int[]>();
		_sourceWordsCounts = new TIntIntHashMap();
		_foreignWordsCounts = new TIntIntHashMap();

		_maxTrainingSentences = maxTrainingSentences;
		_sourceSuffix = source;
		_foreignSuffix = target;

		// Only the training data is filtered by maxSentenceSize.
		addTrainFile(trainingSource, trainingTarget,
				maxSentenceSize);
		if (devSource != null || devTarget != null) {
			// Dev sentences also join the training data, uncapped.
			addTrainFile(devSource, devTarget,
					Integer.MAX_VALUE);
			System.out.println("Adding Dev File");
			if (devGold != null) {
				addDevFile(devSource, devTarget);
				addDevGoldAligments(devGold);
			}
		}

		if (testSource != null || testTarget != null) {
			System.out.println("Adding Test File ");

			// Test sentences also join the training data, uncapped.
			addTrainFile(testSource, testTarget,
					Integer.MAX_VALUE);
			if (testGold != null) {
				addTestFile(testSource, testTarget);
				addGoldAligments(testGold);
			}
		}
		
	}

	/** @return length (in tokens) of the longest source sentence loaded. */
	public int getMaxSourceLen() {
		return _maxSourceLen;
	}

	/** @return length (in tokens) of the longest foreign sentence loaded. */
	public int getMaxForeignLen() {
		return _maxForeignLen;

	}

	/** @return the source word whose id is i. */
	public String getSourceString(int i) {
		return _vocabNamesSource.get(i);
	}

	/** @return the foreign word whose id is i. */
	public String getForeignString(int i) {
		return _vocabNamesForeign.get(i);
	}

	/** @return the gold alignments of the test set (may be null). */
	public AlignmentsSet getGold() {
		return _gold;
	}

	/**
	 * @param sentenceSource TRAIN_CORPUS, TEST_CORPUS or DEV_CORPUS
	 * @return number of sentences in that collection
	 * @throws IllegalArgumentException for any other tag value
	 */
	public int getNumSentences(byte sentenceSource) {
		switch (sentenceSource) {
		case TEST_CORPUS:
			return _testSourceSentences.size();
		case DEV_CORPUS:
			return _devSourceSentences.size();
		case TRAIN_CORPUS:
			return _nrTrainingSentences;
		default:
			throw new IllegalArgumentException("Unknown sentence type:"
					+ sentenceSource);
		}
	}

	/** @return number of training sentences actually kept. */
	public int getNumberOfTrainingSentences() {
		return _nrTrainingSentences;
	}

	/**
	 * Collects the indices of all sentences in the given collection whose
	 * FOREIGN-side length lies in (min, max].
	 *
	 * @throws IllegalArgumentException for an unknown sentenceSource tag
	 */
	public int[] getSentencesByLen(byte sentenceSource, int min, int max) {
		// Resolve the collection once instead of triplicating the loop.
		ArrayList<int[]> sourceList;
		ArrayList<int[]> foreignList;
		switch (sentenceSource) {
		case TEST_CORPUS:
			sourceList = _testSourceSentences;
			foreignList = _testForeignSentences;
			break;
		case DEV_CORPUS:
			sourceList = _devSourceSentences;
			foreignList = _devForeignSentences;
			break;
		case TRAIN_CORPUS:
			sourceList = _trainSourceSentences;
			foreignList = _trainForeignSentences;
			break;
		default:
			throw new IllegalArgumentException("Unknown sentence type:"
					+ sentenceSource);
		}
		// Iterate over the source list's size (as the original loops did)
		// while filtering on the foreign sentence length.
		ArrayList<Integer> kept = new ArrayList<Integer>();
		for (int idx = 0; idx < sourceList.size(); idx++) {
			int foreignLen = foreignList.get(idx).length;
			if (foreignLen <= max && foreignLen > min)
				kept.add(idx);
		}
		int[] indices = new int[kept.size()];
		for (int k = 0; k < indices.length; k++) {
			indices[k] = kept.get(k);
		}
		return indices;
	}

	/**
	 * Builds an AlignmentsSet containing the gold alignments of the given
	 * sentence indices.
	 *
	 * @return the selected alignments, or null for TRAIN_CORPUS (training
	 *         data carries no gold alignments) or any unknown tag
	 */
	public AlignmentsSet getAlignments(int[] sentences, byte sentenceSource) {
		AlignmentsSet set = new AlignmentsSet();
		switch (sentenceSource) {
		case TEST_CORPUS:
			for (int i = 0; i < sentences.length; i++) {
				set.addAlignment(_gold.get(sentences[i]));
			}
			return set;
		case DEV_CORPUS:
			for (int i = 0; i < sentences.length; i++) {
				set.addAlignment(_devGold.get(sentences[i]));
			}
			return set;
		}
		return null;
	}

	/**
	 * @return the full gold alignment set for TEST_CORPUS or DEV_CORPUS,
	 *         null for any other tag
	 */
	public AlignmentsSet getAlignments(byte sentenceSource) {
		switch (sentenceSource) {
		case TEST_CORPUS:
			return _gold;
		case DEV_CORPUS:
			return _devGold;
		}
		return null;
	}

	/** @return number of distinct source words (snapshotted by initialize()). */
	public int getSourceSize() {
		return _sourceVocabSize;
	}

	/**
	 * Total number of source tokens seen. Iterating ids 0..size-1 is valid
	 * because convertStringToIntArray assigns ids sequentially, so the count
	 * map's keys are contiguous.
	 */
	public int getTotalSourceWords() {
		int total = 0;
		for (int i = 0; i < _sourceWordsCounts.size(); i++) {
			total += _sourceWordsCounts.get(i);
		}
		return total;
	}

	/** Total number of foreign tokens seen (same contiguous-id assumption). */
	public int getTotalForeignWords() {
		int total = 0;
		for (int i = 0; i < _foreignWordsCounts.size(); i++) {
			total += _foreignWordsCounts.get(i);
		}
		return total;
	}

	/**
	 * Calculates the empirical fertilities of the gold alignment set for
	 * the given collection. Returns two parallel vectors indexed by source
	 * word id: the mean fertility and the sample variance of each word.
	 *
	 * Encoding: means[i] == -1 means word i never appeared in the evaluated
	 * sentences; otherwise mean/variance are estimated from the gold links.
	 *
	 * NOTE(review): a token with both sure links and possible links fires
	 * BOTH accumulation branches below, contributing two samples for that
	 * occurrence — confirm this double counting is intended.
	 *
	 * @param sentenceSource TEST_CORPUS or DEV_CORPUS (must have gold data)
	 * @return double[2][]: [0] = per-id means, [1] = per-id variances
	 */
	public double[][] getFertileSource(byte sentenceSource) {
		double[] means = new double[getSourceSize()];
		double[] variances = new double[getSourceSize()];
		// -1 marks "never seen"
		java.util.Arrays.fill(means, -1);
		java.util.Arrays.fill(variances, -1);
		// number of fertility samples accumulated per source word id
		int[] counts = new int[getSourceSize()];
		int numSents = getNumSentences(sentenceSource);
		ArrayList<Alignment> alignments = getAlignments(sentenceSource)
				.getAlignments();
		for (int i = 0; i < numSents; i++) {
			int[] s = getSourceSentence(i, sentenceSource);
			int[] f = getForeignSentence(i, sentenceSource);
			Alignment a = alignments.get(i);
			for (int si = 0; si < s.length; si++) {
				// Word was seen at least once: switch from -1 to a real zero
				if(means[s[si]] == -1){
					means[s[si]] = 0;
					variances[s[si]] = 0;
				}
				// Number of foreign words linked to this token as sure
				int yesCounts = 0;
				// Number of foreign words linked to this token as possible
				int seen = 0;
				for (int fi = 0; fi < f.length; fi++) {
					if (a.isSure(si, fi))
						yesCounts++;
					if(a.isPossible(si, fi)){
						seen++;
					}
				}
				// Accumulate the sum and sum of squares used below to derive
				// the mean and variance.
				if (yesCounts != 0) {
					counts[s[si]] += 1;
					means[s[si]] += yesCounts;
					variances[s[si]] += yesCounts*yesCounts;
				}
				if(seen != 0){
					//If seen as possible mark as fertility 1
					counts[s[si]] += 1;
					means[s[si]] += 1;
					variances[s[si]] +=1;
				}
			}
		}
		// Convert accumulated sums into mean and unbiased sample variance:
		//squareCounts/(n-1) - mean*mean*n/(n-1);
		for (int i = 0; i < means.length; i++) {
			if(means[i] != 0 && means[i] != -1){
				double n = counts[i];
				double mean = means[i]/n;
				double variance ;
				if(n>1){
					variance = variances[i]/(n-1) - mean*mean*n/(n-1);
				}else{
					// A single sample has no defined sample variance; use 0
					variance = 0;
				}
				means[i] =mean;
				variances[i]= variance;
				
				// Sanity check: a negative variance indicates an upstream bug
				if(variance < 0){
					System.out.println("BilingualCorpus: Varianve is negative can't be " + mean + " var " + variance);
					System.exit(-1);
				}
				
			}
		}
		double[][] results = new double[2][];
		results[0]=means;
		results[1]=variances;
		return results;
	}

	/** @return the source-language file suffix (e.g. "en"). */
	public String getSourceSufix() {
		return _sourceSuffix;
	}

	/** @return the foreign-language file suffix. */
	public String getForeignSufix() {
		return _foreignSuffix;
	}

	/** @return number of distinct foreign words (snapshotted by initialize()). */
	public int getForeignSize() {
		return _foreignVocabSize;
	}

	/** @return the source word with the given id. */
	public String getSourceWordById(int id) {
		return _vocabNamesSource.get(id);
	}

	/** @return the foreign word with the given id. */
	public String getForeignWordById(int id) {
		return _vocabNamesForeign.get(id);
	}

	/** @return id of the i-th source word of the given sentence. */
	public int getSourceWordId(int sentenceNumber, byte sentenceSource, int i) {
		return getSourceSentence(sentenceNumber, sentenceSource)[i];
	}

	/** @return id of the i-th foreign word of the given sentence. */
	public int getForeignWordId(int sentenceNumber, byte sentenceSource, int i) {
		return getForeignSentence(sentenceNumber, sentenceSource)[i];
	}

	/** @return the i-th source word (as a string) of the given sentence. */
	public String getSourceWord(int sentenceNumber, byte sentenceSource, int i) {
		return _vocabNamesSource.get(getSourceSentence(sentenceNumber,
				sentenceSource)[i]);
	}

	/** @return the i-th foreign word (as a string) of the given sentence. */
	public String getForeignWord(int sentenceNumber, byte sentenceSource, int i) {
		return _vocabNamesForeign.get(getForeignSentence(sentenceNumber,
				sentenceSource)[i]);
	}

	/**
	 * @return the encoded foreign sentence from the chosen collection, or
	 *         null for an unknown sentenceSource tag
	 */
	public int[] getForeignSentence(int sentenceNr, byte sentenceSource) {
		switch (sentenceSource) {
		case TRAIN_CORPUS:
			return _trainForeignSentences.get(sentenceNr);
		case TEST_CORPUS:
			return _testForeignSentences.get(sentenceNr);
		case DEV_CORPUS:
			return _devForeignSentences.get(sentenceNr);
		}
		return null;
	}

	/**
	 * @return the encoded source sentence from the chosen collection, or
	 *         null for an unknown sentenceSource tag
	 */
	public int[] getSourceSentence(int sentenceNr, byte sentenceSource) {
		switch (sentenceSource) {
		case TRAIN_CORPUS:
			return _trainSourceSentences.get(sentenceNr);
		case TEST_CORPUS:
			return _testSourceSentences.get(sentenceNr);
		case DEV_CORPUS:
			return _devSourceSentences.get(sentenceNr);
		}
		return null;
	}

	/**
	 * Renders a foreign sentence as a space-separated string. Keeps the
	 * original trailing space, which callers may rely on.
	 *
	 * @throws NullPointerException for an unknown sentenceSource tag
	 *         (unchanged behavior: the lookup returns null)
	 */
	public String getForeignSentenceString(int sentenceNr, byte sentenceSource) {
		// Reuse the collection dispatch instead of duplicating the switch.
		int[] s = getForeignSentence(sentenceNr, sentenceSource);
		// StringBuilder avoids the O(n^2) cost of repeated String +=.
		StringBuilder sent = new StringBuilder();
		for (int i = 0; i < s.length; i++) {
			sent.append(getForeignWordById(s[i])).append(' ');
		}
		return sent.toString();
	}

	/**
	 * Renders a source sentence as a space-separated string. Keeps the
	 * original trailing space, which callers may rely on.
	 *
	 * @throws NullPointerException for an unknown sentenceSource tag
	 *         (unchanged behavior: the lookup returns null)
	 */
	public String getSourceSentenceString(int sentenceNr, byte sentenceSource) {
		// Reuse the collection dispatch instead of duplicating the switch.
		int[] s = getSourceSentence(sentenceNr, sentenceSource);
		// StringBuilder avoids the O(n^2) cost of repeated String +=.
		StringBuilder sent = new StringBuilder();
		for (int i = 0; i < s.length; i++) {
			sent.append(getSourceWordById(s[i])).append(' ');
		}
		return sent.toString();
	}

	/**
	 * @return token count of the chosen foreign sentence, or -1 for an
	 *         unknown sentenceSource tag
	 */
	public int getForeignSentenceLength(int sentenceNr, byte sentenceSource) {
		switch (sentenceSource) {
		case TRAIN_CORPUS:
			return _trainForeignSentences.get(sentenceNr).length;
		case TEST_CORPUS:
			return _testForeignSentences.get(sentenceNr).length;
		case DEV_CORPUS:
			return _devForeignSentences.get(sentenceNr).length;
		}
		return -1;
	}

	/**
	 * @return token count of the chosen source sentence, or -1 for an
	 *         unknown sentenceSource tag
	 */
	public int getSourceSentenceLength(int sentenceNr, byte sentenceSource) {
		switch (sentenceSource) {
		case TRAIN_CORPUS:
			return _trainSourceSentences.get(sentenceNr).length;
		case TEST_CORPUS:
			return _testSourceSentences.get(sentenceNr).length;
		case DEV_CORPUS:
			return _devSourceSentences.get(sentenceNr).length;
		}
		return -1;
	}


	/** Adds a pair of parallel files (by name) to the training data. */
	public void addTrainFile(String sourceName, String foreignName,
			int maxSentenceSize) throws IOException {

		addTrainFile(new File(sourceName), new File(foreignName),
				maxSentenceSize);
	}

	/**
	 * Adds a pair of parallel files to the training data, skipping sentence
	 * pairs where either side is longer than maxSentenceSize, and updates
	 * the kept/discarded counters.
	 */
	public void addTrainFile(File sourceFile, File foreignFile,
			int maxSentenceSize) throws IOException {
		int[] result = addFile(sourceFile, foreignFile, maxSentenceSize,
				_nrTrainingSentences, _discardedSentences,
				_maxTrainingSentences, _trainSourceSentences,
				_trainForeignSentences);
		_nrTrainingSentences = result[0];
		_discardedSentences = result[1];
	}

	/** Adds a pair of parallel files to the test set (no length cap). */
	public void addTestFile(String sourceName, String foreignName)
			throws IOException {
		@SuppressWarnings("unused")
		int[] result = addFile(new File(sourceName), new File(foreignName),
				Integer.MAX_VALUE, _testSourceSentences.size(), 0,
				Integer.MAX_VALUE, _testSourceSentences, _testForeignSentences);
	}

	/** Adds a pair of parallel files to the dev set (no length cap). */
	public void addDevFile(String sourceName, String foreignName)
			throws IOException {
		@SuppressWarnings("unused")
		int[] result = addFile(new File(sourceName), new File(foreignName),
				Integer.MAX_VALUE, _devSourceSentences.size(), 0,
				Integer.MAX_VALUE, _devSourceSentences, _devForeignSentences);
	}

	/**
	 * Reads two parallel files line by line, encodes each kept sentence pair
	 * and appends it to the given lists.
	 *
	 * A pair is kept only when BOTH sides have at most maxSentenceSize
	 * tokens; otherwise the discarded counter is bumped. Reading stops as
	 * soon as maxSentences pairs have been accumulated, or when either file
	 * runs out of lines.
	 *
	 * @return {number of kept sentences so far, number discarded so far}
	 */
	int[] addFile(File sourceFile, File foreignFile, int maxSentenceSize,
			int numberSentences, int discardedSentences, int maxSentences,
			ArrayList<int[]> sourceSentences, ArrayList<int[]> foreignSentences)
			throws IOException {

		// Track the corpus-wide length cap: the first call sets it, later
		// calls can only shrink it.
		if (_maxSentenceSize == 0) {
			_maxSentenceSize = maxSentenceSize;
		} else if (maxSentenceSize <= _maxSentenceSize) {
			_maxSentenceSize = maxSentenceSize;
		}
		int[] result = { numberSentences, discardedSentences };
		if (result[0] >= maxSentences)
			return result;
		BufferedReader sourceReader = null;
		BufferedReader foreignReader = null;
		try {
			sourceReader = new BufferedReader(new InputStreamReader(
					new FileInputStream(sourceFile), "UTF8"));
			foreignReader = new BufferedReader(new InputStreamReader(
					new FileInputStream(foreignFile), "UTF8"));
			String sourceSentence = sourceReader.readLine();
			String foreignSentence = foreignReader.readLine();
			while (sourceSentence != null && foreignSentence != null) {
				if (result[0] >= maxSentences)
					return result;
				String[] sourceTokens = sourceSentence.split("\\s+");
				int sourceLen = sourceTokens.length;
				String[] foreignTokens = foreignSentence.split("\\s+");
				int foreignLen = foreignTokens.length;
				if (sourceLen <= maxSentenceSize && foreignLen <= maxSentenceSize) {
					// Track the longest sentence kept on each side
					if (sourceLen > _maxSourceLen)
						_maxSourceLen = sourceLen;
					if (foreignLen > _maxForeignLen)
						_maxForeignLen = foreignLen;
					sourceSentences.add(convertStringToIntArray(sourceTokens,
							sourceLen, _vocabNamesSource, _vocabSource,
							_sourceWordsCounts));
					foreignSentences.add(convertStringToIntArray(foreignTokens,
							foreignLen, _vocabNamesForeign, _vocabForeign,
							_foreignWordsCounts));
					result[0]++;
				} else {
					result[1]++;
				}
				sourceSentence = sourceReader.readLine();
				foreignSentence = foreignReader.readLine();
			}
		} finally {
			// BUG FIX: both readers were previously leaked (never closed,
			// including on the early return above).
			if (sourceReader != null) {
				try {
					sourceReader.close();
				} catch (IOException ignored) {
					// best-effort close
				}
			}
			if (foreignReader != null) {
				foreignReader.close();
			}
		}
		return result;
	}

	/** Loads the gold alignments for the TEST set from a NAACL-format file. */
	public void addGoldAligments(String fileName) throws IOException {
		_gold = AlignmentsSet.getAlignmentFromFileNAACL(fileName,
				BilingualCorpus.TEST_CORPUS, this, 0);
	}

	/** Loads the gold alignments for the DEV set from a NAACL-format file. */
	public void addDevGoldAligments(String fileName) throws IOException {
		_devGold = AlignmentsSet.getAlignmentFromFileNAACL(fileName,
				BilingualCorpus.DEV_CORPUS, this, 0);
	}

	/**
	 * Adds every file of sourceDirectoryName, paired with the same-named
	 * file in foreignDirectoryName, to the training data, stopping once the
	 * training-sentence cap is reached.
	 *
	 * @throws FileNotFoundException if either path is not a listable directory
	 * @throws RuntimeException if the directories hold different file counts
	 */
	public void addTrainDirectory(String sourceDirectoryName,
			String foreignDirectoryName, int maxSentenceSize)
			throws IOException {
		if (_nrTrainingSentences >= _maxTrainingSentences)
			return;
		File sourceDir = new File(sourceDirectoryName);
		File foreignDir = new File(foreignDirectoryName);
		File[] sourceFiles = sourceDir.listFiles();
		File[] foreignFiles = foreignDir.listFiles();
		// BUG FIX: listFiles() returns null for a missing or unreadable
		// directory; fail with a clear message instead of an NPE.
		if (sourceFiles == null)
			throw new FileNotFoundException("Not a readable directory: "
					+ sourceDirectoryName);
		if (foreignFiles == null)
			throw new FileNotFoundException("Not a readable directory: "
					+ foreignDirectoryName);
		// Sort for a deterministic, platform-independent processing order.
		Arrays.sort(sourceFiles);
		if (sourceFiles.length != foreignFiles.length)
			throw new RuntimeException(
					"source and foreign have different number of files");
		for (File sf : sourceFiles) {
			String fpath = foreignDirectoryName + "/" + sf.getName();
			// >= instead of ==: robust even if the counter ever overshoots.
			if (_nrTrainingSentences >= _maxTrainingSentences)
				return;
			addTrainFile(sf, new File(fpath), maxSentenceSize);
		}
	}

	// TODO This method has to be called
	/**
	 * Finalizes loading: snapshots the vocabulary sizes and builds the
	 * Reference helper. Must be called after all add*File calls and before
	 * using getSourceSize()/getForeignSize().
	 */
	public void initialize() {
		System.out.println("Initializing Corpus");

		_sourceVocabSize = _vocabSource.size();
		_foreignVocabSize = _vocabForeign.size();
		/*
		 * if(_devGold != null){ //Calculate empirical fertilities
		 * _foreignFertilities =
		 * AlignmentEvaluator.calculateFertilities(_devGold.getAlignments(),
		 * this, DEV_CORPUS); BilingualCorpus rev = this.reverse();
		 * _sourceFertilities =
		 * AlignmentEvaluator.calculateFertilities(rev._devGold.getAlignments(),
		 * rev, DEV_CORPUS); }
		 */

		ref = new Reference(this);
		System.out.println("Finished Initializing Corpus: Used "
				+ _nrTrainingSentences + " sentences, discarded "
				+ _discardedSentences);
	}

	/** Prints a vocabulary, one word per line, prefixed by its 1-based rank. */
	public void printVocab(ArrayList<String> vocab) {
		int rank = 1;
		for (String word : vocab) {
			System.out.println(rank + " - " + word);
			rank++;
		}
	}

	/**
	 * @return true if any word of the chosen source sentence is rare
	 *         (fewer than 5 occurrences, per sourceRareWord)
	 */
	public boolean sourceSentenceContainsRareWord(int sentenceNr,
			byte sentenceSource) {
		for (int wordId : getSourceSentence(sentenceNr, sentenceSource)) {
			if (sourceRareWord(wordId))
				return true;
		}
		return false;
	}

	/** @return true if the foreign word occurs fewer than 5 times. */
	public boolean foreignRareWord(int foreingId) {
		// Delegate to the threshold overload with the default cutoff of 5.
		return foreignRareWord(foreingId, 5);
	}

	/** @return true if the foreign word occurs fewer than nr times. */
	public boolean foreignRareWord(int foreingId, int nr) {
		return _foreignWordsCounts.get(foreingId) < nr;
	}

	/** @return number of occurrences of the given source word id. */
	public int getSourceWordCounts(int sourceId) {
		return _sourceWordsCounts.get(sourceId);
	}

	/** @return true if the source word occurs fewer than 5 times. */
	public boolean sourceRareWord(int sourceId) {
		// Delegate to the threshold overload with the default cutoff of 5.
		return sourceRareWord(sourceId, 5);
	}

	/** @return true if the source word occurs fewer than nr times. */
	public boolean sourceRareWord(int sourceId, int nr) {
		return _sourceWordsCounts.get(sourceId) < nr;
	}

	/**
	 * @return true if the empirical fertility of the foreign word is 2 or
	 *         more (requires _foreignFertilities to be populated)
	 */
	public boolean foreignHighFertilityWord(int foreignId) {
		return _foreignFertilities[foreignId] >= 2;
	}

	/**
	 * @return percentage of foreign word TYPES occurring fewer than 5 times
	 *         (relies on ids being contiguous in [0, size))
	 */
	public double numberOfForeignRareWordTypes() {
		int rareTypes = 0;
		int totalTypes = _foreignWordsCounts.size();
		for (int id = 0; id < totalTypes; id++) {
			if (_foreignWordsCounts.get(id) < 5)
				rareTypes++;
		}
		return rareTypes * 100.0 / totalTypes;
	}

	/**
	 * @return percentage of source word TYPES occurring fewer than 5 times
	 *         (relies on ids being contiguous in [0, size))
	 */
	public double numberOfSourceRareWordTypes() {
		int rareTypes = 0;
		int totalTypes = _sourceWordsCounts.size();
		for (int id = 0; id < totalTypes; id++) {
			if (_sourceWordsCounts.get(id) < 5)
				rareTypes++;
		}
		return rareTypes * 100.0 / totalTypes;
	}

	// Returns the id of the given source word.
	// NOTE(review): the behavior for unknown words depends on the trove
	// map's get() default for missing keys — verify before relying on it.
	public int getSourceWordIndex(String word){
		return _vocabSource.get(word);
	}
	
	// Returns the id of the given foreign word (same missing-key caveat).
	public int getForeignWordIndex(String word){
		return _vocabForeign.get(word);
	}
	
	/**
	 * @return percentage of foreign word TOKENS belonging to types that
	 *         occur fewer than 5 times
	 */
	public double numberOfForeignRareWordTokens() {
		int rareTokens = 0;
		int totalTokens = 0;
		for (int id = 0; id < _foreignWordsCounts.size(); id++) {
			int occurrences = _foreignWordsCounts.get(id);
			if (occurrences < 5)
				rareTokens += occurrences;
			totalTokens += occurrences;
		}
		return rareTokens * 100.0 / totalTokens;
	}

	/**
	 * @return percentage of source word TOKENS belonging to types that
	 *         occur fewer than 5 times
	 */
	public double numberOfSourceRareWordTokens() {
		int rareTokens = 0;
		int totalTokens = 0;
		for (int id = 0; id < _sourceWordsCounts.size(); id++) {
			int occurrences = _sourceWordsCounts.get(id);
			if (occurrences < 5)
				rareTokens += occurrences;
			totalTokens += occurrences;
		}
		return rareTokens * 100.0 / totalTokens;
	}
	
	
	
	/**
	 * Small driver: loads a corpus from a properties description file and
	 * prints rare-word statistics.
	 *
	 * Usage: BilingualCorpus &lt;description-file&gt; &lt;max-training-sentences&gt;
	 */
	public static void main(String[] args) throws IOException {
		// Fail with a usage message instead of ArrayIndexOutOfBoundsException.
		if (args.length < 2) {
			System.out.println("Usage: BilingualCorpus <description-file> <max-training-sentences>");
			return;
		}
		String corpusFile = args[0];
		int size = Integer.parseInt(args[1]);

		// Sentence length is capped at 40 tokens for these statistics.
		BilingualCorpus corpus = BilingualCorpus.getCorpusFromFileDescription(
				corpusFile, size, 40);
		System.out.println("Number of source rare words types" + corpus.numberOfSourceRareWordTypes());
		System.out.println("Number of target rare words types" + corpus.numberOfForeignRareWordTypes());

		System.out.println("Number of source rare words tokens" + corpus.numberOfSourceRareWordTokens());
		System.out.println("Number of target rare words tokens" + corpus.numberOfForeignRareWordTokens());

		System.out.println("Ended Corpus");

	}

	/**
	 * Renders an encoded foreign sentence as words joined by single spaces
	 * (no trailing space).
	 */
	public String getForeignString(int[] target) {
		// StringBuilder avoids the O(n^2) cost of repeated String +=;
		// a space before every word except the first yields the same
		// output as the original append-then-trim-last logic.
		StringBuilder sb = new StringBuilder();
		for (int i = 0; i < target.length; i++) {
			if (i > 0)
				sb.append(' ');
			sb.append(getForeignWordById(target[i]));
		}
		return sb.toString();
	}

	/**
	 * Renders an encoded source sentence as words joined by single spaces
	 * (no trailing space).
	 */
	public String getSourceString(int[] source) {
		StringBuilder sb = new StringBuilder();
		for (int i = 0; i < source.length; i++) {
			if (i > 0)
				sb.append(' ');
			sb.append(getSourceWordById(source[i]));
		}
		return sb.toString();
	}

	/** Maps an array of source words to their ids via the source vocabulary. */
	public int[] getSourceIds(String[] sent) {
		int[] ids = new int[sent.length];
		for (int i = 0; i < ids.length; i++) {
			ids[i] = _vocabSource.get(sent[i]);
		}
		return ids;
	}
	
	/** Maps an array of foreign words to their ids via the foreign vocabulary. */
	public int[] getTargetIds(String[] sent) {
		int[] ids = new int[sent.length];
		for (int i = 0; i < ids.length; i++) {
			ids[i] = _vocabForeign.get(sent[i]);
		}
		return ids;
	}
	
	/**
	 * Encodes a tokenized sentence as word ids, growing the vocabulary for
	 * unseen words and updating the per-word occurrence counts.
	 *
	 * Ids are assigned sequentially, so they are always contiguous in
	 * [0, vocab.size()).
	 *
	 * @param sent tokens of the sentence
	 * @param len number of tokens to encode (prefix of sent)
	 * @param vocabNames id -> word list, extended in place
	 * @param vocab word -> id map, extended in place
	 * @param counts id -> occurrence count, updated in place
	 * @return the sentence encoded as an array of word ids
	 */
	public int[] convertStringToIntArray(String[] sent, int len,
			ArrayList<String> vocabNames, TObjectIntHashMap<String> vocab,
			TIntIntHashMap counts) {
		int[] encoded = new int[len];
		for (int pos = 0; pos < len; pos++) {
			String token = sent[pos];
			if (vocab.containsKey(token)) {
				// Known word: reuse its id and bump its count.
				int id = vocab.get(token);
				encoded[pos] = id;
				counts.put(id, counts.get(id) + 1);
			} else {
				// New word: its id is the next free slot in vocabNames.
				int id = vocabNames.size();
				vocabNames.add(token);
				vocab.put(token, id);
				encoded[pos] = id;
				counts.put(id, 1);
			}
		}
		return encoded;
	}
}
