package geppetto.cat.models;


import geppetto.cat.corpus.BilingualCorpus;

import java.io.PrintStream;
import java.util.Arrays;



/**
 * A dense count table over the sparse source/foreign word-pair index of a
 * {@link BilingualCorpus}. Stores expected counts for every (source, foreign)
 * pair present in the corpus, per-source-word normalizing sums, and counts for
 * foreign words aligned to the null word. Used to accumulate EM counts for
 * word-alignment models.
 *
 * <p>Not thread-safe: callers must synchronize externally if sharing a table
 * across threads.
 */
public class SparseCountTable {
	/**
	 * The counts for all pairs present in the corpus. Indexed by the
	 * pair-reference index obtained from {@code corpus.ref.getIndex(source, foreign)}.
	 */
	double[] counts;

	/** The normalizing sum for each source word: sums[s] == total count mass of s. */
	double[] sums;

	/** The counts for each foreign word aligned to the null word. */
	double[] nullCounts;

	/** The total count mass aligned to the null word (sum over nullCounts). */
	double nullSum;

	/** The corpus whose pair index ({@code corpus.ref}) this table is built over. */
	BilingualCorpus corpus;

	/** The Laplace smoothing value applied when (re-)initializing the table. */
	double smooth;

	/**
	 * Creates an empty (all-zero) count table sized to the given corpus, with
	 * no smoothing.
	 *
	 * @param corpus the bilingual corpus whose pair index defines the table layout
	 */
	public SparseCountTable(BilingualCorpus corpus) {
		this.corpus = corpus;
		counts = new double[corpus.ref.getSize()];
		sums = new double[corpus.getSourceSize()];
		nullCounts = new double[corpus.getForeignSize()];
		// nullSum defaults to 0; no smoothing for this constructor.
		this.smooth = 0;
	}

	/**
	 * Creates a count table initialized with a given smoothing value. Every
	 * pair count (and null count) starts at {@code smooth}, which acts as
	 * Laplace smoothing for subsequent normalization.
	 *
	 * @param corpus the bilingual corpus whose pair index defines the table layout
	 * @param smooth the initial (Laplace smoothing) value for every count
	 */
	public SparseCountTable(BilingualCorpus corpus, double smooth) {
		this.corpus = corpus;
		counts = new double[corpus.ref.getSize()];
		sums = new double[corpus.getSourceSize()];
		nullCounts = new double[corpus.getForeignSize()];
		nullSum = 0;
		this.smooth = smooth;
		initializeToSmoothingValue();
		System.out.println("Initializing with smooth of " + smooth);
	}

	/**
	 * Resets every count (pair and null) to the smoothing value and recomputes
	 * the normalizing sums accordingly.
	 */
	public void initializeToSmoothingValue() {
		Arrays.fill(counts, smooth);
		for (int i = 0; i < corpus.getSourceSize(); i++) {
			// Each source word's sum is smooth times the number of foreign
			// words it co-occurs with in the corpus.
			sums[i] = corpus.ref.getForeignIndices(i).length * smooth;
		}
		Arrays.fill(nullCounts, smooth);
		nullSum = nullCounts.length * smooth;
	}

	/**
	 * Resets all counts and sums to zero (ignores the smoothing value).
	 */
	public void clear() {
		Arrays.fill(counts, 0);
		Arrays.fill(nullCounts, 0);
		nullSum = 0;
		Arrays.fill(sums, 0);
	}

	/**
	 * Initializes the table as a uniform distribution: for each source word,
	 * equal mass over all foreign words it co-occurs with (summing to 1), and
	 * uniform mass over all foreign words for the null word.
	 */
	public void initializeUniform() {
		System.out.println("Initialize uniform");
		for (int i = 0; i < corpus.getSourceSize(); i++) {
			double value = 1.0 / corpus.ref.getNumberIForeign(i);
			int[] positions = corpus.ref.getForeignIndices(i);

			for (int j = 0; j < positions.length; j++) {
				counts[positions[j]] = value;
			}
			sums[i] = 1;
		}
		Arrays.fill(nullCounts, 1.0 / nullCounts.length);
		nullSum = 1;
	}

	/**
	 * Prints every non-negligible pair count (and every positive null count)
	 * to the given stream, one line per pair, followed by the per-source sums
	 * and the null sum.
	 *
	 * @param out the stream to print to
	 */
	public void printTable(PrintStream out) {
		int sourceSize = corpus.getSourceSize();
		int foreignSize = corpus.getForeignSize();
		// Fixed: the header and sum lines previously went to System.out while
		// the data lines went to `out`, splitting the report across streams.
		out.println("foreign word || source word || prob");
		for (int sourceIndex = 0; sourceIndex < sourceSize; sourceIndex++) {
			for (int foreignIndex = 0; foreignIndex < foreignSize; foreignIndex++) {
				int ref = corpus.ref.getIndex(sourceIndex, foreignIndex);
				if (ref == -1)
					continue; // pair not present in the corpus
				double prob = counts[ref];
				if (prob > 0.0001) {
					out.println(corpus.getForeignWordById(foreignIndex) + "-" + foreignIndex
							+ " | " + corpus.getSourceWordById(sourceIndex) + "-" + sourceIndex
							+ " : " + prob + " ");
				}
			}
			out.println("Sum " + sums[sourceIndex]);
		}
		for (int foreignIndex = 0; foreignIndex < foreignSize; foreignIndex++) {
			if (nullCounts[foreignIndex] > 0) {
				out.println(corpus.getForeignWordById(foreignIndex) + "-" + foreignIndex
						+ " || Null  : " + nullCounts[foreignIndex] + " ");
			}
		}
		out.println("Null sum " + nullSum);
	}

	/**
	 * @param sourceWord the source word id
	 * @return the normalizing sum (total count mass) for the given source word
	 */
	public double getNormalizing(int sourceWord) {
		return sums[sourceWord];
	}

	/**
	 * @return the normalizing sum for the null word
	 */
	public double getNormalizingNull() {
		return nullSum;
	}

	/**
	 * @param foreignWord the foreign word id
	 * @return the count of the given foreign word aligned to null
	 */
	public double getNullCounts(int foreignWord) {
		return nullCounts[foreignWord];
	}

	/**
	 * Overwrites the null count for a foreign word, keeping the null sum
	 * consistent. Used by online methods.
	 *
	 * @param foreignWord the foreign word id
	 * @param counts      the new count value (expected to be a probability-like value)
	 */
	public void setNullCount(int foreignWord, double counts) {
		assert (!Double.isNaN(counts)) && counts >= 0 && counts <= 1.0001;
		nullSum -= nullCounts[foreignWord];
		nullCounts[foreignWord] = counts;
		nullSum += counts;
	}

	/**
	 * Adds to the null count for a foreign word, keeping the null sum consistent.
	 *
	 * @param foreignWord the foreign word id
	 * @param counts      the value to add (expected to be a probability-like value)
	 */
	public void addToNullCount(int foreignWord, double counts) {
		assert (!Double.isNaN(counts)) && counts >= 0 && counts <= 1.0001;
		nullCounts[foreignWord] += counts;
		nullSum += counts;
	}

	/**
	 * @param sourceWord  the source word id
	 * @param foreignWord the foreign word id
	 * @return the count stored for the (source, foreign) pair; the pair must
	 *         exist in the corpus pair index
	 */
	public double getCounts(int sourceWord, int foreignWord) {
		return counts[corpus.ref.getIndex(sourceWord, foreignWord)];
	}

	/**
	 * Adds to the count of a (source, foreign) pair, keeping the source word's
	 * normalizing sum consistent.
	 *
	 * @param sourceWord  the source word id
	 * @param foreignWord the foreign word id
	 * @param p           the value to add (expected to be a probability)
	 */
	public void addToCount(int sourceWord, int foreignWord, double p) {
		assert !Double.isNaN(p) && p >= 0 && p <= 1.001 : "Not a probability "
				+ p;
		counts[corpus.ref.getIndex(sourceWord, foreignWord)] += p;
		sums[sourceWord] += p;
	}

	/**
	 * Overwrites the count of a (source, foreign) pair, keeping the source
	 * word's normalizing sum consistent.
	 *
	 * @param sourceWord  the source word id
	 * @param foreignWord the foreign word id
	 * @param p           the new count value (expected to be a probability)
	 */
	public void setCount(int sourceWord, int foreignWord, double p) {
		assert !Double.isNaN(p) && p >= 0 && p <= 1.001 : "Not a probability "
				+ p;
		int ref = corpus.ref.getIndex(sourceWord, foreignWord);
		sums[sourceWord] -= counts[ref];
		counts[ref] = p;
		sums[sourceWord] += p;
	}

	/**
	 * @param si the source word id
	 * @return the pair-reference indices of all foreign words co-occurring
	 *         with the given source word (i.e. positions with potentially
	 *         non-zero counts)
	 */
	public int[] getNotZeroCountsByWord(int si) {
		return corpus.ref.getForeignIndices(si);
	}

	/**
	 * @param pos a pair-reference index (as returned by {@link #getNotZeroCountsByWord})
	 * @return the count stored at that index
	 */
	public double getCountByIndex(int pos) {
		return counts[pos];
	}

}
