/**
 * Copyright 2012 Brigham Young University
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.byu.nlp.cluster.mom;

import java.util.Arrays;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;

import edu.byu.nlp.cluster.Dataset;
import edu.byu.nlp.data.SparseFeatureVector;
import edu.byu.nlp.data.SparseFeatureVector.EntryVisitor;
import edu.byu.nlp.math.GammaFunctions;
import edu.byu.nlp.pipes.Instance;
import edu.byu.nlp.util.DoubleArrays;
import edu.byu.nlp.util.Enumeration;
import edu.byu.nlp.util.Iterables2;

/**
 * Sufficient statistics and sampling state for a collapsed Gibbs sampler over a
 * mixture-of-multinomials clustering model. Labeled documents contribute fixed counts at
 * construction time; unlabeled documents have mutable topic assignments ({@code y}) that are
 * resampled via {@link #nextAssignments}.
 *
 * <p>INVARIANT: {@code topicCounts} is stored in LOG space (see the
 * {@code DoubleArrays.logToSelf} call in {@link #newInstance} and the exp/log round-trips in
 * {@code incrementCounts}/{@code decrementCounts}). All other count arrays are in linear space.
 *
 * <p>NOTE(review): not thread-safe — {@link #logJoint} temporarily mutates {@code topicCounts}
 * in place, and the increment/decrement methods mutate shared arrays without synchronization.
 */
public class CollapsedParameters {
	
	// Reference to data
	private final Dataset data;

	// Assignments to y: y[i] is the current topic of the i-th unlabeled document
	private final int[] y;
	
	// Sufficient statistics (counts).
	// topicCounts[k]            : log(alpha + #docs assigned to topic k)  -- LOG space!
	// topicWordCounts[k][f]     : beta + count of feature f in docs assigned to topic k
	// topicCountsAcrossTokens[k]: numFeatures*beta + total token count of docs assigned to k
	// docSize[i]                : token count (feature-value sum) of unlabeled doc i
	private final double[] topicCounts;
	private final double[][] topicWordCounts;
	private final double[] topicCountsAcrossTokens;
	private final int[] docSize;

	// Avoid recreation of arrays: scratch buffer reused across sampling steps
	private final double[] completeConditional;
	
	/**
	 * Private; use {@link #newInstance}. Takes ownership of all array arguments.
	 * {@code topicCounts} must already be in log space.
	 */
	private CollapsedParameters(Dataset data, int[] y, double[] topicCounts,
			double[][] topicWordCounts, double[] topicCountsAcrossTokens, int[] docSize) {
		this.data = data;
		this.y = y;
		this.topicCounts = topicCounts;
		this.topicWordCounts = topicWordCounts;
		this.topicCountsAcrossTokens = topicCountsAcrossTokens;
		this.docSize = docSize;
		
		this.completeConditional = new double[topicCounts.length];
	}
	
	/** Returns K, the number of topics/clusters. */
	public int getNumTopics() {
		return topicCounts.length;
	}
	
	/** Returns F, the size of the feature (word) vocabulary. */
	public int getNumFeatures() {
		return topicWordCounts[0].length;
	}

	/**
	 * Performs one full Gibbs sweep: resamples the topic assignment of every unlabeled
	 * document in dataset order, updating counts and {@code y} as it goes.
	 */
	public void nextAssignments(final Assigner assigner) {
		int docIndex = 0;
		for (SparseFeatureVector doc : data.unlabeledData()) {
			nextAssignment(docIndex++, doc, assigner);
		}
	}
	
	/**
	 * Resamples the assignment of a single unlabeled document: removes its counts,
	 * computes the (unnormalized log) complete conditional over topics, lets the
	 * {@code assigner} choose a topic, and adds the counts back under the new topic.
	 */
	public void nextAssignment(int docIndex, SparseFeatureVector doc, final Assigner assigner) {
		// Decrement the counts for the label assigned to the current doc
		decrementCounts(docIndex, doc);
		
		// Compute the (log) complete conditionals
		logCompleteConditionals(docIndex, doc);

		// Sample and assign
		int nextY = assigner.assignY(completeConditional);
		
		// Increment the counts for the chosen label for the current doc
		incrementCounts(docIndex, doc, nextY);
	}
	
	/**
	 * Accumulates the per-feature term of the complete conditional:
	 * sum over features f in the doc of log Gamma(n_kf + x_f) - log Gamma(n_kf),
	 * computed via the ratio-of-gammas-by-integer-difference identity.
	 */
	private static class Term3Accumulator implements EntryVisitor {

		private final double[] topicWordCounts;
		private double acc;
		
		public Term3Accumulator(double[] topicWordCounts) {
			this.topicWordCounts = topicWordCounts;
			this.acc = 0.0;
		}
		
		@Override
		public void visitEntry(int index, double value) {
			// NOTE(review): (int) value truncates — assumes feature values are integral
			// (word counts). Fractional feature values would be silently floored; confirm
			// upstream featurization guarantees integer counts.
			acc += GammaFunctions.logRatioOfGammasByDifference(topicWordCounts[index], (int) value);
		}
		
		public double getAccumulator() {
			return acc;
		}
		
	}

	/** Computes the word-count term of the log complete conditional for one doc/topic pair. */
	private static double computeTerm3(SparseFeatureVector doc, double[] topicWordCounts) {
		Term3Accumulator a = new Term3Accumulator(topicWordCounts);
		doc.visitSparseEntries(a);
		return a.getAccumulator();
	}
	
	/**
	 * Returns a value within a constant offset of the logarithm of the complete conditional. 
	 */
	private void logCompleteConditionals(int docIndex, SparseFeatureVector doc) {
		// Fills the reusable completeConditional buffer, one entry per topic.
		for (int k = 0; k < completeConditional.length; k++) {
			completeConditional[k] = logCompleteConditional(topicCounts[k], topicCountsAcrossTokens[k],
					docSize[docIndex], doc, topicWordCounts[k]);
		}
	}
	
	/**
	 * One topic's unnormalized log complete conditional.
	 *
	 * @param topicDocCount   log(alpha + docs in topic) — already in LOG space, hence it is
	 *                        simply ADDED below (term1 = log of the doc-count factor)
	 * @param topicTokenCounts linear-space token total for the topic (doc already removed)
	 * @param docSize         token count of the doc being (re)assigned
	 * @param doc             the doc's sparse feature vector
	 * @param topicWordCounts linear-space per-feature counts for the topic
	 */
	@VisibleForTesting
	static double logCompleteConditional(double topicDocCount, double topicTokenCounts, int docSize,
			SparseFeatureVector doc, double[] topicWordCounts) {
		// term2 = -[log Gamma(n_k + docSize) - log Gamma(n_k)] via a negative difference
		double term2 = GammaFunctions.logRatioOfGammasByDifference(
				topicTokenCounts, -docSize);
		double term3 = computeTerm3(doc, topicWordCounts);
		return topicDocCount + term2 + term3;
	}

	// Important : also updates y[docIndex]
	@VisibleForTesting
	void incrementCounts(int docIndex, SparseFeatureVector doc, int nextY) {
		y[docIndex] = nextY;
		// topicCounts is kept in log space: leave log space, add one doc, return to log space.
		topicCounts[nextY] = Math.log(Math.exp(topicCounts[nextY]) + 1);
		doc.addTo(topicWordCounts[nextY]);
		topicCountsAcrossTokens[nextY] += docSize[docIndex];
	}

	/** Removes doc docIndex's contribution from the counts of its currently assigned topic. */
	@VisibleForTesting
	void decrementCounts(int docIndex, SparseFeatureVector doc) {
		int curY = y[docIndex];
		// NOTE(review): if the stored linear count ever reaches <= 1 (possible when
		// alpha <= 1 and this topic has no other docs), log(exp(x) - 1) yields
		// -Infinity or NaN. Presumably alpha/the data keep counts above 1 — confirm.
		topicCounts[curY] = Math.log(Math.exp(topicCounts[curY]) - 1);
		doc.subtractFrom(topicWordCounts[curY]);
		topicCountsAcrossTokens[curY] -= docSize[docIndex];
	}

	/** Returns a value within a constant offset of the log of the joint probability. **/
	public double logJoint() {
		// Temporarily convert topicCounts to linear space for logBeta, then restore.
		// NOTE(review): in-place mutate/restore means this method is not reentrant and
		// must not run concurrently with sampling.
		DoubleArrays.expToSelf(topicCounts);
		double logJoint = GammaFunctions.logBeta(topicCounts);
		for (int k = 0; k < topicWordCounts.length; k++) {
			logJoint += GammaFunctions.logBeta(topicWordCounts[k]);
		}
		DoubleArrays.logToSelf(topicCounts);
		return logJoint;
	}
	
	//
	// Code below here is used for initialization
	//
	
	/** K-vector of the symmetric Dirichlet prior alpha (linear space at this point). */
	private static double[] newTopicCounts(int K, double alpha) {
		double[] topicCounts = new double[K];
		Arrays.fill(topicCounts, alpha);
		return topicCounts;
	}
	
	/** K x F matrix initialized to the symmetric Dirichlet prior beta. */
	private static double[][] newTopicWordCounts(int K, int F, double beta) {
		double[][] topicWordCounts = new double[K][F];
		for (int k = 0 ; k < K; k++) {
			Arrays.fill(topicWordCounts[k], beta);
		}
		return topicWordCounts;
	}
	
	/** Per-topic token totals, initialized to the prior mass numFeatures * beta. */
	private static double[] newTopicTokenCounts(int K, int numFeatures, double beta) {
		double[] topicTokenCounts = new double[K];
		Arrays.fill(topicTokenCounts, numFeatures * beta);
		return topicTokenCounts;
	}

	/**
	 * Builds initial sufficient statistics: priors, plus fixed counts from labeled data,
	 * plus counts from unlabeled data under the provided initial assignments.
	 *
	 * @param unlabeledY initial topic assignment per unlabeled doc; retained by reference
	 *                   and mutated during sampling
	 * @param K          number of topics
	 * @param alpha      symmetric Dirichlet prior over topics; must be &gt; 0
	 * @param beta       symmetric Dirichlet prior over words; must be &gt; 0
	 * @throws IllegalArgumentException if alpha or beta is not strictly positive
	 */
	public static CollapsedParameters newInstance(Dataset data, int[] unlabeledY, int K, double alpha, double beta) {
		Preconditions.checkArgument(alpha > 0.0, "alpha (%s) must be strictly greater than 0.0", alpha);
		Preconditions.checkArgument(beta > 0.0, "beta (%s) must be strictly greater than 0.0", beta);
		
		double[] yCounts = newTopicCounts(K, alpha);
		double[][] topicWordCounts = newTopicWordCounts(K, data.getNumFeatures(), beta);
		double[] topicTokenCounts = newTopicTokenCounts(K, data.getNumFeatures(), beta);
		
		// Labeled docs contribute permanent counts; their assignments are never resampled.
		for (Instance<Integer, SparseFeatureVector> instance : data.labeledData()) {
			int y = instance.getLabel();

			++yCounts[y];
			instance.getData().addTo(topicWordCounts[y]);
			topicTokenCounts[y] += instance.getData().sum();
		}
		
		// Unlabeled docs contribute counts under their initial assignments, and we record
		// each doc's size so increment/decrement need not recompute sums.
		int[] docSize = new int[data.unlabeledData().size()];
		for (Enumeration<SparseFeatureVector> e : Iterables2.enumerate(data.unlabeledData())) {
			int docIndex = e.getIndex();
			SparseFeatureVector instance = e.getElement();
			int y = unlabeledY[docIndex];

			++yCounts[y];
			instance.addTo(topicWordCounts[y]);
			double size = instance.sum();
			topicTokenCounts[y] += size;
			docSize[docIndex] = (int)size;
		}
		
		// Establish the class invariant: topicCounts lives in log space from here on.
		DoubleArrays.logToSelf(yCounts);
		return new CollapsedParameters(data, unlabeledY, yCounts, topicWordCounts, topicTokenCounts, docSize);
	}

	/** This class maintains ownership; if you need to change the values, clone the array first **/
	public int[] getY() {
		return y;
	}

	/** Returns a copy **/
	public double[] getTopicCounts() {
		// Convert the copy back to linear space for callers.
		double[] copy = topicCounts.clone();
		DoubleArrays.expToSelf(copy);
		return copy;
	}
	
	/** This class maintains ownership; if you need to change the values, clone the matrix first **/
	public double[][] getTopicWordCounts() {
		return topicWordCounts;
	}
	
}