/**
 * Copyright 2012 Brigham Young University
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.byu.nlp.cluster.mom;

import org.apache.commons.math3.special.Gamma;

import edu.byu.nlp.cluster.Dataset;
import edu.byu.nlp.cluster.Datasets;
import edu.byu.nlp.math.Math2;
import edu.byu.nlp.util.DoubleArrays;
import edu.byu.nlp.util.Matrices;

/**
 * @author rah67
 *
 */
public class MixtureOfMultinomialsUtil {
	
	private MixtureOfMultinomialsUtil() { }
	
	/**
	 * Computes posterior Dirichlet parameters over cluster proportions by adding the symmetric
	 * prior {@code alpha} to the observed per-label counts of {@code data}.
	 * 
	 * @param data dataset whose label counts are accumulated
	 * @param alpha symmetric Dirichlet prior added to every count
	 * @param K intended number of clusters; currently unused by this implementation (see FIXME)
	 * @return a freshly-allocated array where entry k is countLabels(data)[k] + alpha
	 */
	public static double[] posteriorAlpha(Dataset data, double alpha, int K) {
		// FIXME : does not work for clustering when K < data.getNumLabels()
		double[] posteriorAlpha = Datasets.countLabels(data);
		DoubleArrays.addToSelf(posteriorAlpha, alpha);
		return posteriorAlpha;
	}

	/**
	 * Computes posterior Dirichlet parameters over per-cluster feature distributions by adding
	 * the symmetric prior {@code beta} to the observed label/feature co-occurrence counts.
	 * 
	 * @param data dataset whose label/feature counts are accumulated
	 * @param beta symmetric Dirichlet prior added to every count
	 * @param K intended number of clusters; currently unused by this implementation (see FIXME)
	 * @return a freshly-allocated matrix where entry [k][v] is countLabelsAndFeatures(data)[k][v] + beta
	 */
	public static double[][] posteriorBeta(Dataset data, double beta, int K) {
		// FIXME : does not work for clustering when K < data.getNumLabels()
		double[][] posteriorBeta = Datasets.countLabelsAndFeatures(data);
		Matrices.addToSelf(posteriorBeta, beta);
		return posteriorBeta;
	}
	
	/**
	 * Creates a parameter object representing the variational parameters.
	 * 
	 * <p>Note: parameters are directly modified; caller maintains ownership of arrays inside
	 * MoMParameters. The returned object aliases {@code a} and {@code b}, whose contents are
	 * overwritten in place with the (temperature-scaled) digamma differences.
	 * 
	 * @param a Dirichlet parameters over cluster proportions; overwritten in place with
	 *        (digamma(a[k]) - digamma(sum_i a[i])) / temp
	 * @param b per-cluster Dirichlet parameters over features; each row k is overwritten in
	 *        place with (digamma(b[k][v]) - digamma(sum_v b[k][v])) / temp
	 * @param temp annealing temperature; the division is skipped entirely when temp == 1.0
	 *        (within 1e-10) to avoid needless work
	 */
	public static MoMParameters newVariationalParameters(double[] a, double[][] b, double temp) {
		// We cache the differences of digammas since they are the main computation (and the most expensive).

		// Loop-invariant: decide once whether the annealing temperature needs to be applied,
		// instead of re-testing it for every cluster inside the loop below.
		boolean applyTemperature = !Math2.doubleEquals(temp, 1.0, 1e-10);

		// This will become: digammaAMinusDigammaA0[k] = digamma(a[k]) - digamma(\sum_i a[i])
		// The normalizer digammaA0 must be computed BEFORE digammaToSelf overwrites a.
		double digammaA0 = Gamma.digamma(DoubleArrays.sum(a));
		double[] digammaAMinusDigammaA0 = a;
		digammaToSelf(digammaAMinusDigammaA0);
		DoubleArrays.subtractToSelf(digammaAMinusDigammaA0, digammaA0);
		if (applyTemperature) {
			DoubleArrays.divideToSelf(digammaAMinusDigammaA0, temp);
		}
		
		// This will become: digammaBMinusDigammaB0[k][v] = digamma(b[k][v]) - digamma(\sum_v b[k][v])
		double[][] digammaBMinusDigammaB0 = b;
		for (int k = 0; k < digammaBMinusDigammaB0.length; k++) {
			// Likewise, the row normalizer must be computed before the row is overwritten.
			double digammaB0 = Gamma.digamma(DoubleArrays.sum(b[k]));
			digammaToSelf(digammaBMinusDigammaB0[k]);
			DoubleArrays.subtractToSelf(digammaBMinusDigammaB0[k], digammaB0);
			if (applyTemperature) {
				DoubleArrays.divideToSelf(digammaBMinusDigammaB0[k], temp);
			}
		}
		
		// a and b now hold the digamma differences; hand them back wrapped as log-probabilities.
		return MoMParameters.fromLogProbabilities(a, b, false, false);
	}
	
	/** Replaces each element of {@code arr} with its digamma value, in place. */
	private static void digammaToSelf(double[] arr) {
		for (int i = 0; i < arr.length; i++) {
			arr[i] = Gamma.digamma(arr[i]);
		}
	}

	/**
	 * Computes counts based on the labeled data and returns a Priors object.
	 * 
	 * @param data dataset whose label and feature counts seed the posteriors
	 * @param alpha symmetric Dirichlet prior over cluster proportions
	 * @param beta symmetric Dirichlet prior over per-cluster feature distributions
	 * @param numClusters intended number of clusters; forwarded to posteriorAlpha/posteriorBeta
	 *        (currently unused there — see their FIXMEs)
	 * @return a HyperParams bundling the raw priors with the prior-plus-count posteriors
	 */
	public static HyperParams posteriors(Dataset data, double alpha, double beta, int numClusters) {
		double[] alphaPlusObservedCounts = posteriorAlpha(data, alpha, numClusters);
		double[][] betaPlusObservedCounts = posteriorBeta(data, beta, numClusters);
		return new HyperParams(alpha, beta, alphaPlusObservedCounts, betaPlusObservedCounts);
	}
	
}
