package be.ac.ulb.mlg.utils.measure;

/*
 * The MIT License (MIT)
 * 
 * Copyright (c) 2013 Jean-Sebastien Lerat (Jean-Sebastien.Lerat@ulb.ac.be)
 * 
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 * 
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 * 
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/**
 * 
 * @author Jean-Sebastien Lerat (Jean-Sebastien.Lerat@ulb.ac.be)
 * @version 1.00, 24/06/2013
 * @see <a href="http://stats.stackexchange.com/questions/32662/question-about-calculating-mutual-information">about-calculating-mutual-information</a>
 */

import be.ac.ulb.mlg.utils.Discretizer;
import be.ac.ulb.mlg.utils.Measure;
import be.ac.ulb.mlg.utils.MeasureUtils;
import be.ac.ulb.mlg.utils.measure.entropy.EmpiricalEntropy;

/**
 * Mutual information computed via the entropy identity:
 * I(X,Y) = H(X) - H(X|Y) = H(Y) - H(Y|X) = H(X) + H(Y) - H(X,Y)
 * <p>
 * Marginal and joint entropies are delegated to an {@link Entropy} estimator;
 * continuous profiles may optionally be discretized first with a
 * {@link Discretizer}.
 */
public class MutualInformation implements Measure{
	// Optional discretization step; null means the raw values are used as-is.
	private final Discretizer discretizer;
	// Estimator used for the marginal entropies H(A), H(B) and the joint H(A,B).
	private final Entropy entropyEstimator;

	/**
	 * The default constructor that uses the {@link EmpiricalEntropy} entropy estimator and no {@link Discretizer}
	 */
	public MutualInformation() {
		this(new EmpiricalEntropy(),null);
	}
	/**
	 * Constructor that uses the {@link EmpiricalEntropy} entropy estimator
	 * @param discretizer The discretization method (may be {@code null})
	 */
	public MutualInformation(Discretizer discretizer) {
		this(new EmpiricalEntropy(),discretizer);
	}
	/**
	 * Constructor that uses the given entropy estimator and no discretizer
	 * @param entropyEstimator The entropy estimator
	 */
	public MutualInformation(Entropy entropyEstimator) {
		this(entropyEstimator,null);
	}
	/**
	 * Constructor that uses the given entropy estimator and discretizer
	 * @param entropyEstimator The entropy estimator
	 * @param discretizer The discretization method (may be {@code null})
	 */
	public MutualInformation(Entropy entropyEstimator,Discretizer discretizer) {
		this.discretizer		= discretizer;
		this.entropyEstimator	= entropyEstimator;
	}
	/**
	 * Computes I(A,B) = H(A) + H(B) - H(A,B).
	 * <p>
	 * If a discretizer is configured, both profiles are discretized together
	 * before counting. {@code MeasureUtils.countOccurencies} fills
	 * {@code frequencies} and returns a {@code long} packing two {@code int}s
	 * (high 32 bits and low 32 bits) that are unpacked into the last two
	 * arguments of {@link Entropy#entropy}.
	 * <p>
	 * NOTE(review): the boolean passed for the marginal counts is
	 * {@code computable != null} — presumably a "filter missing values" flag;
	 * confirm against {@code MeasureUtils}. Also note that if
	 * {@link #transform(double[][])} already discretized the data, the
	 * discretizer is applied a second time here — verify discretization is
	 * idempotent.
	 *
	 * @param A          first profile
	 * @param B          second profile (same length as {@code A})
	 * @param computable optional mask of usable positions; may be {@code null}
	 * @return the estimated mutual information between A and B
	 */
	@Override
	public double measure(double[] A,double[] B,boolean[] computable){
		if(discretizer != null){
			final double[][] X = discretizer.discretize(new double[][]{A,B});
			A = X[0];
			B = X[1];
		}
		
		final double entropyA,entropyB,entropyAB;
		
		long informations;
		// At most A.length distinct values (or pairs) can occur, so one buffer
		// of that size is reused for all three counts.
		final double[] frequencies = new double[A.length];

		informations= MeasureUtils.countOccurencies(A, frequencies, computable != null);
		entropyA	= entropyEstimator.entropy(frequencies,((int)(informations>>32)),((int)(informations)));

		informations= MeasureUtils.countOccurencies(B, frequencies, computable != null);
		entropyB	= entropyEstimator.entropy(frequencies,((int)(informations>>32)),((int)(informations)));

		informations= MeasureUtils.countOccurencies(new double[][]{B,A}, frequencies, computable);
		entropyAB	= entropyEstimator.entropy(frequencies,((int)(informations>>32)),((int)(informations)));

		return entropyA + entropyB - entropyAB;
	}
	/**
	 * @return {@code true} only if the entropy estimator and (when present)
	 *         the discretizer both have native implementations
	 */
	@Override
	public boolean hasNativeImplementation() {
		return entropyEstimator.hasNativeImplementation() && (discretizer == null || discretizer.hasNativeImplementation());
	}
	/**
	 * @return {@code true} if either the entropy estimator needs a data
	 *         transformation or the discretizer needs a preprocessing pass
	 */
	@Override
	public boolean requireDataTransformation() {
		if(entropyEstimator.requireDataTransformation()) return true;
		if(discretizer == null) return false;
		return discretizer.requirePreprocessing();
	}
	/**
	 * Applies the configured preprocessing/transformation steps to {@code data}
	 * in place, row by row.
	 *
	 * @param data the profiles to transform (mutated in place)
	 */
	@Override
	public void transform(double[][] data) {
		if(entropyEstimator.requireDataTransformation()){
			if(discretizer != null){
				if(discretizer.requirePreprocessing())
					discretizer.preprocess(data);
				// BUG FIX: the original discarded the result of discretize(data).
				// measure() takes the discretized values from the RETURNED array,
				// so the returned rows must be written back into data for the
				// discretization to take effect here too. If discretize() also
				// mutates in place, these assignments are harmless no-ops.
				final double[][] discretized = discretizer.discretize(data);
				for(int i = 0; i < data.length && i < discretized.length; i++){
					data[i] = discretized[i];
				}
			}
			entropyEstimator.transform(data);
		}else if(discretizer != null){
			if(discretizer.requirePreprocessing())
				discretizer.preprocess(data);
		}
	}
}
