/**
 * Functions to calculate KL-divergence 
 */
package ca.sfu.util;

import edu.cmu.tetrad.bayes.*;
//import edu.cmu.tetrad.graph.*; 
import edu.cmu.tetrad.data.*;
import java.util.*;

import edu.cmu.tetrad.graph.*;




/**
 * @author wluoa
 *
 */
public class DivergenceCalculator {

	private BayesIm im;
	/**
	 * 
	 */
	/**
	 * Creates a calculator whose divergences are measured from the
	 * distribution represented by {@code im} (the "P" distribution).
	 *
	 * @param im the reference Bayes instantiated model
	 */
	public DivergenceCalculator(BayesIm im) {
		this.im = im;
	}
	
	/**
	 * Monte-Carlo estimate of KL(P || Q) where P is this calculator's model
	 * and Q is {@code otherIm}: samples nSample cases from P and averages
	 * log(P(x)/Q(x)) over them. Rows where either probability is zero are
	 * skipped (their contribution is left out of the sum).
	 *
	 * NOTE(review): assumes the column order of the simulated data set matches
	 * the variable order expected by both models' BayesImProbs — verify if the
	 * two models were built from differently-ordered variable lists.
	 *
	 * @param otherIm the approximating model Q
	 * @param nSample number of cases to simulate from P
	 * @return the Monte-Carlo estimate of the divergence
	 */
	public double divergence_mc(BayesIm otherIm, int nSample)
	{
		double D = 0.0;

		RectangularDataSet dataSet = im.simulateData(nSample);

		int nVariables = dataSet.getNumColumns();
		int[] variableValues = new int[nVariables];

		// Hoist the (expensive) probability calculators out of the sample loop;
		// they are loop-invariant.
		BayesImProbs thisProbs = new BayesImProbs(im);
		BayesImProbs otherProbs = new BayesImProbs(otherIm);

		for (int i = 0; i < dataSet.getNumRows(); i++)
		{
			// BUG FIX: the inner loop previously incremented i instead of j,
			// so j never advanced and the row index was corrupted.
			for (int j = 0; j < nVariables; j++)
			{
				variableValues[j] = dataSet.getInt(i, j);
			}
			double thisProb = thisProbs.getCellProb(variableValues);
			double otherProb = otherProbs.getCellProb(variableValues);
			if (thisProb > 0.0 && otherProb > 0.0) {
				D = D + Math.log(thisProb / otherProb);
			}
		}

		return D / nSample;
	}
	/**
	 * Exact KL(P || Q) where P is this calculator's model and Q is
	 * {@code otherIm}: enumerates every joint configuration of the discrete
	 * variables and sums P(x) * log(P(x)/Q(x)). Terms where either
	 * probability is zero are skipped. Exponential in the number of
	 * variables, so only feasible for small networks.
	 *
	 * NOTE(review): assumes both models index their nodes identically, since
	 * x[] positions are shared between im and otherIm — confirm when the two
	 * models come from different variable orderings.
	 *
	 * @param otherIm the approximating model Q
	 * @return the exact KL divergence
	 */
	public double divergence(BayesIm otherIm)
	{
		double D = 0.0;
		List<Node> nodes = im.getVariables();
		int nNodes = im.getNumNodes();
		// nCard[i] = number of categories (cardinality) of variable i
		int [] nCard = new int[nNodes];
		for (int iNode = 0; iNode < nNodes; iNode++) {
//			System.out.println(((DiscreteVariable) nodes.get(iNode)).getNumCategories());
			nCard[iNode] = ((DiscreteVariable) nodes.get(iNode)).getNumCategories();
		}
		

		int x[]= new int[nNodes]; // Vector to aid interation 

		// simply sum over all configurations to calc divergence D
		// Odometer-style enumeration: i is the digit currently being
		// incremented; the loop terminates once the carry propagates past
		// the last digit (i == nNodes).
		int i = 0;
		x[0]=-1; // A hack for the convenience of looping: first x[i]++ yields the all-zero configuration
		while (i < nNodes) {
			// update configuration
			x[i]++;

			while (i < nNodes && x[i] == nCard[i]) //carry
			{
				x[i] = 0;
				i++;
				if (i < nNodes){
					x[i]++;
				}
			}
			if (i < nNodes) {
				i = 0; // next increment starts again at the lowest digit
				// calc P(x) and Q(x)
				// P(x) = product over nodes of P(x_i | parents(x_i)) under im
				double P = 1.0;
				for (int iNode = 0; iNode < nNodes; iNode++) {
					int parents[] = im.getParents(iNode); // indices of parents
					int n = parents.length; 
					int vParents[] = new int[n]; // copy by value with primitive, should work
					for (int j=0; j<n ; j++)
						vParents[j] = x[parents[j]]; 
					//System.out.println("iNode: " + iNode + im.getRowIndex(iNode, vParents) + (x[iNode]));
//					System.out.println("x = " + x[0] + x[1] + x[2] + ", " + "prob = " + im.getProbability(iNode, im.getRowIndex(iNode, vParents), x[iNode]));
					P = P * im.getProbability(iNode, im.getRowIndex(iNode, vParents), x[iNode]);
				}

				// Q(x) = same factorization under otherIm (may have a
				// different graph, hence different parent sets).
				double Q = 1.0;
				for (int iNode = 0; iNode < nNodes; iNode++) {
					int parents[] = otherIm.getParents(iNode); // indices of parents
					int n = parents.length; 
					int vParents[] = new int[n]; // copy by value with primitive, should work
					for (int j=0; j<n ; j++)
						vParents[j] = x[parents[j]]; 
					Q = Q * otherIm.getProbability(iNode, otherIm.getRowIndex(iNode, vParents), x[iNode]);
				}
				
				// update divergence if probabilities are positive
				if (P > 0.0 && Q > 0.0) {
					D = D + P * Math.log(P / Q);
				}
			}
		}
		return D;

	}

	/**
	 * @param args
	 */
	/**
	 * Demo entry point: builds two three-node DAGs over the same variables
	 * with opposite edge orientations into the middle node, parameterizes
	 * both randomly, and prints the exact KL divergence between them
	 * (and of a model against itself, which should be zero).
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		Node a = new GraphNode("a");
		Node b = new GraphNode("b");
		Node c = new GraphNode("c");

		Dag g1 = new Dag();
		g1.addNode(a);
		g1.addNode(b);
		g1.addNode(c);

		// Copy the node set before adding edges, so g2 starts edge-free.
		Dag g2 = new Dag(g1);

		// g1: a -> c <- b   (collider at c)
		g1.addDirectedEdge(a, c);
		g1.addDirectedEdge(b, c);

		// g2: a -> b <- c   (collider at b)
		g2.addDirectedEdge(a, b);
		g2.addDirectedEdge(c, b);

		BayesIm im1 = new MlBayesIm(new BayesPm(g1), MlBayesIm.RANDOM);
		BayesIm im2 = new MlBayesIm(new BayesPm(g2), MlBayesIm.RANDOM);

		DivergenceCalculator cal = new DivergenceCalculator(im1);
		System.out.println("KL-divergence of im1 and im2: " + cal.divergence(im2));
		System.out.println("KL-divergence of im1 and im1: " + cal.divergence(im1));
	}

}
