function [ D ] = gm_compare_jmef( GMM1_IN, GMM2_IN )
%GM_COMPARE_JMEF Monte-Carlo KL divergence between two Gaussian mixtures.
%   D = GM_COMPARE_JMEF(GMM1_IN, GMM2_IN) wraps the two input mixtures as
%   jMEF MixtureModel objects and returns a Monte-Carlo estimate (10000
%   samples) of the Kullback-Leibler divergence KL(GMM1 || GMM2).
%
%   Inputs:
%     GMM1_IN, GMM2_IN - cell arrays of component structs with fields:
%                          WEIGHT - scalar mixture weight
%                          MU     - mean vector (length dim)
%                          SIGMA  - dim-by-dim covariance matrix
%   Both mixtures must have the same number of components and the same
%   dimensionality.
%
%   Output:
%     D - estimated KL divergence (also printed to the console).
%
%   Requires jMEF.jar on the Java class path (see hard-coded path below).

% Validate that the two mixtures are comparable.
assert(length(GMM1_IN) == length(GMM2_IN), ...
    'Mixtures must have the same number of components.');
assert(length(GMM1_IN{1}.MU) == length(GMM2_IN{1}.MU), ...
    'Mixtures must have the same dimensionality.');
assert(~isempty(GMM1_IN{1}.MU), 'Mixture dimensionality must be positive.');

% Import jMEF.
% NOTE(review): removed the original 'close all hidden' -- closing every
% open figure is an unrelated, destructive side effect for a comparison
% routine.
% NOTE(review): hard-coded jar location -- make configurable if reused.
javaclasspath('H:\pai\jmef\jMEF.jar');
import jMEF.*;

% Wrap the input GMM format into jMEF MixtureModel objects.
n   = length(GMM1_IN);        % number of mixture components
dim = length(GMM1_IN{1}.MU);  % dimensionality of each component

GMM1 = MixtureModel(n);
GMM2 = MixtureModel(n);
GMM1.EF = MultivariateGaussian;
GMM2.EF = MultivariateGaussian;

for i = 1:n
    GMM1.weight(i) = GMM1_IN{i}.WEIGHT;
    GMM2.weight(i) = GMM2_IN{i}.WEIGHT;

    % Allocate parameter containers; the random mean/covariance values
    % are overwritten element-by-element below.
    GMM1.param(i) = PVectorMatrix.RandomDistribution(dim);
    GMM2.param(i) = PVectorMatrix.RandomDistribution(dim);

    % Copy the means.
    for j = 1:dim
        GMM1.param(i).v.array(j) = GMM1_IN{i}.MU(j);
        GMM2.param(i).v.array(j) = GMM2_IN{i}.MU(j);
    end

    % Copy the covariance matrices.
    % BUGFIX: terminated these assignments with semicolons -- the
    % originals echoed every matrix element to the console.
    for j = 1:dim
        for k = 1:dim
            GMM1.param(i).M.array(j,k) = GMM1_IN{i}.SIGMA(j,k);
            GMM2.param(i).M.array(j,k) = GMM2_IN{i}.SIGMA(j,k);
        end
    end
end

% Monte-Carlo estimate of KL(GMM1 || GMM2) with 10000 samples.
% BUGFIX: the divergence is now assigned to the declared output D -- the
% original only printed it, so requesting the return value raised an
% "output argument not assigned" error.
D = GMM1.KLDMC(GMM1, GMM2, 10000);
fprintf('Kullback Leibler divergence between original and estimated: \n %f \n', D);

end

