function mi = clusterMI(a,b)

% mi = clusterMI(a,b)
%
% Computes normalized mutual information between two group
% assignment vectors 'a' and 'b'. Both inputs are assumed
% to be vectors of length N whose entries are numeric labels
% indicating the cluster memberships.
%
% The returned value is I(a,b)/min(H(a),H(b)), where
% I(a,b) is the mutual information and H(a) denotes entropy.
% The value is zero if the two assignments are independent
% and one if they are identical.

% Map the labels of each partition to consecutive integers 1..K
% so they can be used as subscripts into a contingency table.
% This only needs base MATLAB (no Statistics Toolbox crosstab)
% and works for arbitrary numeric label values.
[~,~,ia] = unique(a(:));
[~,~,ib] = unique(b(:));

% Find the joint distribution p(a,b) from the contingency table
joint = accumarray([ia ib],1);
% Add a small value for empty cells; does not affect the result
% as entropy and mutual information are anyway computed as p*log(p)
% which approaches zero for small p. (Named 'smoothing' so the
% builtin constant eps is not shadowed.)
smoothing = 1e-10;
joint = joint + smoothing;
joint = joint/sum(joint(:));

% Marginal probabilities can be computed by summation of the joint
% probability
pa = sum(joint,2);
pb = sum(joint,1);
% Marginal entropies
Ha = -sum(pa.*log(pa));
Hb = -sum(pb.*log(pb));

% Mutual information; pa*pb is the outer product of the marginals,
% i.e. the joint distribution under independence
mi = joint.*log(joint./(pa*pb));
mi = sum(mi(:));

% ...and the normalized version
mi = mi/min(Ha,Hb);
