% compute normalized mutual information 
% of two cluster assignment
% c \in [1..N_1], w \in [1..N_2]
function MI = normalizedMutualInformation(c, w)
    if length(c) ~= length(w)
        error('Check length of cluster assignments');
    end
    c_class = unique(c);
    w_class = unique(w);
    N1 = length(c_class);
    N2 = length(w_class);
    n = length(c);
    
    c_count = zeros(N1,1);
    w_count = zeros(1,N2);
    wc_count = zeros(N1, N2);
    
    for i = 1:N1, c_count(i) = sum(c == c_class(i)) / n; end
    for j = 1:N2, w_count(j) = sum(w == w_class(j)) / n; end
    
    MI = 0;
    for i = 1:N1
        for j = 1:N2
            wc_count(i,j) = sum((c == c_class(i)) & (w == w_class(j))) / n;
            if wc_count(i, j) > 0
                MI = MI + wc_count(i, j) * log(wc_count(i,j) / c_count(i) ...
                                             / w_count(j));
            end
        end
    end
    %c_count
    %w_count
    %wc_count
    
    c_entropy = entropyCount(c_count);
    w_entropy = entropyCount(w_count);
    MI = MI / max([c_entropy w_entropy]);
end

