function [Z,argmax,score] = binary_link_clustering(INCIDENCE, K, a0, b0, dpm_alpha, batch_size, max_iter)
%
% [Z,argmax,score] = binary_link_clustering(INCIDENCE, TRUNC, A0, B0, DPMALPHA, BATCHSIZE, MAXITER)
%
% Generative model:
%   incidence(e,i) ~ Poisson(sum_k theta_k(i) z(e,k))
%   theta(i)       ~ Gamma(a0, b0) for all i
%   z              ~ Truncated DPM(alpha, K)
%
% Optimizes variational parameters with a memoized online algorithm:
% per-minibatch sufficient statistics are cached and swapped in/out of
% the global statistics (no stochastic step size is needed).  z is a
% membership matrix of edges, from which overlapping membership of
% vertices can be derived indirectly.
%
% INCIDENCE = edge x vertex incidence matrix with entries in {0,1,2}
% TRUNC     = truncation level K of the DPM prior
% A0        = shape of the Gamma(A0,B0) prior on theta
% B0        = rate of the Gamma(A0,B0) prior on theta
% DPMALPHA  = concentration parameter of the DPM prior
% BATCHSIZE = minibatch size (edges per batch)
% MAXITER   = maximum number of passes over the minibatches
%
% Returns:
%   Z      = K x m soft membership matrix (normalized per edge)
%   argmax = 1 x m hard cluster assignment of each edge
%   score  = sum over edges of the maximum per-edge log score
%
% code: Yongjin Park, ypp@csail.mit.edu
%

TOL = 1e-4;    % convergence tolerance on change of per-cluster totals
per_num = 10;  % number of randomly seeded edges per cluster

[m,n] = size(INCIDENCE);   % m edges, n vertices

Y = INCIDENCE';            % work in vertex x edge orientation

% Split Y into indicators of multiplicity-1 and multiplicity-2 entries
Y1 = Y;
Y1(Y1>1) = 0;              % Y1(i,e) = 1 iff Y(i,e) == 1

Y2 = spalloc(n,m,nnz(Y));
Y2(Y==2) = 1;              % Y2(i,e) = 1 iff Y(i,e) == 2
ysum = sum(Y)';            % per-edge total incidence mass (m x 1)

% ****************************************************************
% initial random seeding
% Clamp so randsample (sampling WITHOUT replacement) never requests
% more edges than exist; for num_rand >= m every edge is a seed.
num_rand = min(per_num * K, m);

rand_idx = randsample(m,num_rand);
% round-robin assignment of the sampled seed edges to the K clusters
Z = sparse(1+mod(1:num_rand,K), rand_idx, 1, K, m);

global_stat_edge = Z * Y';       % K x n: per-cluster vertex statistics
global_stat_total = sum(Z,2);    % K x 1: per-cluster edge mass

% greedy seeding and re-estimate sufficient stats
% logQ(e,k): collapsed log score of assigning edge e to cluster k
stuff = - log(1+1./(b0+global_stat_total)).*(sum(global_stat_edge,2) + a0*n);
logQ = repmat(stuff',m,1) +  Y1' * log(1+global_stat_edge'/a0);
logQ = logQ + Y2' * log(1+global_stat_edge'/(a0+1));
logQ = logQ - ysum * log(1+b0+global_stat_total');
    
[~,argmax] = max(logQ');   % hard-assign every edge greedily

Z = full(sparse(argmax, 1:m, 1, K, m));
% variational Beta(u,v) parameters of the stick-breaking weights
dpm_u = zeros(K,1);
dpm_v = zeros(K,1);

% construct minibatches: 2*m edges drawn WITH replacement, then cut
% into fixed-size batches
num_batches = ceil(2*m/batch_size);
permuted_edges = randsample(m, batch_size*num_batches, true);

% For each batch b:
% permuted_edges((1+(b-1)*batch_size):(b*batch_size))
minibatches = arrayfun(@(b) permuted_edges((1+(b-1)*batch_size):(b*batch_size)), 1:num_batches, 'UniformOutput', false);
% memoized per-batch sufficient statistics
minibatch_stat_edge = cellfun(@(rand_edges) Z(:,rand_edges)*Y(:,rand_edges)', minibatches, 'UniformOutput', false);
minibatch_stat_total = cellfun(@(rand_edges) sum(Z(:,rand_edges),2), minibatches, 'UniformOutput', false);

% global statistics are exactly the sum of the cached batch statistics
global_stat_edge = sum(cat(3,minibatch_stat_edge{:}),3);
global_stat_total = sum(cat(2,minibatch_stat_total{:}),2);

stat_total_old = global_stat_total;

% ****************************************************************
for tt = 1:(max_iter*num_batches),

    prior = zeros(K,1);

    if tt > 1,
        % expected log stick-breaking weights, with clusters ordered
        % by decreasing expected size (size-biased ordering)
        [ ~, size_order ] = sort(-dpm_u);
        log_dpm_u_ordered = log(dpm_u(size_order));
        log_dpm_v_ordered = log(dpm_v(size_order));
        log_dpm_denom_ordered = log(dpm_u(size_order)+dpm_v(size_order));
        
        tmp_cum = [0; cumsum(log_dpm_v_ordered - log_dpm_denom_ordered)];
        prior(size_order) = log_dpm_u_ordered - log_dpm_denom_ordered + tmp_cum(1:K);
        prior = prior - max(prior);
    end

    prior = prior - log(1+1./(b0+global_stat_total)).*(sum(global_stat_edge,2) + a0*n);
    
    b = 1+mod(tt-1,num_batches);   % cycle through batches in order
    rand_edges = minibatches{b};

    % discount previous statistics of this batch (memoized swap-out)
    global_stat_edge = global_stat_edge - minibatch_stat_edge{b};
    global_stat_total = global_stat_total - minibatch_stat_total{b};

    % estimate Z (locally): per-edge log score over the K clusters
    logQ = bsxfun(@plus, Y1(:,rand_edges)' * log(1+global_stat_edge'/a0), prior');
    logQ = logQ + Y2(:,rand_edges)' * log(1+global_stat_edge'/(a0+1));
    logQ = logQ - ysum(rand_edges) * log(1+b0+global_stat_total');
    
    % normalized responsibilities (softmax of logQ per edge)
    Z_loc = exp(bsxfun(@minus, logQ', max(logQ')));
    Z_loc = bsxfun(@rdivide, Z_loc, sum(Z_loc));

    % memoized update: recompute this batch's statistics and swap in
    minibatch_stat_edge{b} = Z_loc*Y(:,rand_edges)';
    minibatch_stat_total{b} = sum(Z_loc,2);

    global_stat_edge = global_stat_edge + minibatch_stat_edge{b};
    global_stat_total = global_stat_total + minibatch_stat_total{b};

    % update DPM prior (Beta stick-breaking variational parameters)
    [size_ordered, size_order] = sort(global_stat_total,'descend');
    ntot = sum(global_stat_total);
    
    dpm_u = 1+global_stat_total;
    dpm_v(size_order) = dpm_alpha + (ntot - cumsum(size_ordered));

    % local log-likelihood
    llik = sum(sum(logQ .* Z_loc'));
    fprintf(2,'Iter = %05d, llik = %.4e\r', tt, llik);

    % stop when per-cluster totals have effectively stopped moving
    if sum(abs(stat_total_old - global_stat_total)) < TOL,
        fprintf(2,'Converged\r');
        break;
    end
    
    stat_total_old = global_stat_total;
end

% resolve argmax assignment: one final full E-step over all m edges
[ ~, size_order ] = sort(-dpm_u);
log_dpm_u_ordered = log(dpm_u(size_order));
log_dpm_v_ordered = log(dpm_v(size_order));
log_dpm_denom_ordered = log(dpm_u(size_order)+dpm_v(size_order));

tmp_cum = [0; cumsum(log_dpm_v_ordered - log_dpm_denom_ordered)];
prior(size_order) = log_dpm_u_ordered - log_dpm_denom_ordered + tmp_cum(1:K);
prior = prior - max(prior);

prior = prior - log(1+1./(b0+global_stat_total)).*(sum(global_stat_edge,2) + a0*n);
logQ = bsxfun(@plus, Y1' * log(1+global_stat_edge'/a0), prior');
logQ = logQ + Y2' * log(1+global_stat_edge'/(a0+1));
logQ = logQ - ysum * log(1+b0+global_stat_total');

% final soft memberships and hard assignments
Z = exp(bsxfun(@minus, logQ', max(logQ')));
Z = bsxfun(@rdivide, Z, sum(Z));

[maxval,argmax] = max(logQ');
score = sum(maxval);