function [best_mix best_vars] = clusteringGMMBayesUncertaintyFast(mix, data, options)
% CLUSTERINGGMMBAYESUNCERTAINTYFAST Variational-Bayes GMM clustering on
% uncertain data, with greedy component elimination for model selection.
%
% Inputs:
%   mix     - Netlab-style GMM struct (mix.ncentres, mix.priors,
%             mix.centres, mix.covars) extended with a variational prior
%             in mix.varprior (fields a0, m0, b0, W0, v0 of a
%             Gauss-Wishart prior).
%   data    - observation struct; data.Y is N-by-D points, data.C their
%             covariances, data.YYt second moments, data.logdetC the
%             log-determinants of data.C.
%             NOTE(review): exact shapes of C/YYt are set by the helper
%             functions (gmmpost*/updatePosterior*) — confirm against them.
%   options - options struct: .th (relative convergence threshold),
%             .reg (regularisation passed to the lower-bound/posterior
%             helpers), .kmin (minimum number of components to keep),
%             .killheuristic, .verbose.
%
% Outputs:
%   best_mix  - mixture achieving the smallest negative lower bound seen
%               across all visited model sizes.
%   best_vars - the variational quantities (R, E, RRt, logdetE, Z)
%               matching best_mix.
%
% Outer loop: run variational EM to convergence, record the model if its
% negative lower bound improves on the best so far, then kill the least
% significant component and repeat until only options.kmin remain.

% initialisation
K = mix.ncentres;
N = size(data.Y, 1);
lmin = realmax;                 % best (smallest) negative lower bound so far
best_mix = mix;

% initialise posterior (Gauss-Wishart variational posterior, one factor
% per component, started at the prior)
if isfield(mix.varprior, 'a0')
    mix.varposterior.a = ones(size(mix.priors))*mix.varprior.a0;
end
mix.varposterior.m = repmat(mix.varprior.m0, mix.ncentres, 1);
mix.varposterior.b = ones(1, mix.ncentres)*mix.varprior.b0;
mix.varposterior.W = repmat(mix.varprior.W0, [1 1 mix.ncentres]);
mix.varposterior.v = ones(1, mix.ncentres)*mix.varprior.v0;
mix.varposterior.ElndetLambda = ones(1, mix.ncentres)*wishart_exp_logdet(mix.varprior.W0, mix.varprior.v0);


% initial update: q(X|Z) starts at the raw observations, replicated per
% component; responsibilities from the robust (non-Bayes) posterior
vars.R = repmat(data.Y, [1 1 K]);
vars.E = repmat(data.C, [1 1 1 K]);
vars.RRt = repmat(data.YYt, [1 1 1 K]);
vars.logdetE = repmat(data.logdetC, [1 K]);
vars.Z = gmmpostRobustUncertainty(mix, data);

% BUGFIX: guarantee best_vars is assigned even if the lower bound is
% never finite (NaN < realmax is false), which previously caused an
% "output argument not assigned" error on return.
best_vars = vars;

if options.verbose
    negloglikelihood = -gmmBayesLowerboundUncertainty(mix, data, vars, options.reg);
    fprintf('initial negloglikelihood = %10.10f\n', negloglikelihood);
end



% the outer loop: one pass per candidate model size
k_cont = 1;
while k_cont   
    % init negloglikelihoods (realmax/2 vs realmax so the first
    % convergence test is guaranteed to fail and the inner loop runs)
    iters_count = 0;
    curr_negloglikelihood = realmax/2;
    prev_negloglikelihood = realmax;
    
    % the inner loop: variational EM until the relative change in the
    % negative lower bound drops below options.th
    while abs(prev_negloglikelihood-curr_negloglikelihood) > abs(options.th*prev_negloglikelihood)
        iters_count = iters_count + 1;
        prev_negloglikelihood = curr_negloglikelihood;

%         vars = updatePosteriorXZ(mix, data, vars);

        % update posterior distributions 
        mix.varposterior = updatePosteriorUncertainty(mix, data, vars, options.reg);

        % calculate posterior probability based on old parameters
        vars.Z = gmmpostBayesUncertainty(mix, data, vars);

        % update mixing parameters
        mix.priors = sum(vars.Z) / N;
        % update mean and covariance parameters from the variational
        % posterior expectations (E[Lambda_k] = v_k * W_k)
        for k = 1:mix.ncentres
            mix.covars(:, :, k) = inv(mix.varposterior.v(k)*mix.varposterior.W(:, :, k));
            mix.centres(k, :) = mix.varposterior.m(k, :);
        end

        
        % calculate lower bound
        e = gmmBayesLowerboundUncertainty(mix, data, vars, options.reg);
        
        % test whether should update q(X|Z): refresh it every 4th
        % iteration, or when the bound looks converged (prev + e =
        % prev - curr_negloglikelihood), to avoid the expensive update
        % on every pass ("Fast" variant)
        if mod(iters_count, 4) == 0 || abs(prev_negloglikelihood+e) < abs(options.th*prev_negloglikelihood)
            vars = updatePosteriorXZ(mix, data, vars);
            e = gmmBayesLowerboundUncertainty(mix, data, vars, options.reg);
        end
 
        
        % kill components whose support has collapsed
        [mix vars] = killComponentsSmall(mix, vars, options);

        % compute data likelihoods
        curr_negloglikelihood = -e;
        if options.verbose
            tol = (prev_negloglikelihood-curr_negloglikelihood) / abs(options.th*prev_negloglikelihood);
            fprintf('ITERS (%d)   (%f)   negloglikelihood: %10.10f   knz: %d \n', ...
                     iters_count, tol, curr_negloglikelihood, mix.ncentres);
        end
    end

    % compute lowerbound; keep this model if it beats the best so far
    curr_loglikelihood = -gmmBayesLowerboundUncertainty(mix, data, vars, options.reg);
    if curr_loglikelihood < lmin
        lmin = curr_loglikelihood;
        best_mix = mix;
        best_vars = vars;
    end

    % if there are more than 1 component, kill least significant component
    if mix.ncentres > options.kmin
        [mix vars] = killComponentSmallest(mix, data, vars, options.killheuristic, 'gmmbayesuncertainty');
    else
        k_cont = 0;
    end
    
    if options.verbose
        fprintf('%10.10f   %d\n', lmin, best_mix.ncentres);
    end
end