function [U, S, V, out] = CndGM( D, lambda )
% CndGM  Conditional-gradient (Frank-Wolfe) solver for nuclear-norm
% regularized matrix completion over the observed entries of D.
%
% Reference: <Conditional gradient algorithms for norm-regularized smooth
% convex optimization>. Noted to be slower than the boosting variant.
%
% Inputs:
%   D      - sparse matrix whose nonzero pattern marks observed entries
%   lambda - regularization weight (scale of each new rank-1 atom)
%
% Outputs:
%   U, V   - factor matrices after filtering (X ~= U*S*V')
%   S      - identity of matching size (scaling already folded into U)
%   out    - struct with out.obj, the per-iteration objective values
%
% NOTE(review): relies on external helpers: lansvd (presumably PROPACK's
% top singular pair), setSval/partXY (sparse-value helpers), mexLasso
% (SPAMS), filteroutBoost — confirm they are on the path.

maxIter = 1000;   % iteration cap
tol = 1e-3;       % stop when successive objectives differ by less than this
maxR = 1;         % rank threshold before switching to lasso weight refits

[m, n] = size(D);
[row, col, val] = find(D);

% Gradient of 0.5*||P_Omega(X) - val||^2 at X = 0 is -val on Omega.
grad = sparse(row, col, -val, m, n);
U = [];
V = [];
M = [];   % columns are observed entries of each rank-1 atom u_i*v_i'
s = [];   % atom weights

obj = zeros(maxIter, 1);
for i = 1:maxIter
    if(i > 1)
        % Overwrite the nonzero values of grad in place with the current
        % residual (gradient restricted to observed entries).
        grad = setSval(grad, res, length(res));
    end
    % Top singular pair of the gradient gives the steepest rank-1 atom.
    [u, ~, v] = lansvd(grad, 1, 'L');
    v = -v;   % atom is -u*v' (descent direction)
    
    U = cat(2, U, u);
    V = cat(2, V, v);
    
    % Observed entries of the new atom u*v'.
    mi = partXY(u', v', row, col, length(row))';
    M = cat(2, M, mi);
    
    if(length(s) > maxR)
        % Rank budget exceeded: refit all atom weights by lasso.
        s = updateWeights(M, val, lambda);
% Optional pruning of near-zero atoms (disabled in original):
%         nnzS = s > lambda*1e-6;
%         if(sum(nnzS) < length(s))
%             s = s(nnzS);
%             U = U(:, nnzS);
%             V = V(:, nnzS);
%             M = M(:, nnzS);
%         end
    else
        % Standard Frank-Wolfe step: shrink old weights, append new one.
        gamma = 2/(1 + i);
        s = (1 - gamma)*s;
        s = cat(1, s, lambda*gamma);
    end
    
    res = M*s - val;              % residual on observed entries
    obji = (1/2)*sum(res.^2);     % least-squares objective
    
    % Fixed: obj is a double; '%d' made MATLAB fall back to exponential
    % notation. '%g' prints it properly.
    fprintf('iter: %d; obj: %g; rank: %d \n', i, obji, length(s));
    obj(i) = obji;
    if(i > 1 && abs(obj(i) - obj(i - 1)) < tol)
        break;
    end
end

% Fold the weights into U, drop filtered atoms, and return identity S.
U = U*diag(s);
[U, V] = filteroutBoost(U, V, length(s));
S = eye(size(U, 2));

out.obj = obj(1:i);

end

%% ---------------------------------------------------------------
function [theta] = updateWeights(M, val, lambda)
% Refit atom weights by solving the lasso problem
%   min_theta 0.5*||val - M*theta||_2^2 + lambda*||theta||_1
% via SPAMS mexLasso (mode 0), returning the solution as a dense vector.

opts = struct('mode', 0, 'lambda', lambda);
theta = full(mexLasso(val, M, opts));

end