% This is a test file that computes the entire Regularization path 
% for matrix completion under trace norm penalty.
%
% \lambda is a regularization parameter that belongs to a grid of values \{\lambda_min,..., \lambda_max \}.
% 
% We solve a problem of the type "minimize f(X) + \lambda ||X||_*" for a grid of
% \lambda values  using a first-order predictor-corrector approach.
%
% Refer to "Low-rank optimization with trace norm penalty",
% technical report (arXiv:1112.2318), 2011, for further details.
%
% Authors:
% Bamdev Mishra <b.mishra@ulg.ac.be>

% Reset the environment. "clearvars" is preferred over "clear all": it clears
% workspace variables without flushing loaded functions, breakpoints, and
% persistent state (see MATLAB documentation for clear/clearvars).
clearvars; close all; clc;

%% Setup
d1 = 1000;          % number of rows of the target matrix
d2 = 1000;          % number of columns of the target matrix
r = 5;              % rank used to generate the target matrix
over_sampling = 5;  % over-sampling factor w.r.t. the degrees of freedom

dof = (d1 + d2 - r) * r;             % degrees of freedom of a rank-r d1-by-d2 matrix
f = over_sampling * dof / (d1 * d2); % fraction of entries to observe
fprintf('Rank %i matrix of size %i times %i and OS = %i\n', r, d1, d2, over_sampling);


% Options
verbosity = false;          % passed to the solver as params.verb
compute_predictions = true; % whether to evaluate test predictions along the path

%% Optimization algorithm is the trust-region algorithm
optim_algo = @tr_matrix_completion;




%% Parameters
% Solver tolerances and iteration caps; consumed by the optimization routine
% (unset fields are filled in by default_params below).
params.tol = 1e-12;
params.vtol = 1e-12;
params.grad_tol = 1e-3;
params.dg_tol = 1e-5;
params.dg_vtol = 1e-5;
params.smax_tol = 1e-3;
params.max_iter = 10000;
params.max_iter_tr = 50;
params.verb =  verbosity;
params.compute_predictions = compute_predictions; % To compute predictions for a random number of entries

params = default_params(params);


%% Target model
% Total number of entries of the d1-by-d2 matrix.
% NOTE(review): n is not referenced again in this script; confirm whether it
% is needed downstream or can be removed.
n  = d1 * d2;




%% Generating random data
% Project helper: presumably returns the training set data.ls (rows, cols,
% entries, nentries) and low-rank factors data.Gs, data.Hs -- verify against
% generate_randn_data.
data = generate_randn_data(d1, d2, r, f);
 
% Testing data 
% Draw as many test entries as there are training entries; positions are
% sampled uniformly at random (repetitions possible).
data.ts.nentries = data.ls.nentries;
data.ts.rows = randi(d1, data.ts.nentries, 1);
data.ts.cols = randi(d2, data.ts.nentries, 1);
% partXY presumably evaluates entries of Gs*Hs' at the sampled (row, col)
% positions -- confirm against the partXY mex/implementation.
data.ts.entries = partXY(data.Gs', data.Hs',data.ts.rows,data.ts.cols,data.ts.nentries)';




%% Creating a grid of lambda values
% Sparse matrices holding the observed training entries (and twice the
% entries); both are handed to the solver through the model struct.
sparse_structure = sparse(data.ls.rows,data.ls.cols,data.ls.entries,d1,d2);
sparse_structure2 = sparse(data.ls.rows,data.ls.cols,2*data.ls.entries,d1,d2);

% Largest lambda on the grid: twice the dominant singular value of the data.
lambda_threshold = 2*svds(sparse_structure, 1);
reduction_factor = .50;
% NOTE(review): the comment below says lambda0 "should be less than
% lambda_threshold" for a non-zero rank, yet it is set equal to it -- confirm
% which is intended.
lambda0 = lambda_threshold;% should be less than lambda_threshold for rank to be non-zero
lambda_array = generate_geometric_sequence(lambda0, 20, reduction_factor); % generating a geometric sequence of parameters with a fixed reduction factor


% lambda_array = [lambda0/2, lambda0/4, lambda0/8, 1, 0.01, 1e-3]; % any given sequence of parameters


fprintf('We traverse through lambda values...\n');
for kk = 1 : length(lambda_array) % stray ";" after the for header removed
   fprintf('%s \n', num2str(lambda_array(kk), '%10.5e') );
end
fprintf('\n\n');



%% Setup initial model
model.d1 = d1;
model.d2 = d2;
model.p = 1;              % rank of the initial factorization (used below to size U, B, V)
model.pmax = min(d1, d2); % presumably an upper bound on the admissible rank -- confirm in compute_path
model.sparse_structure = sparse_structure;
model.sparse_structure2 = sparse_structure2;
model.lambda_array = lambda_array;




%% Functions
% NOTE(review): fun_set and fun_obj are not referenced later in this script;
% presumably consumed elsewhere in the project -- confirm.
fun_set = @functions_nsym_polar_geometry; % File that specifies geometry of the search space UBV^T
fun_obj = @functions_matrix_completion_polar; % File that computes cost function, gradient of the matrix completion problem




%% Random initialization
% Build a random rank-p starting point U*B*V' with orthonormal U and V:
% economy-size QR of Gaussian factors, then an SVD of the small p-by-p core.
% Output lists use comma separators (modern MATLAB syntax; the space-separated
% form is deprecated).
G = randn(model.d1, model.p);
H = randn(model.d2, model.p);
[Qg, Rg] = qr(G, 0); % economy-size QR: Qg is d1-by-p with orthonormal columns
[Qh, Rh] = qr(H, 0); % Qh is d2-by-p with orthonormal columns
[q1, b, q2] = svd(Rg * Rh'); % SVD of the p-by-p core Rg*Rh'

model.U = Qg * q1; % d1-by-p orthonormal factor
model.V = Qh * q2; % d2-by-p orthonormal factor
model.B = b;       % p-by-p diagonal core




%% Run algorithm
% Trace the full regularization path over lambda_array and time the run.
% tic/toc with an explicit timer handle prints the same "Elapsed time" line.
path_timer = tic;
[model, infos] = compute_path(optim_algo, data, model, params);
toc(path_timer);




%% Plots
fs = 20; % common font size for all figures

% Rank vs. lambda plot
figure('name', 'Rank fitting');
plot(-log10(infos.Regpath_lambda), infos.Regpath_rank,'--s','Color','b','LineWidth',2);
ax1 = gca;
set(ax1,'FontSize',fs);
xlabel(ax1,'1/\lambda  in log scale','FontSize',fs);
ylabel(ax1,'Rank','FontSize',fs);
title('Rank fitting');
% NOTE(review): "legend boxoff" is issued without a preceding legend() call in
% this figure (and the next two) -- confirm this is intentional.
legend 'boxoff';


% Optimal cost vs. lambda plot
figure('name', 'Regularization path');
semilogy(-log10(infos.Regpath_lambda), infos.Regpath_sol,'--s','Color','b','LineWidth',2);
ax1 = gca;
set(ax1,'FontSize',fs);
xlabel(ax1,'1/\lambda  in log scale','FontSize',fs);
ylabel(ax1,'Optimal cost','FontSize',fs);
title('Regularization path');
legend 'boxoff';



% Relative duality gap plot (optimality certificate)
figure('name', 'Optimality certificate');
semilogy(-log10(infos.Regpath_lambda), abs(infos.Regpath_rel_duality_gap),'--s','Color','b','LineWidth',2);
ax1 = gca;
set(ax1,'FontSize',fs);
xlabel(ax1,'1/\lambda  in log scale','FontSize',fs);
ylabel(ax1,'Relative duality gap','FontSize',fs);
title('Optimality certificate');
legend 'boxoff';



% Relative comparison between predictor-corrector and warm restart.
% The first lambda is skipped in the plot -- presumably there is no previous
% solution to predict from at the first grid point; confirm.
figure('name', 'Predictor-corrector vs. Warm restart');
inaccuracy_warm = infos.Regpath_warm_preds - infos.Regpath_sol;
inaccuracy_predictor = infos.Regpath_predictor_preds - infos.Regpath_sol;
rel_inaccuracy_warm = inaccuracy_warm ./ infos.Regpath_sol;
rel_inaccuracy_predictor = inaccuracy_predictor ./ infos.Regpath_sol;

semilogy(-log10(infos.Regpath_lambda(2:end)), rel_inaccuracy_warm(2:end),'--*','Color','r','LineWidth',2);
hold on;
semilogy(-log10(infos.Regpath_lambda(2:end)), rel_inaccuracy_predictor(2:end),'--s','Color','b','LineWidth',2);
hold off;
ax1 = gca;
set(ax1,'FontSize',fs);
xlabel(ax1,'1/\lambda  in log scale','FontSize',fs);
ylabel(ax1,'Relative inaccuracy in prediction','FontSize',fs); % typo "Relativ" fixed
title('Better prediction');
legend('Warm restart', 'Predictor-corrector');
legend 'boxoff';


% Test error vs. lambda plot
if params.compute_predictions % trailing "," after the condition removed
    figure('name', ' Test error Vs. Lambda');
    semilogy(-log10(infos.Regpath_lambda), infos.Regpath_preds_all,'--s','Color','b','LineWidth',2);
    ax1 = gca;
    set(ax1,'FontSize',fs);
    xlabel(ax1,'1/\lambda  in log scale','FontSize',fs);
    ylabel(ax1,'Test cost','FontSize',fs);
    title('Matrix recovery with decreasing \lambda'); % typo "deacrasing" fixed
    legend 'boxoff';
end



