%% [model] = TrainAdaboost (features,labels,parameters)
% 
% train using training samples and the AdaBoost method, then store the
% learned weak learners so it is possible to classify new incoming samples.
%
% Inputs
%   - features: dxN (d = dimension of the space, N = num of samples)
%   - labels: 1xN (1 = positive samples and {0,-1} = negative samples (both
%                   values are allowed as negative labels)
%   - parameters: struct with the parameters, it can contain:
%       .NumOfBoostingRounds, {def 400}
%       .verbose, {def 0}
%
% Outputs
%   - model: struct with all the information needed for the classification.
%       .NumOfBoostingRounds,
%       .alphas,
%       .coordinate_wls,
%       .theta_wls,
%       .polarity_wls,
%
% -------------------------------------------------------------------------
% matias di martino (2012), matiasdm@fing.edu.uy
% -------------------------------------------------------------------------

function [model] = TrainAdaboost (features,labels,parameters)
% Train an AdaBoost classifier of decision stumps.
%
% Inputs
%   - features: dxN matrix (d = feature dimension, N = number of samples)
%   - labels: 1xN vector; 1 for positive samples, {0,-1} for negatives
%             (0-valued negatives are remapped to -1 internally)
%   - parameters: struct with optional fields:
%       .NumOfBoostingRounds  number of boosting iterations {def 400}
%       .verbose              print progress dots           {def 0}
%
% Output
%   - model: struct with fields
%       .NumOfBoostingRounds, .alphas, .coordinate_wls, .theta_wls,
%       .polarity_wls
%
% Relies on the helpers best_weak_learner and evaluate_stump (defined
% elsewhere in this project).

%% Load parameters --------------------------------------------------------

if isfield(parameters,'NumOfBoostingRounds')
    NumOfBoostingRounds = parameters.NumOfBoostingRounds;
else % set default value
    NumOfBoostingRounds = 400;
end

if isfield(parameters,'verbose')
    verbose = parameters.verbose;
else % set default value
    verbose = 0;
end 

% AdaBoost needs labels in {-1,+1}; remap any 0 (negative class) to -1.
if min(labels)==0, % so labels are {0,1}
    labels(labels==0) = -1;
end

% -------------------------------------------------------------------------


%% Train Classifier %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
if verbose, 
    disp('[TrainAdaboost] Training classifier, this may take some time ....')
end

%% initialization ---------------------------------------------------------
% Preallocate one slot per boosting round; also record the number of
% rounds in the model (documented output field).
model.NumOfBoostingRounds = NumOfBoostingRounds;
model.alphas = zeros(NumOfBoostingRounds,1);
model.coordinate_wls = zeros(NumOfBoostingRounds,1);
model.theta_wls = zeros(NumOfBoostingRounds,1);
model.polarity_wls = zeros(NumOfBoostingRounds,1);

nsamples = size(labels,2);
% Start from the uniform distribution over training samples.
Distribution_on_indexes = ones(1,nsamples)/nsamples; 
% -------------------------------------------------------------------------

for it = 1:NumOfBoostingRounds,    
    if verbose,
        if mod(it,50)==0,
            fprintf('.\n')
        else
            fprintf('.')
        end
    end
           
    %%---------------------------------------------------------------------
    %% Find best weak learner at current round of boosting
    %%---------------------------------------------------------------------
    [coordinate_wl,polarity_wl,theta_wl,err_wl] = best_weak_learner(Distribution_on_indexes,features,labels);
    % Weak learner weight; eps guards against division by zero when the
    % weighted error is exactly 0.
    alpha = 1/2 * ( log( (1-err_wl)/(err_wl+eps) ) );
    [weak_learner_output]  = evaluate_stump(features,coordinate_wl,polarity_wl,theta_wl);
    % Build Model ---------------------------------------------------------
    model.alphas(it) = alpha;
    model.coordinate_wls(it) = coordinate_wl;
    model.theta_wls(it) = theta_wl;
    model.polarity_wls(it) = polarity_wl;
    % ---------------------------------------------------------------------
    
    % Reweight samples: increase weight of misclassified samples
    % (label .* output < 0) and decrease weight of correct ones.
    Distribution_on_indexes = Distribution_on_indexes .* ...
        exp( -1 * alpha * labels .* weak_learner_output);
    
    % Renormalize so the weights form a probability distribution.
    Distribution_on_indexes = Distribution_on_indexes/sum(Distribution_on_indexes);
    
end

if verbose
    fprintf('\n [TrainAdaboost] Done \n');
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
end % function
