% -------------------------------------------------------------------------
% Matias Di Martino 
% matiasdm@fing.edu.uy
% 10/2012, 
% -------------------------------------------------------------------------
% [model,[classifier_labels]] = SVM_train(data, labels, [verbose], [Gamma], [Cost], [SelectedMeasure])
% this function trains different SVM algorithms using cross validation to
% find the optimal set of parameters. 
% Inputs:
%   - data: Nxd (N = number of instances, d = number of features);
%   - labels: Nx1 (-1 negative class and +1 positive class);
%   - verbose: [default 0]
%   - Gamma: 1x3, [MinGamma MaxGamma NumGamma], default [.1 10 20]
%               if Gamma = [0] linear svm is performed. 
%   - Cost: 1x3, [MinCost MaxCost NumCost], default [1e-2 200 20]
%   - SelectedMeasure [def 'Accuracy'], can be {'Gmean', 'Accuracy'}; this measure
%                     will be used to define the optimal condition, 
% Outputs:
%   - model: struct used by svmpredict_libsvm for the classification.
%   - classifier_labels: [optional] labels for the classification of the
%                        training samples


function [model,varargout] = SVM_train(data, labels, varargin)
% SVM_train  Train an SVM (RBF kernel, or linear when Gamma == 0), selecting
% the (gamma, cost) pair by 10-fold cross validation over log-spaced grids.
%
% Inputs:
%   data            - Nxd (N instances, d features).
%   labels          - Nx1 class labels; see NOTE at NumberOfClases below
%                     about the expected label values.
%   varargin{1}     - verbose flag [default 0]: prints progress and, for 2D
%                     two-class data, plots the decision frontier.
%   varargin{2}     - Gamma = [MinGamma MaxGamma NumGamma] [def [.1 10 20]];
%                     Gamma = 0 switches to a linear kernel.
%   varargin{3}     - Cost = [MinCost MaxCost NumCost] [def [1e-2 200 20]].
%   varargin{4}     - SelectedMeasure, 'Accuracy' (default) or 'Gmean'.
% Outputs:
%   model           - libsvm model struct (as returned by svmtrain_libsvm).
%   varargout{1}    - predicted labels of the training samples (optional).
%
% External dependencies (defined elsewhere in the project): split_data,
% svmtrain_libsvm, svmpredict_libsvm.

%% checking parameters and setting default values --------------------------
if nargin>2, % verbose is provided,
    verbose = varargin{1};
else % set default value
    verbose = 0;
end

if nargin>3, % gamma range is provided,
    Gamma= varargin{2};
else % set default value
    Gamma = [.1 10 20];
end

if nargin>4, % cost range is provided,
    Cost= varargin{3};
else % set default value
    Cost = [1e-2 200 20];
end

if nargin>5, % selected measure is provided,
    SelectedMeasure = varargin{4};
else % set default value
    SelectedMeasure = 'Accuracy';
end


% -------------------------------------------------------------------------

%% inicialization and parameters range determination ----------------------
NumberOfCrossValidations = 10; % number of cross validation folds

% NOTE(review): vectorized comparison -- an `if` on a vector is true only
% when ALL entries are nonzero, so this holds for the default 1x3 Gamma and
% fails for the scalar Gamma = 0 (linear-kernel) case. Works as intended,
% though `any(Gamma ~= 0)` would state the intent more clearly.
if Gamma ~= 0;
    InitialGamma = Gamma(1);
    FinalGamma = Gamma(2);
    NumberOfGammas = Gamma(3);
    % log-spaced gamma grid between the requested bounds,
    gamma_range = logspace(log10(InitialGamma),log10(FinalGamma),NumberOfGammas);
else
    % linear kernel: single dummy "gamma" so the grid loop below runs once,
    gamma_range = 0;
    NumberOfGammas = 0;
end

InitialCost = Cost(1);
FinalCost = Cost(2);
NumberOfCosts = Cost(3);
% log-spaced cost grid between the requested bounds,
cost_range = logspace(log10(InitialCost),log10(FinalCost),NumberOfCosts);
% CV error for every (gamma, cost) pair of the grid,
generalization_error=zeros(size(gamma_range,2),size(cost_range,2));
nsamples = size(data,1);
% NOTE(review): classes are assumed to be labeled 1..K here. The file header
% mentions -1/+1 labels, for which max(labels) == 1 and only the positive
% class would be counted below -- confirm the label convention with callers.
NumberOfClases = max(labels);
% -------------------------------------------------------------------------

%% For each combination of Gamma and Cost use CV to find the optimal pair.
for i=1:size(gamma_range,2)
    gamma=gamma_range(1,i);
    
    for j=1:size(cost_range,2);
        cost   =cost_range(1,j);
        
        % inicialize TPs: per-class true-positive counters, accumulated
        % over the CV folds of this particular (gamma, cost) pair,
        for c = 1:NumberOfClases, 
            TP(c) = 0;
        end
        
        if verbose,
            fprintf('[SVM_train] gamma = %.2f  [%i out of %i],  C = %.2f  [%i out of %i]\n',gamma,i,NumberOfGammas,cost,j,NumberOfCosts);
        end
        
        for validation_run=1:NumberOfCrossValidations
            if verbose,
                fprintf('.');
            end
            
            %split data into training set and validation set (fold number
            %`validation_run` out of `NumberOfCrossValidations`);
            %split_data is given samples in columns, hence the transposes.
            [training_set_inputs,training_set_targets,validation_set_features,...
                validation_set_targets] =  split_data(...
                data',labels',nsamples,NumberOfCrossValidations,validation_run);
            
            %Training the svm (-s 0: C-SVC; -g: RBF gamma; -c: cost).
            %NOTE(review): the transpose in `cost'` is a no-op on a scalar.
            if gamma ~= 0;
                parameter_string = sprintf('-s 0 -g %.2f -c %.9f',gamma,cost');
            else %if gamma == 0 use linear svm (-t 0: linear kernel)
                parameter_string = sprintf('-s 0 -t 0 -c %.9f',cost');
            end
            model = svmtrain_libsvm(training_set_targets',training_set_inputs',parameter_string);
            
            %Test on the validation set
            [predict_label, accuracy, dec_values] = svmpredict_libsvm(validation_set_targets', ...
                validation_set_features', model);
            
            %Accumulate the number of TruePositives for each class,
            for c = 1:NumberOfClases,
                TP(c) = TP(c) + sum( (validation_set_targets' == predict_label)...
                                     & (validation_set_targets' == c) );                                 
            end
            
            
        end
        if verbose,
            fprintf(' \n');
        end
        
        % error = 1 - overall accuracy; the validation folds together cover
        % every sample exactly once, hence the division by nsamples,
        generalization_error(i,j)=1 - ( sum(TP(:))/nsamples );
        % product of per-class recalls (no K-th root is taken, so this is
        % monotone in -- not equal to -- the true geometric mean; the
        % argmax selection below is unaffected),
        generalization_gmean(i,j)=1;
        for c = 1:NumberOfClases,
            generalization_gmean(i,j) = generalization_gmean(i,j) * ...
                     (TP(c) / sum(labels==c));
        end
    end
end


%Picking the best gamma and cost: the pair minimizing the CV error for
%'Accuracy', or maximizing the recall product for 'Gmean'. Ties are
%broken by taking the first match (index (1) below).

switch SelectedMeasure,
    case 'Accuracy',
        [gamma_ind,cost_ind]  = find(generalization_error==min(generalization_error(:)));
    case 'Gmean',
        [gamma_ind,cost_ind]  = find(generalization_gmean==max(generalization_gmean(:)));
end

gamma  = gamma_range(1,gamma_ind(1));
cost   = cost_range(1,cost_ind(1));

if verbose, 
    disp(['[SVM_train] Optimal Gamma = ' num2str(gamma) ' | Optimal Cost = ' num2str(cost)])
end
% -------------------------------------------------------------------------

%% Generates the model corresponding to the best gamma and cost -----------
%Training the svm on the WHOLE data set with the selected parameters,
if gamma ~= 0;
    parameter_string = sprintf('-s 0 -g %.2f -c %.9f',gamma,cost');
else %if gamma == 0 use linear svm
    parameter_string = sprintf('-s 0 -t 0 -c %.9f',cost');
end
model            = svmtrain_libsvm(labels,data,parameter_string);
% -------------------------------------------------------------------------

% Classifying training samples (resubstitution labels, returned as the
% optional second output) -------------------------------------------------
[classifier_labels, accuracy, dec_values] = svmpredict_libsvm(labels, data, model);
varargout{1} = classifier_labels;
% -------------------------------------------------------------------------

%% Visualization of support vectors and frontiers (just for 2D data and 2 classes) ------

DataIsTwoDimensional = (size(data,2) == 2);
if verbose && DataIsTwoDimensional && model.nr_class<3,
    x_min = min(data(:,1)); x_max = max(data(:,1));
    delta_x = ( x_max(1) - x_min(1) ) / 200; % ~200 grid steps per axis
    y_min = min(data(:,2)); y_max = max(data(:,2));
    delta_y = ( y_max(1) - y_min(1) ) / 200;
    % build a grid to see the decision function;
    grid_x = [x_min(1): delta_x: x_max(1)]; grid_y = [y_min(1): delta_y: y_max(1)]; 
    
    SVs         = model.SVs;
    [gr_X,gr_Y] = meshgrid(grid_x,grid_y);
    [sv,sh]     = size(gr_X);
    coords      = [gr_X(:)';gr_Y(:)'];
    dummy       = zeros(1,size(coords,2)); % placeholder labels: only dec_values are used
    SV_labels = sign(model.sv_coef);

    % evaluate the decision function at every grid point,
    [~, ~, dec_values]   = svmpredict_libsvm(dummy',coords', model);
    values               = reshape(dec_values,[sv,sh]);

    % level curves at the margins (-1, +1) and the decision boundary (0),
    % plus the support vectors of each class,
    figure,contour(gr_X,gr_Y,values,[-1,0,1],'linewidth',2);
    hold on,scatter(SVs(SV_labels==1,1),SVs(SV_labels==1,2),'r','filled'); 
    scatter(SVs(SV_labels==-1,1),SVs(SV_labels==-1,2),'y','filled'); 
    axis off;
    axis ij;
end
% -------------------------------------------------------------------------

end

%% Auxiliary Functions %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [GM] = ComputeGmean(PL, L)
% ComputeGmean  Product of per-class recalls over classes 1..max(L).
%   GM = ComputeGmean(PL, L) takes predicted labels PL and ground-truth
%   labels L (positive integers 1..K) and returns the product, over every
%   class c, of the recall sum(PL(L==c)==c)/sum(L==c). If no class is
%   present (max(L) < 1) the empty product 1 is returned.
%
%   NOTE(review): no K-th root is taken, so this is the recall product
%   rather than the geometric mean proper; rankings by this score are the
%   same either way.
    class_ids = 1:max(L(:));
    per_class_recall = arrayfun(@(c) sum(PL(L == c) == c) / sum(L == c), class_ids);
    GM = prod(per_class_recall);
end
% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
