%% [U,X]=CC_Train_nd(Z,labZ,parameters)
%
% Outputs:
%   U: struct with U{i}, n-dimensional-->R function where H(U{i}) is the
%      portion of the space we will label as class i.
%   X: Coordinates where U is evaluated, X{i} is a column vector containing
%      the i-th coordinate value.
%
% Input:
%   Z:  each row corresponds to one sample,
%   labZ: column vector with the labels, 1 2 3 .. N
%   parameters: struct that may contain,
%        .verbose [def 0] 1 display some text, 2 display also some graphics
%        .max_iter [def 200]
%        .tol [def 0.01]
%        .delta_t [def .01]
%        .GridSize [def 200]
%        .Sigma  [def 0] width of the kernel used in probability density estimation;
%                        if Sigma = 0, the value is tuned using 10-fold cross validation.
%        .SigmaInterval [def [10^-3 0]] defines the range in which the search for
%                        the optimal sigma is performed (only meaningful if the
%                        Sigma option is set to 0).
%        .RegularityCoef [def 0] (higher the coef, more regular the
%                                 solution)
% -------------------------------------------------------------------------
% 25/4/2013, Facultad de Ingenieria - UdelaR
% Authors: G. Hernandez,  M. Fiori, A. Fernandez and M. Di Martino
% E-mail: matiasdm@fing.edu.uy
% -------------------------------------------------------------------------

function [U,X]=CC_Train_nd(Z,labZ,parameters);
% Train a multi-class level-set classifier: each class i gets a function
% U{i} defined on a regular n-dimensional grid X, and the region H(U{i})>0
% is the portion of feature space labeled as class i. Class densities are
% estimated by kernel density estimation, and the U{i} are evolved by a
% gradient flow that maximizes the per-class probability mass inside each
% region while a Lagrange multiplier penalizes overlap between regions.
% See the help header above for the full input/output description.

%% Load and set parameters
% Each option is read from the 'parameters' struct when present; otherwise
% a default value is used.

if isfield(parameters,'verbose')
    verbose = parameters.verbose;
else % set default value
    verbose = 0;
end

if isfield(parameters,'max_iter')
    max_iter = parameters.max_iter;
else % set default value
    max_iter = 200;
end

if isfield(parameters,'tol')
    tol = parameters.tol;
else % set default value
    tol = 0.01;
end

if isfield(parameters,'delta_t')
    delta_t = parameters.delta_t;
else %set default value
    delta_t = .01;
end

if isfield(parameters,'GridSize'),
    GridSize = parameters.GridSize;
else % set default value
    GridSize = 200;
end

% Lambda0: initial value of the Lagrange multiplier that penalizes region
% overlap (NOTE(review): this option is not listed in the help header above).
if isfield(parameters, 'Lambda0')
    Lambda0 = parameters.Lambda0;
else % set default value
    Lambda0 = 0 ;
end

if isfield(parameters,'Sigma'),
    Sigma = parameters.Sigma;
else %set default value
    Sigma = 0;
end

if isfield(parameters,'RegularityCoef'),
    RegularityCoef = parameters.RegularityCoef;
else % set default value, 
    RegularityCoef = 0;
end


% END Load and set parameters

%% Training %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% /////////////////////////////////////////////////////////////////////////
% ///// Initialization and parameters' selection //////////////////////////
% /////////////////////////////////////////////////////////////////////////

% Labels are assumed to be 1..N (see help header), so max gives the count.
NumberOfClasses = max(labZ);
NumberOfSamples = size(Z,1);

% ////////////////////////////////////////////////////////////////////////
if Sigma == 0, % ///// Choose sigma using 10-fold cross validation ///////
% ////////////////////////////////////////////////////////////////////////
    % Set the interval in which look for the optimal sigma ------
    if isfield(parameters,'SigmaInterval');
        SigmaI = parameters.SigmaInterval(1);
        SigmaF = parameters.SigmaInterval(2);
    else % set default value, 
        SigmaI = 10^-3;
        SigmaF = 0;
    end
    % -----------------------------------------------------------
    % NOTE(review): with the default SigmaF = 0, log10(SigmaF) = -Inf and
    % logspace produces a degenerate candidate set — confirm the intended
    % default upper bound for the sigma search.
    [Sigmas] = logspace(log10(SigmaI),log10(SigmaF),10);
    if verbose>0, fprintf(...
            '[CC_train] Looking for the optimal sigma in the interval %6.2g %6.2g (10-cv) \n',...
            SigmaI, SigmaF), 
    end
    for s=1:length(Sigmas),
        PredictedLabels = 0*labZ; % labels initialization, 
        for n = 1:10, % cross validations, 
            if verbose, fprintf('[%d/%d , %d/%d] \n',s,length(Sigmas),n,10), end
            % Fold n takes every 10th sample starting at n; the rest train.
            TestInd = [n:10:NumberOfSamples];
            TrainInd = setdiff([1:NumberOfSamples],TestInd);
            
            TestSamples = Z(TestInd,:); TestLabels = labZ(TestInd);
            TrainSamples = Z(TrainInd,:); TrainLabels = labZ(TrainInd);
            
            % Train CC (recursive call with a fixed sigma candidate and
            % half the iteration budget, to keep the CV search cheap).
            p = parameters;
            p.verbose = 0;
            p.max_iter = max_iter/2;
            p.Sigma = Sigmas(s);
            % U and X are used as scratch here; they are fully recomputed
            % below once the optimal sigma has been selected.
            [U,X] = CC_Train_nd(TrainSamples,TrainLabels,p);
            
            % Classify CC
            p2.verbose = 0;
            [PredictedLabels(TestInd)] = CC_Classify_nd(TestSamples,U,X,p2);
        end
        
        % Evaluate sigma (product of per-class recalls over all folds),
        Gmeans(s) = ComputeGmean(PredictedLabels,labZ);
    end
    % Keep the sigma candidate with the best CV score.
    [maxGmean,SigmaInd] = max(Gmeans);
    sigma = Sigmas(SigmaInd);
    if verbose>0, % display optimal sigma value,
        fprintf('[CC_Train] Optimal sigma %12.8f ', sigma);
    end

else 
   sigma=Sigma; % width of the kernel used in probability density estimation
end

% The grid spans the global min/max over ALL coordinates of Z, i.e. the
% same range is used in every dimension.
GridStep=(max(Z(:)) - min(Z(:)))/GridSize; % step in the grid
% NOTE(review): max_iter_reinit is set but not used anywhere in this file.
max_iter_reinit = 20; % Maximum number of iterations inside "Reinitialization"

% Smoothed Dirac delta and Heaviside definitions, 
% NOTE(review): the 10000 factor makes the delta's support 5000 grid steps
% wide (essentially the whole grid) — confirm this scaling is intended.
delta = @(Uaux)  10000 * 1/(GridStep) * (abs(Uaux) < (10000 * GridStep/2));
H = @(Uaux) double(Uaux > 0);
norma = @(U) norm(U(:));

iter=0; % initialization 
dif=2*tol+1; % initialization (forces at least one loop iteration) 
lambda=Lambda0; % initialization (lagrange multiplier) 

% /////////////////////////////////////////////////////////////////////
% ////////////// Densities estimation /////////////////////////////////
% /////////////////////////////////////////////////////////////////////
dim = size(Z,2);

% X{d} holds the d-th coordinate of every grid node (ndgrid layout).
xgv=min(Z(:)):GridStep:max(Z(:));
[X{1:dim}] = ndgrid(xgv);

for i=1:NumberOfClasses
    % f{i}: density estimate for class i evaluated on the grid
    % (armoDensidad_nd is an external helper — presumably a kernel density
    % estimator with bandwidth sigma; defined elsewhere in the project).
    f{i} = armoDensidad_nd(X,Z(labZ==i,:),sigma);
    if verbose
        %figure('name',['class: ' num2str(i)]);  mesh(x,y,f{i});
    end
end
% ----------------------------------------------------------------------

% initialization of U{i}: the zero level set starts where the class
% density crosses its own mean value.
for i=1:NumberOfClasses
    U{i} = f{i} - mean(f{i}(:));
end

if verbose>1
   h1=figure; 
   h2=figure; 
   h3=figure;
   h4=figure;
end



% /////////////////////////////////////////////////////////////////////////
% ///// Optimization of U{i} functions ////////////////////////////////////
% /////////////////////////////////////////////////////////////////////////

 
% Gradient-ascent evolution of the U{i}, stopped when the largest change
% of any U{k} falls below tol or max_iter is reached.
while (dif>=tol) && (iter < max_iter)
    iter=iter+1;
    
    % This is not critical but increase convergence speed:
    % every 100 iterations, take larger steps and relax the regularity.
    if mod(iter,100)==0,
        delta_t = delta_t*10;
        RegularityCoef = RegularityCoef/10;
    end;
    
    for k=1:NumberOfClasses
        % A{k}: Riemann-sum approximation of the probability mass of class
        % k inside its current region {U{k} > 0}.
        A{k} = sum(f{k}(:).*H(U{k}(:)))*GridStep^dim;
        U_old{k} = U{k};
    end
    
    for k=1:NumberOfClasses
        
        % Term1: data-fidelity force, weighted by the product of the
        % other classes' captured masses.
        Term1=1;
        for j=[1:k-1 k+1:NumberOfClasses]
            Term1 = Term1 * A{j};
        end
        Term1 = Term1 * f{k} .* delta(U{k});
        
        % Term2: overlap penalty — pushes U{k} down wherever its level
        % set intersects another class's region.
        Term2=0;
        for j=[1:k-1 k+1:NumberOfClasses]
            Term2 = Term2 + delta(U{k}) .* H(U{j});
        end
        Term2 = Term2 * 2 * lambda;
        
        % We add a third term that impose regularity in the solution, 
        % the idea is to minimize U's gradient
 %       Term3 = + 2 * RegularityCoef * ( dx(dx(U{k}))+dy(dy(U{k})) );
        % NOTE(review): Term3 is computed but NOT used in the update below
        % (the line applying it is commented out) — confirm whether the
        % regularity term should be active.
        Term3 = + 2 * RegularityCoef * ( del2(U{k}) );
        

        % Term1 - Term2 is the gradient 
        %U{k} = U{k} +  delta_t * (Term1 - Term2 + Term3); % step
        U{k} = U{k} +  delta_t * (Term1 - Term2); % step
        
      
        % Truncate U (is only significant the zero-levelset of U{k})
        U{k}(abs(U{k})>1) = 1*sign(U{k}(abs(U{k})>1));
    end
    
    % Update the overlapping: S approximates the total volume shared by
    % any pair of class regions.
    S=0;
    for i=1:NumberOfClasses
        for j=i+1:NumberOfClasses
            S = S + sum( H(U{i}(:)).*H(U{j}(:)) )*GridStep^dim;
        end
    end
    
    % Update lambda (gradient ascent on the multiplier: more overlap ->
    % stronger penalty next iteration)
    lambda = lambda + delta_t * S;
    
    % dif: largest L2 change among all U{k} this iteration (stop criterion)
    dif=0;
    for k=1:NumberOfClasses,
        if norma(U{k}-U_old{k}) > dif
            dif = norma(U{k}-U_old{k});
        end
    end
    
   
    %show progress / diagnostic plots
    if verbose>0,
            Gmean=1;
%            NormGrad = 0;
            for k=1:NumberOfClasses 
                 Gmean = Gmean * sum(f{k}(:).*H(U{k}(:)))*GridStep^dim;
            end 
            Snorm = S/(NumberOfClasses^2);
       
           
        fprintf('iter: %i  lambda: %1.4f   dif: %1.4g   Gmean: %1.4g   S: %1.4g \n',iter,lambda,dif,Gmean,Snorm);

        % 2-D case only: render the current regions as an RGB image.
        % NOTE(review): non-short-circuit '&' is used here instead of '&&'
        % — works for scalars but confirm it is intentional.
        if (dim == 2) & verbose>1 & iter>1,
            PrintFigures = [2 3 4]; %figure we want to print, let it empty if 
            % no figures must be printed
            set(h1,'name',['iter ' num2str(iter)]); figure(h1);
            I(:,:,1) = H(U{1});
            I(:,:,2) = H(U{2});
            if NumberOfClasses>2,  I(:,:,3) = H(U{3});
            else I(:,:,3) = I(:,:,1)*0;
            end
            Colors = {'r','g','b'};
            figure(h1); clf,
            imagesc(X{1}(:,1),X{2}(1,:),I); figure(h1);
            drawnow, hold on
            for class = 1:NumberOfClasses,
            plot(Z(labZ==class,2),Z(labZ==class,1),...
                     'Marker','o',...
                     'MarkerFaceColor', Colors{class},...
                     'MarkerEdgeColor','k',...
                     'LineStyle','none'); hold on,
            end
            drawnow,
            if sum(iter==PrintFigures')>0, % print that figure,
                print(h1,'-depsc ',['NumIter' num2str(iter) '.eps'])
            end
        end
        
        if verbose >1,
            figure(h2), set(h2,'Position',[958 433 408 229]), hold on,
            plot(iter,Gmean,'r+');
            ylim([0 1])
            drawnow
            legend('Gmean')
            figure(h3), set(h3,'Position',[958 433 408 229]), hold on,
            plot(iter,Snorm,'+b');
            legend('S')
            drawnow
            figure(h4), set(h4,'Position',[958 433 408 229]), hold on,
            plot(iter,dif,'+g');
            legend('diff')
            drawnow
        end
        
    end
    
end

% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% begin // edited by: matiasdm 9/7/13
% /////////////////////////////////////////////////////////////////////////
% //// Set outputs to the correct format //////////////////////////////////
% /////////////////////////////////////////////////////////////////////////
% CC_Classify expect one column vector for each dimension and a column
% vector U
% U{class} is binarized: 1 inside the class region, 0 outside.
for class = 1:NumberOfClasses,
    U{class} = double((U{class}(:)>0));
end

for d = 1:dim;
    X{d} = X{d}(:); 
end
% end // edited by: matiasdm 9/7/13
% -------------------------------------------------------------------------

end % END TrainClassifier

%% Auxiliary Functions %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [GM] = ComputeGmean(PL,L);
% ComputeGmean: score a set of predictions as the product of the per-class
% recalls (fraction of samples of each class that were predicted correctly).
% Note this is the un-rooted product, not a true geometric mean.
%
% Inputs:
%   PL: vector of predicted labels (values in 1..max(L))
%   L:  vector of true labels (values 1 2 3 .. N)
% Output:
%   GM: product over all classes of (correct predictions)/(class size)
    GM = 1;
    for class = 1:max(L(:)),
        hits      = sum( PL(L==class) == class ); % correctly labeled samples
        classSize = sum( L==class );              % total samples of this class
        GM = GM * hits/classSize;
    end
end
% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
