%% [U,X]=CC_Train(Z,labZ,parameters)
%
% Outputs:
%   U: struct with U{i}, n-dimensional-->R function where H(U{i}) is the
%      portion of the space we will label as class i.
%   X: Coordinates where U is evaluated, X{i} is a column vector containing
%      the i-th coordinate value.
%
% Input:
%   Z:  each row correspond to one sample,
%   labZ: column vector with the labels, 1 2 3 .. N
%   parameters: struct that may contain,
%        .verbose [def 0] {1 display some text, 2 also graphics}
%        .max_iter [def 200]
%        .tol [def 0.01]
%        .delta_t [def .01]
%        .GridSize [def 200]
%        .Sigma  [def 0] width of the kernel used in probability density estimation;
%                        if Sigma = 0, the value is tuned using 10-fold cross validation.
%        .RegularityCoef [def 0] (the higher the coef, the more regular the
%                                 solution)
%        .Lambda0 [def 0] initial value of the Lagrange multiplier that
%                         penalizes overlap between class regions
% -------------------------------------------------------------------------
% 25/4/2013, Facultad de Ingenieria - UdelaR
% Authors: G. Hernandez,  M. Fiori, A. Fernandez and M. Di Martino
% E-mail: matiasdm@fing.edu.uy
% -------------------------------------------------------------------------

function [U,X]=CC_Train(Z,labZ,parameters);
% Trains the classifier by evolving one level-set function U{i} per class
% over a 2-D grid: gradient ascent grows each region {U{i} > 0} to capture
% the estimated density mass of its class, while a Lagrange multiplier
% (lambda) increasingly penalizes overlap between regions of different
% classes.
% NOTE(review): the grid is built with a 2-D meshgrid, so Z is assumed to
% have exactly two columns (2-D feature vectors) -- confirm with callers.

%% Load and set parameters
% Each parameter falls back to a default when absent from the struct.

if isfield(parameters,'verbose')
    verbose = parameters.verbose;
else % set default value
    verbose = 0;
end

if isfield(parameters,'max_iter')
    max_iter = parameters.max_iter;
else % set default value
    max_iter = 200;
end

if isfield(parameters,'tol')
    tol = parameters.tol;
else % set default value
    tol = 0.01;
end

if isfield(parameters,'delta_t')
    delta_t = parameters.delta_t;
else %set default value
    delta_t = .01;
end

if isfield(parameters,'GridSize'),
    GridSize = parameters.GridSize;
else % set default value
    GridSize = 200;
end

if isfield(parameters, 'Lambda0')
    Lambda0 = parameters.Lambda0;
else % set default value
    Lambda0 = 0 ;
end

if isfield(parameters,'Sigma'),
    Sigma = parameters.Sigma;
else %set default value
    Sigma = 0;
end

if isfield(parameters,'RegularityCoef'),
    RegularityCoef = parameters.RegularityCoef;
else % set default value, 
    RegularityCoef = 0;
end


% END Load and set parameters

%% Training %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% /////////////////////////////////////////////////////////////////////////
% ///// Initialization and parameters' selection //////////////////////////
% /////////////////////////////////////////////////////////////////////////

NumberOfClasses = max(labZ); % labels are assumed to be 1,2,...,N
NumberOfSamples = size(Z,1);

% ////////////////////////////////////////////////////////////////////////
if Sigma == 0, % ///// Choose sigma using 10-fold cross validation ///////
% ////////////////////////////////////////////////////////////////////////
    [Sigmas] = logspace(-3,0,10); % candidate kernel widths, 1e-3 .. 1
    if verbose>0, fprintf('\n [CC_train] Tuning sigma using 10fold cross validation \n'), end
    for s=1:length(Sigmas),
        PredictedLabels = 0*labZ; % labels initialization, 
        for n = 1:10, % cross-validation folds, 
            if verbose>0, fprintf('[%d/%d , %d/%d] \n',s,length(Sigmas),n,10), end
            % Fold n holds out every 10th sample starting at index n.
            TestInd = [n:10:NumberOfSamples];
            TrainInd = setdiff([1:NumberOfSamples],TestInd);
            
            TestSamples = Z(TestInd,:); TestLabels = labZ(TestInd);
            TrainSamples = Z(TrainInd,:); TrainLabels = labZ(TrainInd);
            
            % Train CC recursively on this fold with the candidate Sigma
            % fixed (so this tuning branch is not re-entered) and half the
            % iterations, to keep cross validation fast.
            p = parameters;
            p.verbose = 0;
            p.max_iter = max_iter/2;
            p.Sigma = Sigmas(s);
            [U,X] = CC_Train(TrainSamples,TrainLabels,p);
            
            % Classify CC (CC_Classify is defined elsewhere)
            p2.verbose = 0;
           [PredictedLabels(TestInd)] = CC_Classify(TestSamples,U,X,p2);
        end
        
        % Evaluate sigma: product of per-class recalls (see ComputeGmean).
        Gmeans(s) = ComputeGmean(PredictedLabels,labZ);
    end
    [maxGmean,SigmaInd] = max(Gmeans); % keep the best-scoring candidate
    sigma = Sigmas(SigmaInd);
    if verbose>0, fprintf('[CC_train] Optimal Sigma : %1.4g \n', sigma), end
else 
   sigma=Sigma; % width of the kernel used in probability density estimation
end

GridStep=(max(Z(:)) - min(Z(:)))/GridSize; % step in the grid
max_iter_reinit = 20; % Maximum number of iterations inside the (currently
                      % disabled) reinitialization step further below.

% Dirac delta and Heaviside step approximations:
%   delta: box pulse of height 10000/GridStep on |U| < 10000*GridStep/2
%   H:     indicator of the positive part of U (the region claimed by a class)
delta = @(Uaux)  10000 * 1/(GridStep) * (abs(Uaux) < (10000 * GridStep/2));
H = @(Uaux) double(Uaux > 0);

iter=0; % initialization 
dif=2*tol+1; % initialization (forces at least one iteration of the loop) 
lambda=Lambda0; % initialization (lagrange multiplier) 

% /////////////////////////////////////////////////////////////////////
% ////////////// Densities estimation /////////////////////////////////
% /////////////////////////////////////////////////////////////////////
% NOTE(review): the grid spans min(Z(:))*1.2 .. max(Z(:))*1.2; when
% min(Z(:)) is positive this LOWER bound is larger than the data minimum,
% so some samples may fall outside the grid -- confirm this is intended.
[x,y] = meshgrid(min(Z(:))*1.2:GridStep:max(Z(:))*1.2,min(Z(:))*1.2:GridStep:max(Z(:))*1.2);
for i=1:NumberOfClasses
    % armoDensidad (defined elsewhere) evaluates the density estimate of
    % class i on the grid; presumably a kernel density estimator with
    % kernel width sigma -- verify against its definition.
    f{i} = armoDensidad(x,y,Z(labZ==i,:),sigma);
    if verbose>1
        %figure('name',['class: ' num2str(i)]);  mesh(x,y,f{i});
    end
end
% ----------------------------------------------------------------------

% initialization of U{i}: zero-mean copy of the density, so the initial
% zero level set separates high-density from low-density grid cells.
for i=1:NumberOfClasses
    U{i} = f{i} - mean(f{i}(:));
end

if verbose>1, % display also some charts.
   h2=figure; 
   h3=figure;
end


% define some shortcuts: forward differences along columns (dx) and rows
% (dy), padded with a zero last column/row to preserve the matrix size.
dx = @(G) [G(:,2:end)-G(:,1:end-1) 0*G(:,1)];
dy = @(G) [G(2:end,:)-G(1:end-1,:); 0*G(1,:)];


% /////////////////////////////////////////////////////////////////////////
% ///// Optimization of U{i} functions ////////////////////////////////////
% /////////////////////////////////////////////////////////////////////////

 
while (dif>=tol) && (iter < max_iter)
    iter=iter+1;
    
    % This is not critical but increases convergence speed:
    % the step size grows by 10x every 100 iterations.
    if mod(iter,100)==0, delta_t = delta_t*10; end;
    
    for k=1:NumberOfClasses
        % A{k}: density mass of class k captured by the current region
        % {U{k} > 0}, approximated on the grid (cell area = GridStep^2).
        A{k} = sum(f{k}(:).*H(U{k}(:)))*GridStep^2;
        U_old{k} = U{k};
    end
    
    for k=1:NumberOfClasses
        
        % Term1: ascent direction for the product of captured masses
        % prod_j A{j}; the other classes' masses enter as constant
        % factors, and delta(U{k}) localizes the update around the zero
        % level set of U{k}.
        Term1=1;
        for j=[1:k-1 k+1:NumberOfClasses]
            Term1 = Term1 * A{j};
        end
        Term1 = Term1 * f{k} .* delta(U{k});
        
        % Term2: overlap penalty -- pushes U{k} down wherever its zero
        % level set crosses a region already claimed by another class.
        Term2=0;
        for j=[1:k-1 k+1:NumberOfClasses]
            Term2 = Term2 + delta(U{k}) .* H(U{j});
        end
        Term2 = Term2 * 2 * lambda;
        
        % We add a third term that imposes regularity in the solution: 
        % the idea is to minimize U's gradient (discrete second
        % differences built from the forward-difference shortcuts).
        Term3 = + 2 * RegularityCoef * ( dx(dx(U{k}))+dy(dy(U{k})) );
        
        % Term1 - Term2 is the gradient 
        U{k} = U{k} +  delta_t * (Term1 - Term2 - Term3); % step
        
        % Truncate U to [-1,1] (only the zero level set of U{k} matters)
        U{k}(abs(U{k})>1) = 1*sign(U{k}(abs(U{k})>1));
    end
    
    % Update the overlapping: S = total pairwise overlap area between the
    % positive regions of all class pairs.
    S=0;
    for i=1:NumberOfClasses
        for j=i+1:NumberOfClasses
            S = S + sum( H(U{i}(:)).*H(U{j}(:)) )*GridStep^2;
        end
    end
    
    % Update lambda: dual ascent on the overlap constraint -- the more the
    % regions overlap, the stronger the penalty becomes.
    lambda = lambda + delta_t * S;
    
    % dif: largest Frobenius-norm change of any U{k} this iteration
    % (stopping criterion, together with max_iter).
    dif=0;
    for k=1:NumberOfClasses,
        if norm(U{k}-U_old{k},'fro') > dif
            dif = norm(U{k}-U_old{k},'fro');
        end
    end
    
    % Begin // edited by: matiasdm 9/7/2013 -----------------------------
    % Reinitialization (to keep the zero level set in stable shape)
%     if mod(iter,20)==0,
%         for k=1:NumberOfClasses
%             U{k} = reinicializar(U{k},max_iter_reinit,delta_t,tol,1,'paper');            
%         end
%     end
    % End // edited by: matiasdm 9/7/2013 -----------------------------

    
    % show progress (text for verbose>0, plots for verbose>1)
    if verbose>0,
            Gmean=1;
            NormGrad = 0;
            for k=1:NumberOfClasses 
                 Gmean = Gmean * sum(f{k}(:).*H(U{k}(:)))*GridStep^2;            
                 Ux = dx(U{k}); Uy=dy(U{k});
                 NormGrad = NormGrad + mean( Ux(:).^2 + Uy(:).^2 );
            end 
            Snorm = S/(NumberOfClasses^2);
       
           
        fprintf('iter: %i  lambda: %1.4f   dif: %1.4g   Gmean: %1.4g   S: %1.4g  NG %1.3g \n'...
            ,iter,lambda,dif,Gmean,Snorm,NormGrad);
        if verbose>1, % display also some graphics,
            % Show up to the first three class regions as RGB channels.
            set(h2,'name',['iter ' num2str(iter)]); figure(h2);
            I(:,:,1) = H(U{1});
            I(:,:,2) = H(U{2});
            if NumberOfClasses>2,  I(:,:,3) = H(U{3});
            else I(:,:,3) = I(:,:,1)*0;
            end
            imshow(I,[],'InitialMagnification',200); figure(h2);
            drawnow,
            
            figure(h3), set(h3,'Position',[958 433 408 229]), hold on,
            plot(iter,Gmean,'r+');
            plot(iter,Snorm,'+b');
            plot(iter,dif,'+g');
            legend('Gmean','S','diff')
            ylim([0 1])
            figure(h3), drawnow,
        end
    end
    
end

% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% Set outputs to the correct format
% Binarize: U{class} becomes a column vector with 1 inside the region
% assigned to the class and 0 outside.
for class = 1:NumberOfClasses,
    U{class} = double((U{class}(:)>0));
end

% Grid coordinates where U is evaluated (column vectors).
X{1} = x(:); X{2} = y(:);


end % END TrainClassifier

%% Auxiliary Functions %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [GM] = ComputeGmean(PL,L)
% ComputeGmean: product of the per-class recalls.
% For each class c = 1..max(L(:)), computes the fraction of class-c
% samples that were predicted as c, and returns the product of those
% fractions (used as a scalar quality score to compare classifiers).
%
%   PL: predicted labels (same size as L)
%   L:  ground-truth labels, taking values 1,2,...,N
%
% NOTE: a class with no samples in L yields 0/0 = NaN, which
% propagates into GM (same as the original behavior).
GM = 1;
for c = 1:max(L(:))
    members = (L == c);                         % true members of class c
    recall  = sum(PL(members) == c) / sum(members);
    GM = GM * recall;
end
end
% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
