% -------------------------------------------------------------------------
% Matias Di Martino,
% matiasdm@fing.edu.uy
% 10/2012, Cachan, Paris.
% -------------------------------------------------------------------------
%% w = logistig_regression(X,y,[verbose],[tol],[max_iter],w0);
% function that implements the logistic regression for the data x with
% labels y 
% INPUTS:
% X - data Nxd: [x1 x2 x3 ... xN; y1 y2 y3 ... yN; 1 1 1 ... 1]';
% y - labels Nx1: [0 1 1 ... 1]'; (zeros and ones)
% verbose - [def 0] (if 1 images are shown and if 2 also a video output)
% tol - [def = .01] (optional)
% max_iter - [def = 1000] (optional)
% seed - initial w0, [def = zeros];
% OUTPUT:
% w - coefficients, returned as a 1xd row vector: [w1 w2 w3];

function w = logistig_regression(X,y,varargin);
% Logistic regression fitted by Newton-Raphson (IRLS) with a small L2
% regularization term.
% INPUTS:
%   X        - Nxd data matrix (last column typically all ones for the bias)
%   y        - Nx1 label vector of zeros and ones
%   varargin - optional: verbose [def 0], tol [def 0.01],
%              max_iter [def 1000], seed w0 [def zeros(d,1)]
% OUTPUT:
%   w        - 1xd row vector of coefficients (note: transposed on return)

%% check inputs and set default values ------------------------------------
if nargin>2, % verbose is provided
    verbose = varargin{1};
else
    verbose = 0;
end

if nargin>3, % tol is provided
    tol = varargin{2};
else
    tol = 0.01;
end

if nargin>4, % max_iter is provided
    max_iter = varargin{3};
else
    max_iter = 1000;
end

if nargin>5, % seed is provided
    w_0 = varargin{4};
else
    d = size(X,2);
    w_0 = zeros(d,1); % default seed is zeros, as documented in the header
end
% END check inputs and set default values ---------------------------------

%% Initialization ----------------------------------------------------------
diference = 2*tol; % force at least one iteration (when max_iter >= 1)
num_iter = 0;
lambda = 1e-2; % L2 regularization, helps with few samples or many dimensions
g = @(X) 1./(1+exp(-X)); % sigmoid (logistic) function

w_1 = w_0; % guard: keeps the output defined even if the loop never runs

if verbose>1,
    fig_C = figure('name','Energy vs iteration number','NumberTitle','off');
    grid on
end
% -------------------------------------------------------------------------

%% Finding coefficients = argmax{log-likelihood(w) - lambda*||w||^2}
% Newton-Raphson step: w <- w - H\J, where J is the gradient and H the
% Hessian of the regularized log-likelihood (H is negative definite, so
% each step is a maximization step).
while (diference>tol && num_iter<max_iter),
    num_iter = num_iter + 1;

    p = g(X*w_0); % predicted probabilities, Nx1 (computed once per iteration)
    J = X' * (y - p) - 2*lambda*w_0; % gradient of the regularized log-likelihood
    r = p .* (1-p); % diagonal of the IRLS weighting matrix
    R = sparse([1:length(r)],[1:length(r)],r); % NxN sparse diagonal matrix
    H = -1*X'*R*X - 2*lambda*eye(length(w_0)); % Hessian (negative definite)

    w_1 = w_0 - H\J; % Newton-Raphson update
    diference = mean(abs(w_1-w_0)); % mean absolute change, convergence test
    w_0 = w_1;

    % log-likelihood (unregularized); eps avoids log(0)
    C = sum( y.*log(g(X*w_1)+eps) + ( (1-y).*log(1-g(X*w_1)+eps) ) );
    if verbose>0,
        disp(['[logistig_regression] Iteration ' num2str(num_iter) ...
            ' , Dif ' num2str(diference,'%.2f') ' , C ' num2str(C,'%.2f') ])
    end

    if verbose>1,
        figure(fig_C),
        hold on
        plot(num_iter,C,'b+')
    end

end

if verbose>1,
    figure(fig_C), hold off,
    print -depsc CAsFunctionOffNumIter %saving the figure as eps
end

w = w_1'; % NOTE: returned transposed, i.e. as a 1xd row vector




% -------------------------------------------------------------------------








