function w = logreg(X,y,lambda)
% LOGREG  Fit L2-regularized logistic regression by Newton's method.
%   w = LOGREG(X,y,lambda) returns the p-by-1 weight vector maximizing the
%   penalized log-likelihood
%       L(w) = sum_i [ y_i*log(s(x_i'w)) + (1-y_i)*log(1-s(x_i'w)) ]
%              - (lambda/2)*||w||^2
%   where s is the logistic sigmoid, X is n-by-p, y is n-by-1 with entries
%   in {0,1}, and lambda >= 0 is the ridge penalty (lambda > 0 recommended
%   so the Hessian stays invertible).
%
%   When a Newton step fails to increase L, the iteration falls back to a
%   line-searched step along the gradient instead.

    s = @(t) 1./(1+exp(-t)); %sigmoid function
    % Numerically stable log(1+exp(z)): avoids overflow for large |z|.
    softplus = @(z) max(z,0) + log1p(exp(-abs(z)));
    % Penalized log-likelihood.  The -(lambda/2)*||w||^2 term keeps L
    % consistent with the regularized gradient G and Hessian H below.
    L = @(w) -(1-y)'*(X*w) - sum(softplus(-X*w)) - (lambda/2)*(w'*w); %likelihood
    G = @(w) X'*(y-s(X*w))-lambda*w; %gradient
    H = @(w) -X'*diag(s(X*w).*(1-s(X*w)))*X-lambda*eye(length(w)); %hessian

    w = zeros(size(X,2),1); %initial guess

    % Newton iterations with a gradient-ascent safeguard.  We MAXIMIZE L,
    % so the ascent direction is +G; the original code stepped along -G,
    % which decreases L and makes the line search stall near a = 0.
    % A bounded loop replaces the open-ended while to guarantee termination.
    maxIter = 1000;
    for iter = 1:maxIter
        if norm(G(w)) <= 1e-6
            return  % converged: gradient (numerically) zero
        end
        d = H(w)\G(w); % Newton step: w - H\G (H is negative definite)
        if( L(w-d) < L(w)) %Newton failed - revert to gradient descent
            fprintf('Warning -  Reverting to gradient descent!\n')
            d = G(w);
            F = @(a) -L(w+a*d); % minimize -L along the ASCENT direction +d
            a = fminbnd(F,0,1);
            w = w + a*d;
        else
            w = w-d; % accept Newton step
        end
    end
    warning('logreg: did not converge within %d iterations', maxIter);
end
