function [J, grad] = costFunctionReg(theta, X, y, lambda)
    % Compute cost and gradient for logistic regression with L2 regularization.
    %   theta  - parameter vector (n x 1)
    %   X      - design matrix (m x n); column 1 is presumably the bias feature
    %   y      - label vector (m x 1) with entries in {0, 1}
    %   lambda - regularization strength
    % Returns the scalar cost J and the gradient grad (same size as theta).
    % The bias parameter theta(1) is excluded from the penalty term.

    m = length(y);              % number of training examples
    h = sigmoid(X * theta);     % hypothesis value for every example

    % Copy of theta with the bias entry zeroed, so it drops out of both
    % the penalty term and the gradient's regularization component.
    theta_reg = [0; theta(2:end)];

    % Cross-entropy cost plus the L2 penalty (bias excluded).
    J = (-y' * log(h) - (1 - y)' * log(1 - h)) / m ...
        + (lambda / (2 * m)) * (theta_reg' * theta_reg);

    % One vectorized gradient expression: the zeroed first entry of
    % theta_reg reproduces the unregularized bias gradient.
    grad = (X' * (h - y)) / m + (lambda / m) * theta_reg;
end
