% rosenbrock function
function [f,g] = rosenbrock(x)
% ROSENBROCK  Vectorized Rosenbrock function value and (optional) gradient.
%
%   x is an [N x p] matrix, where N is the search-space dimensionality and
%   p is the population size (number of points evaluated in parallel).
%   Thus x(:,k) are the coordinates of point k, and x(i,:) are the i-th
%   coordinates of all points.
%
%   f : [1 x p] row vector — one function value per point.
%   g : [N x p] matrix — one gradient column per point (requires N >= 2).
%
%   An equivalent anonymous-function definition would be:
%   f = @(x) 100*sum((x(1:end-1,:).^2 - x(2:end,:)).^2,1) + sum((1-x(1:end-1,:)).^2,1)

    % sum(...,1) sums over the coordinate dimension, i.e. over all
    % coordinates of a single point, giving one value per column of x.
    f = 100*sum((x(1:end-1,:).^2 - x(2:end,:)).^2,1) + ...
        sum((1-x(1:end-1,:)).^2,1);

    % Only compute the gradient when the caller actually requests it
    % (standard MATLAB idiom for objective functions used with solvers
    % such as fminunc); avoids wasted work on value-only evaluations.
    if nargout > 1
        % To understand the gradient it helps to write the sum out for a
        % small N (say N = 4) and differentiate term by term. Three cases
        % result: the first coordinate, the interior coordinates (an empty
        % slice when N == 2), and the last coordinate.
        g = [400*(x(1,:).^2 - x(2,:)).*x(1,:) - 2*(1-x(1,:));
             400*(x(2:end-1,:).^2 - x(3:end,:)).*x(2:end-1,:) - 2*(1-x(2:end-1,:)) - 200*(x(1:end-2,:).^2 - x(2:end-1,:));
             -200*(x(end-1,:).^2 - x(end,:))];
    end

end % of function
