% PerceptronLearner - train a binary perceptron classifier with the
% classic sequential (online) update rule.
%
% input  x:        K x M matrix; each of the K rows is a feature vector
%                  of dimension M
% input  d:        K x 1 column vector of class labels in {-1, +1}
% input  max_iter: maximum number of training epochs (default 1000)
%
% output w:               normalized (unit Euclidean length) weight
%                         vector of size 1 x (M+1); the (M+1)st
%                         component is the bias/threshold theta
% output n_w_changes:     total number of weight updates performed
% output n_iter:          number of epochs actually executed
% output n_misclassified: row vector, one entry per epoch, holding the
%                         number of misclassified samples measured with
%                         the weights at the END of that epoch
function [w, n_w_changes, n_iter, n_misclassified] = PerceptronLearner(x, d, max_iter = 1000)
	
	% K samples, M features per sample
	[K, M] = size(x);
	
	% sanity check: exactly one label per sample, otherwise the inner
	% loop would fail later with an opaque indexing error
	if (numel(d) != K)
		error("PerceptronLearner: d must contain one label per row of x");
	endif
	
	% random initial weights, including the bias component
	w = rand(1, M+1);
	
	% augment the features with a constant-1 column so the bias theta is
	% learned as the (M+1)st weight: phi = [x, 1]
	phi = [x, ones(K, 1)];
	
	n_w_changes = 0;
	
	% preallocate the per-epoch history (avoids growing the array inside
	% the loop); truncated to the epochs actually run after the loop
	n_misclassified = zeros(1, max_iter);
	
	for n_iter = 1:max_iter
	
		% number of sequential updates made during this epoch
		n_miscl = 0;
		for k = 1:K
			
			% w*phi(k,:)' is the scalar activation for sample k;
			% sign(0) = 0 never equals d(k) in {-1,+1}, so points exactly
			% on the boundary also trigger an update (intended)
			if (sign(w*phi(k,:)') != d(k))
			
				% standard perceptron rule: move w toward (d=+1) or away
				% from (d=-1) the misclassified sample
				w = w + d(k) * phi(k,:);
				n_w_changes = n_w_changes + 1;
				n_miscl = n_miscl + 1;
			endif
		endfor
		
		% misclassification count with the end-of-epoch weights; this may
		% differ from n_miscl, which counts updates made mid-sweep
		n_misclassified(n_iter) = sum((d' != sign(w*phi')));
		
		% converged: a full pass over the data with no updates
		if(n_miscl == 0)
			break;
		endif
	endfor
	
	% drop the unused preallocated entries
	n_misclassified = n_misclassified(1:n_iter);
	
	% scale w to unit Euclidean (2-)norm; guard against the degenerate
	% all-zero weight vector, which would otherwise produce NaNs
	if (norm(w) > 0)
		w = w/norm(w);
	endif
endfunction