function [threshold, parity, weight, beta] = StatBoost(N, P, num_of_features)
% STATBOOST One boosting round: pick weak classifiers and update weights.
%
%   [THRESHOLD, PARITY] = STATBOOST(N, P, NUM_OF_FEATURES) selects weak
%   classifiers (a threshold/parity pair per feature) from the negative
%   examples N and positive examples P, each stored one example per COLUMN
%   (rows are feature values). NUM_OF_FEATURES is forwarded unchanged to
%   classifier_selection.
%
%   Backward-compatible extra outputs:
%     WEIGHT - per-example weights after the multiplicative AdaBoost
%              update (previously computed but discarded), so callers can
%              chain further boosting rounds.
%     BETA   - error/(1-error) of the chosen classifier, used for the
%              strong-classifier vote in AdaBoost.
%
%   Relies on project helpers classifier_selection() and get_feature().

negative_number = size(N, 2);
positive_number = size(P, 2);
example_number  = negative_number + positive_number;

% Initial weights: each class gets total mass 1/2, split uniformly within
% the class. Negatives occupy indices 1..negative_number, positives follow.
weight = ones(1, example_number);
weight(1:negative_number) = 1 / (2 * negative_number);
weight(negative_number+1 : example_number) = 1 / (2 * positive_number);

% Normalize to a probability distribution.
weight = weight ./ sum(weight);

z_negative = sum(weight(1:negative_number));
z_positive = sum(weight(negative_number+1 : example_number));

% Replicate the weight row across the feature dimension.
% BUG FIX: the original referenced `image_size`, which is undefined inside
% this function and would raise an error; the required row count is the
% feature dimension of the data.
feature_dim   = size(N, 1);
weight_matrix = repmat(weight, feature_dim, 1);

% Weighted class means.
m_negative = sum(N .* weight_matrix(:, 1:negative_number), 2) ./ z_negative;
m_positive = sum(P .* weight_matrix(:, negative_number+1 : example_number), 2) ./ z_positive;

% Weighted second moments.
% BUG FIX: the originals had unbalanced parentheses (syntax error), and the
% positive moment was divided by z_negative instead of z_positive.
yyt_negative = ((N .* weight_matrix(:, 1:negative_number)) * N') ./ z_negative;
yyt_positive = ((P .* weight_matrix(:, negative_number+1 : example_number)) * P') ./ z_positive;

mmt_negative = m_negative * m_negative';
mmt_positive = m_positive * m_positive';

% Weighted class covariances (second moment minus outer product of mean).
sigma_negative = yyt_negative - mmt_negative; clear yyt_negative mmt_negative;
sigma_positive = yyt_positive - mmt_positive; clear yyt_positive mmt_positive;

% `errors` avoids shadowing MATLAB's built-in error() function.
[errors, threshold, parity] = ...
    classifier_selection(m_negative, m_positive, sigma_negative, sigma_positive, weight, num_of_features);

[error_rank, error_index] = min(errors);

h_index = error_index; % weak classifier with the lowest weighted error

beta = error_rank / (1 - error_rank); % AdaBoost weight-update factor

feature = double(get_feature(h_index));

% Down-weight correctly classified examples by beta (AdaBoost update).
% BUG FIX: the original hard-coded the class boundary as 500/501 instead of
% negative_number, and multiplied feature(i) by weight(i) inside the
% threshold comparison for the negative branch, which makes the comparison
% against threshold(h_index) invalid.
% NOTE(review): the parity/inequality convention (positives on the `>`
% side, negatives on the `<` side) is preserved from the original code —
% confirm it matches classifier_selection's output convention.
for i = 1:example_number
    if i > negative_number
        correct = feature(i) * parity(h_index) > threshold(h_index) * parity(h_index);
    else
        correct = feature(i) * parity(h_index) < threshold(h_index) * parity(h_index);
    end
    if correct
        weight(i) = weight(i) * beta;
    end
end % for