 % Copyright (C) 2012 	Paul Bovbel, paul@bovbel.com
 % 						Richard Abrich, abrichr@gmail.com
 %
 % This file is part of our empirical study of boosting algorithms (http://code.google.com/p/boosting-study/)
 % 
 % This is free software; you can redistribute it and/or modify
 % it under the terms of the GNU General Public License as published by
 % the Free Software Foundation; either version 3 of the License, or
 % (at your option) any later version.
 % 
 % This source code is distributed in the hope that it will be useful,
 % but WITHOUT ANY WARRANTY; without even the implied warranty of
 % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 % GNU General Public License for more details.
 % 
 % You should have received a copy of the GNU General Public License
 % along with this source code. If not, see http://www.gnu.org/licenses/

function [ output ] = stump( data, parameters, prev_feature, train )
%STUMP Train or apply a single decision stump (one-level decision tree).
%
%   Training (train == true):
%     parameters   - column vector D of per-datapoint boosting weights
%     data         - matrix whose first column is the class label and whose
%                    remaining columns are features; feature values are
%                    assumed to be consecutive integers 1..k (NOTE(review):
%                    the leaf loop and the test branch both rely on this --
%                    confirm against the datasets used)
%     prev_feature - feature index used by the previous stump (<= 0 to
%                    disable); its gain is zeroed so the same feature is
%                    not chosen twice in a row
%     output       - [split_feature class_at_split]: the chosen feature
%                    followed by the class predicted for each value 1..k
%
%   Testing (train == false):
%     parameters   - the vector produced by the training branch
%     output       - column vector of predicted classes, one per datapoint
if train == true
    
    D = parameters;
    class = unique(data(:,1));
    num_features = size(data,2)-1;
    num_class = size(class,1);
    
    %normalized information gain of a split on each feature
    S = H(data, D);
    gain = zeros(num_features,1);
    for i=2:num_features+1
        values = unique(data(:,i));
        num_values = size(values,1);
        cond_entropy = zeros(num_values,1);
        for j=1:num_values
            %index by the actual feature value, not the loop counter, so
            %the gain is correct even if values are not exactly 1..k
            index = find(data(:,i) == values(j));
            cond_entropy(j) = P(i,values(j),data,D) * H(data(index,1), D(index,1));
        end
        %divide by log(num_values+1) to penalize many-valued features
        gain(i-1) = (S - sum(cond_entropy))/log(num_values+1);
    end
    %prevent stump from repeating last stump's feature
    %(hoisted out of the loop: it only needs to happen once)
    if prev_feature > 0
        gain(prev_feature) = 0;
    end
    
    %split on the (last) feature with maximal gain
    split_feature = max(find(gain == max(gain)));
    num_split = size(unique(data(:,split_feature+1)),1);
    class_at_split = zeros(1,num_split);
    net = zeros(1,num_class);
    %bar(gain)
    for i=1:num_split
        %get index of datapoints on this leaf (feature value == i)
        leaf_data = find(data(:,split_feature+1) == i);
        %weighted vote for each class on the leaf; explicit ",1" replaces
        %the old linear-indexing coincidence data(leaf_data)
        for j = 1:num_class
            net(j) = P(1,class(j),data(leaf_data,1),D(leaf_data,1));
        end
        %if every class is tied, break the tie at random
        if size(unique(net),2) == 1
            net = eye(1,num_class);
            net = net(randperm(num_class));
        end
        %assign the (first) maximal class to the leaf; max avoids the
        %vector-into-scalar assignment error the old find()-based lookup
        %hit when only some of the classes were tied
        [~, best] = max(net);
        class_at_split(i) = class(best);
    end
    
    %output parameters, mismatch not needed
    output = [split_feature class_at_split];

elseif train == false
    
    split_feature = parameters(1);
    class_at_split = parameters(2:end);
    
    %map each feature value v (assumed in 1..k) to class_at_split(v);
    %plain indexing replaces changem(), which needs the Mapping Toolbox;
    %reshape forces the column orientation changem() returned
    output = reshape(class_at_split(data(:,split_feature+1)), [], 1);

end
end

function [ entropy ] = H( data, D)
%H Weighted Shannon entropy of the class labels in data(:,1).
%   data - matrix (or column vector) whose first column holds class labels
%   D    - column vector of per-datapoint weights, same length as data
%   Uses the convention 0*log(0) = 0, so classes with zero total weight
%   (possible under boosting weights on a subset) contribute nothing
%   instead of producing NaN.

class = unique(data(:,1));
num_classes = size(class,1);
entropy = 0;

for i=1:num_classes
    %total weight of the datapoints labelled class(i)
    count = P(1,class(i),data,D);
    if count > 0
        entropy = entropy - count * log(count);
    end
end
end

function [ count ] = P( dim, value, data, D)
%P Total weight of the datapoints whose column dim equals value.
%   dim   - column of data to inspect
%   value - value to match against
%   data  - data matrix (or a column vector when dim == 1)
%   D     - column vector of per-datapoint weights
%   Returns sum of D over the matching rows (0 if nothing matches).

matches = (data(:, dim) == value);
count = sum(D(matches));

end
