 % Copyright (C) 2012 	Paul Bovbel, paul@bovbel.com
 % 						Richard Abrich, abrichr@gmail.com
 %
% This file is part of our empirical study of boosting algorithms (http://code.google.com/p/boosting-study/)
 % 
 % This is free software; you can redistribute it and/or modify
 % it under the terms of the GNU General Public License as published by
 % the Free Software Foundation; either version 3 of the License, or
 % (at your option) any later version.
 % 
 % This source code is distributed in the hope that it will be useful,
 % but WITHOUT ANY WARRANTY; without even the implied warranty of
 % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 % GNU General Public License for more details.
 % 
 % You should have received a copy of the GNU General Public License
 % along with this source code. If not, see http://www.gnu.org/licenses/

function [tree, mismatch] = decisiontree(runmode, data, feature_vals, max_depth, params)
    % Train or evaluate an ID3 decision tree.
    %
    % runmode:      0 - train on +1/-1 labelled data (params = example weights)
    %               1 - test (params = previously trained tree)
    %               2 - train on raw data, preprocessed internally
    % data:         examples, one per row; column 1 holds the class label
    % feature_vals: cell array; feature_vals{i} lists the values feature i takes
    % max_depth:    maximum depth of the tree
    % params:       example weights (training) or trained tree (testing)
    %
    % Returns:
    %   tree     - trained tree (rows: parent | feature_idx | label | value),
    %              or, for runmode 1, the row vector of per-example predictions
    %   mismatch - 0/1 column vector; 1 where prediction ~= true class

    if runmode == 2
        [classes, features] = preprocess(data);
    else
        % Remap labels {+1, -1} -> {2, 1} so they can be used as array indices.
        % (Plain logical indexing replaces the Mapping Toolbox `changem`,
        % removing a toolbox dependency; the sequential remap is equivalent.)
        labels = data(:, 1);
        labels(labels == 1) = 2;
        labels(labels == -1) = 1;
        classes = labels;
        features = data(:, 2:end);
    end

    if runmode == 0 || runmode == 2
        weights = params;

        % Seed the tree with a root placeholder; id3 grows it row by row.
        used_features = [];
        tree = [0];

        tree = id3(tree, classes, features, used_features, feature_vals, ...
            weights, 0, max_depth);
    else
        tree = params;
    end

    % Evaluate the tree on every example.
    N = size(features, 1);
    mismatch = ones(N, 1) * -1;
    predictions = zeros(1, N);  % preallocated (was grown inside the loop)
    for i = 1 : N
        prediction = traverse(tree, features(i, :));
        mismatch(i) = ~(prediction == classes(i));
        predictions(i) = prediction;
    end

    % In test mode the first output carries the predictions, not the tree.
    if runmode == 1
        tree = predictions;
    end

end

function p = get_parent(tree, idx)
    % Parent index of node `idx` (column 1 of the node table).
    PARENT_COL = 1;
    p = tree(idx, PARENT_COL);
end

function f = get_feature(tree, idx)
    % Feature index stored at node `idx` (column 2 of the node table).
    FEATURE_COL = 2;
    f = tree(idx, FEATURE_COL);
end

function l = get_label(tree, idx)
    % Class label of node `idx` (column 3); 0 marks an internal node.
    LABEL_COL = 3;
    l = tree(idx, LABEL_COL);
end

function v = get_value(tree, idx)
    % Branch value of node `idx` (column 4): the feature value leading here.
    VALUE_COL = 4;
    v = tree(idx, VALUE_COL);
end
    

function prediction = traverse(tree, datapoint)
    % Classify a single example by walking the tree from the root (row 1)
    % down to a leaf, following the branch whose value (column 4) matches
    % the example's value for the node's feature (column 2).
    tidx = 1;
    
    % continue until we reach a leaf (internal nodes have label == 0,
    % set by add_node's implicit zero-fill; leaves get a nonzero label)
    while get_label(tree, tidx) == 0
        
        % get feature to look at
        feature_data = datapoint(get_feature(tree, tidx));

        % store parent index and go to first child
        % (id3 appends the first child immediately after its parent row,
        % so row pidx+1 is always the first child)
        pidx = tidx;
        tidx = tidx + 1;
        
        % find node corresponding to this data point
        while get_value(tree, tidx) ~= feature_data
            % go to next child: siblings are not contiguous because each
            % child's subtree is appended before the next sibling, so skip
            % rows until we find one whose parent is pidx again
            tidx = tidx + 1;
            while get_parent(tree, tidx) ~= pidx
                tidx = tidx + 1;
            end
        end 
        % NOTE(review): if feature_data matches none of the children's
        % values, this search runs past the end of `tree` and errors —
        % presumably prevented because id3 creates a child for every value
        % in feature_vals; confirm test data only contains those values.
    end
    
    prediction = get_label(tree, tidx);
    
end

function print_tree(tree)
    % Dump the node table to stdout, one row per node, 1-based index first.
    fprintf('index\tparent\tfeature\tlabel\tvalue\n');
    nrows = size(tree, 1);
    rows = [(1:nrows)' tree(:, 1:4)];
    % fprintf recycles the format string once per column of rows', i.e.
    % once per node row, producing identical output to a per-row loop.
    fprintf('%d\t\t%d\t\t%d\t\t%d\t\t%d\n', rows');
end

function tree = set_value(tree, value)
    % Set the branch value (column 4) of the most recently appended node.
    tree(end, 4) = value;
end

function tree = set_label(tree, label)
    % Set the class label (column 3) of the most recently appended node,
    % turning it into a leaf.
    tree(end, 3) = label;
end

function tree = set_feature(tree, feature)
    % Set the split feature index (column 2) of the most recently
    % appended node.
    tree(end, 2) = feature;
end

function tree = add_node(tree, parent)
    % Append a new node row: parent link in column 1, value sentinel -1 in
    % column 4. Feature and label columns are zero-filled by MATLAB's
    % out-of-range assignment, marking the node as internal/unset.
    row = size(tree, 1) + 1;
    tree(row, [1 4]) = [parent, -1];
end

function wm = weighted_mode(classes, weights)
    % Return the class label with the greatest total weight.
    %
    % Fix: the previous version tallied classes == n for n = 1..N where
    % N = length(unique(classes)), i.e. it assumed labels are exactly the
    % contiguous set 1..N and returned an index rather than a label. With
    % non-contiguous labels (possible via preprocess, which maps raw data
    % with a +1 shift) that returned a wrong value. Tally over the actual
    % unique labels and return the winning label itself; behavior is
    % unchanged for the contiguous 1..N case.
    labels = unique(classes);
    totals = zeros(length(labels), 1);
    for n = 1 : length(labels)
        totals(n) = sum(weights(classes == labels(n)));
    end
    [~, idx] = max(totals);
    wm = labels(idx);
end

%http://en.wikipedia.org/wiki/ID3_algorithm
function tree = id3(tree, classes, features, used_features, feature_vals, ...
    weights, depth, max_depth)
    % Recursively grow an ID3 decision tree. On entry, the last row of
    % `tree` is the node currently being decided; this function either
    % turns it into a leaf (set_label) or into a split (set_feature) with
    % one child row appended per possible feature value.
    %
    % classes/features/weights are the training subset reaching this node.
    % used_features lists feature indices already split on along this path.

    % Base case 1: depth limit reached — label with the weight-majority class.
    if depth >= max_depth
        %tree = set_label(tree, mode(classes));        
        tree = set_label(tree, weighted_mode(classes, weights));        
        return
    end

    % Base case 2: node is pure — label with the single remaining class.
    if length(unique(classes)) == 1
        tree = set_label(tree, classes(1));
        return;
    end    
    
    % Base case 3: no features left to split on — weight-majority label.
    if length(used_features) == size(features, 2)
        %tree = set_label(tree, mode(classes));
        tree = set_label(tree, weighted_mode(classes, weights));
        return;
    end
    
    % 1. find feature which maximizes information gain
    [max_gain, max_i] = find_max_gain(classes, features, used_features, weights);
    used_features = [used_features max_i];

    % 2. assign to node
    tree = set_feature(tree, max_i);

    
    % 3. create children for each value of feature
    % (pidx is this node's row; children appended below point back to it,
    % and traverse() relies on the first child being at row pidx+1)
    pidx = size(tree, 1);
    feature = features(:,max_i);
    
    vals = feature_vals{max_i};
    for j = 1 : length(vals)
        v = vals(j);
        
        tree = add_node(tree, pidx);
        tree = set_value(tree, v);

        %4. sort training samples to leaf nodes
        %subset = feature == v;
        if ~any(feature == v)
            % No training examples reach this branch: fall back to the
            % weight-majority class of the parent's subset.
            %tree = set_label(tree, mode(classes));
            tree = set_label(tree, weighted_mode(classes, weights));
        else
            % Recurse on the subset of examples taking this branch.
            tree = id3(tree, classes(feature==v), features(feature==v, :), ...
                used_features, feature_vals, weights(feature==v), depth + 1, max_depth);
        end
    end

end

function [maxgain, max_i] = find_max_gain(classes, features, used_features, weights)
    % Return the information gain and index of the best not-yet-used
    % feature. Assumes at least one unused feature exists with gain > -1
    % (callers guard against the all-used case).
    maxgain = -1;
    for fidx = 1 : size(features, 2)
        % skip features already split on along this path
        if ismember(fidx, used_features)
            continue;
        end

        g = gain(classes, features(:, fidx), weights);
        if g > maxgain
            maxgain = g;
            max_i = fidx;
        end
    end
end

function [classes, features] = preprocess(data)
    % Split a data matrix into class labels and features.
    %
    % Column 1 of `data` holds the class label; the remaining columns are
    % features. Both are shifted by +1 so they can serve as 1-based array
    % indices downstream. Integer-valued features are kept as-is;
    % continuous features are discretized into 4 equal-width intervals,
    % coded 1..4.
    %
    % Fixes vs. the original:
    %  - `features` and the per-feature bin buffer are preallocated; the
    %    original reused a stale `f` across loop iterations and, on a
    %    first continuous feature, assigned through a logical index into
    %    an undefined variable.
    %  - `range(feature)` (Statistics Toolbox) replaced by the equivalent
    %    max - min, removing a toolbox dependency.

    num_cols = size(data, 2);
    num_rows = size(data, 1);

    classes = data(:, 1) + 1;

    features = zeros(num_rows, num_cols - 1);
    for i = 1 : num_cols - 1
        feature = data(:, i + 1) + 1;

        if isequal(fix(feature), feature)
            % integer-valued: use directly
            features(:, i) = feature;
        else
            % continuous: bin into 4 equal-width intervals over its range;
            % each pass k overwrites entries >= the k-th threshold, so an
            % entry ends up with the highest interval it qualifies for
            num_intervals = 4;
            width = (max(feature) - min(feature)) / num_intervals;
            binned = zeros(num_rows, 1);
            for k = 1 : num_intervals
                binned(feature >= min(feature) + (k - 1) * width) = k;
            end
            features(:, i) = binned;
        end
    end
end

function G = gain(classes, feature, weights)
    % Information gain of `feature` for predicting `classes`:
    % entropy of the marginal distribution minus the conditional entropy.
    %
    % weights == 0 is a sentinel selecting the unweighted empirical
    % distribution over feature values; otherwise the caller-supplied
    % weights are used directly as the probability vector.
    %
    % NOTE(review): `if weights == 0` on a vector is only true when ALL
    % entries are zero (MATLAB `if` requires all-nonzero to take the
    % branch) — confirm the sentinel is always passed as a scalar 0, and
    % that real weight vectors never come in all-zero.
    if weights == 0
        p = probabilities(feature);
    else
        p = weights;
    end
    G = calc_entropy(p) - cond_entropy(classes, feature, weights);
end

function p = probabilities(feature)
    % Empirical distribution of `feature` over the values 1..max(feature),
    % returned as a column vector (values must be positive integers).
    num_entries = length(feature);
    counts = accumarray(feature(:), 1, [max(feature), 1]);
    p = counts / num_entries;
end

function E = calc_entropy(prob)
    % Shannon entropy (base 2) of a probability vector.
    %
    % Fix: zero probabilities previously produced 0 * log2(0) = 0 * (-Inf)
    % = NaN, poisoning the whole sum (the filtering line existed but was
    % commented out). Drop zero entries, applying the standard convention
    % 0*log2(0) = 0.
    prob = prob(prob ~= 0);
    E = -sum(prob .* log2(prob));
end

function CE = cond_entropy(classes, feature, weights)
    % Conditional entropy of `feature` given `classes`: a p(class)-weighted
    % sum of per-class entropies.
    num_classes = size(unique(classes), 1);

    % calculate conditional entropy specific to each class

    % group each feature value by class
    % NOTE(review): grouping tests classes(i) == j for j = 1..num_classes,
    % i.e. it assumes labels are exactly the contiguous set 1..num_classes,
    % while class_to_index below handles arbitrary labels — these two
    % conventions are inconsistent; confirm labels are always 1..N here.
    f = cell(num_classes, 1);
    for i = 1 : length(feature)
        %fprintf('%d of %d\n', i, length(feature));
        for j = 1 : num_classes
            if classes(i) == j
                f{j} = [f{j} feature(i)];
            end
        end
    end

    
    % map each class label to its rank among the unique labels, so labels
    % need not be contiguous when computing the empirical class priors
    class_to_index = zeros(max(classes), 1);
    unique_classes = unique(classes);
    for i = 1 : num_classes
        class_to_index(unique_classes(i)) = i;
    end

    % weights == 0 is a sentinel: use the empirical class distribution;
    % otherwise use the caller-supplied weights directly as p.
    % NOTE(review): when weights is a vector, p is indexed by i =
    % 1..num_classes below but has one entry per example — presumably only
    % valid in the unweighted branch; verify the weighted call path.
    if weights == 0
        p = zeros(num_classes, 1);
        for i = 1 : length(feature)
            class = classes(i);
            p(class_to_index(class)) = p(class_to_index(class)) + 1;
        end
        p = p / length(feature);
    else
        p = weights;
    end
        
    % NOTE(review): calc_entropy is applied to the raw feature values in
    % f{i}, not to a probability vector — this is not the textbook
    % conditional entropy; confirm this is intentional before changing.
    CE = 0;
    for i = 1 : num_classes
        CE = CE + p(i) * calc_entropy(f{i});
    end
end               


