function Adaboostclassification
% ADABOOSTCLASSIFICATION  Subject-independent activity recognition driver.
%
% Loads per-subject wrist-accelerometer recordings of five activities,
% builds one feature vector per sample, runs a 5-fold leave-one-subject-out
% cross validation with AdaBoost and reports accuracy, confusion matrix,
% precision and recall. Finally aggregates feature-selection weights via
% feature_weight().
%
% Activity indices inside each data file:
%   actionsample{1} - lift to mouth
%   actionsample{2} - pour
%   actionsample{3} - scoop
%   actionsample{4} - unscrew cap
%   actionsample{5} - stir
%
% Expected contents of each wesubjN.mat (inferred from usage below --
% confirm against the data files):
%   actionsample{a}.tsamples        - number of samples of activity a
%   actionsample{a}.sample{i}.acc1  - Nx3 readings of accelerometer 1
%   actionsample{a}.sample{i}.acc2  - Nx3 readings of accelerometer 2
clc;
clear all;  % NOTE(review): inside a function this only clears locals; consider removing.
close all;

%% Feature / preprocessing switches.
% Project the features onto 10 principal components before boosting?
fpca = false;

% Include the statistical features (mean/var/corr/energy/entropy)?
fstat = true;

% Include the raw-value histograms of acc1?
fhist = false;

% Include the first-derivative histograms of acc1?
fdhist = false;

% Min-max normalize the data (test data scaled with the train min/max)?
fnorm = false;

% Subtract the training mean from train and test data?
fdmean = false;

% Per-subject data files; one fold per subject below.
subjects = {'../mock-data/wesubj1.mat','../mock-data/wesubj2.mat','../mock-data/wesubj3.mat','../mock-data/wesubj4.mat','../mock-data/wesubj5.mat'};

%% Compute the feature vectors.
% For each subject, build a feature row and an activity label per sample,
% then cache them to featN.mat so the CV loop can reload them per fold.
for subj = 1 : 5
    load(subjects{subj});
    feat = [];
    label = [];
    for activity = 1 : 5
        for i = 1 : actionsample{activity}.tsamples
            temp = [];
            if fstat
                % Both accelerometers (6 columns) go into the stat features.
                temp = [temp computestatfeatures([actionsample{activity}.sample{i}.acc1,actionsample{activity}.sample{i}.acc2])];
            end 
            if fhist
                % Histogram features use accelerometer 1 only.
                temp = [temp computerawhist(actionsample{activity}.sample{i}.acc1)];
            end
            if fdhist                
                temp = [temp computefirstderivativehist(actionsample{activity}.sample{i}.acc1)];
            end                        
            feat = [feat;temp];
            label = [label; activity];
        end       
    end
    save(sprintf('feat%d.mat',subj),'feat','label');
end

% 5-fold leave-one-subject-out cross validation with AdaBoost.

% Number of boosting iterations (weak learners) per binary classifier.
nIter = 100;

% Confusion matrix accumulated over all folds (rows = true, cols = predicted).
cfmatrix = zeros(5);
%% AdaBoost classification.
for fold = 1: 5 
    % Held-out subject = fold; all other subjects form the training set.
    trdata = [];
    trlabel = [];
    tstdata = [];
    tstlabel = [];
    for j = 1 : 5
        load(sprintf('feat%d.mat',j));
        % Subject Independent
        if fold == j
            tstdata = feat;
            tstlabel = label;
        else
            trdata = [trdata;feat];
            trlabel = [trlabel;label];
        end       
    end
  
    % Normalize with the training min/max, applied to both sets.
    % normalizedata is a project function -- not defined in this file.
    if fnorm
        [trdata maxc minc] = normalizedata(trdata);
        [tstdata maxc minc] = normalizedata(tstdata, maxc, minc);
    end
    
    % Remove the training mean from both sets (test uses the TRAIN mean,
    % as it must for an unbiased evaluation).
    if fdmean
        mtrdata = mean(trdata);
        trdata = trdata - ones(size(trdata,1),1)*mtrdata;
        tstdata = tstdata - ones(size(tstdata,1),1)*mtrdata;
    end
    
    % Optional PCA to 10 components. pca/linproj expect samples in
    % columns, hence the transposes (presumably STPRtool -- confirm).
    if fpca
        % compute the model
        model = pca(trdata',10);
        % Project the training data
        trdata = linproj(trdata',model);
        trdata = trdata';
        % Project the test data
        tstdata = linproj(tstdata',model);
        tstdata = tstdata';
    end
    
    % One-vs-rest boosted stump classifier per activity (5 of them).
    % trainadaboost is a project function -- not defined in this file.
    act_model{fold} = trainadaboost(trdata,trlabel,5,nIter);  
  
    % Multi-class evaluation on the held-out subject.
    [error(fold) accuracy(fold) yest{fold}] = eval_multiclass_boost(tstdata,tstlabel',act_model{fold},nIter);
    
    tcmatrix = computeconfusionmatrix(tstlabel,yest{fold});
    
    cfmatrix = cfmatrix + tcmatrix;
    
    disp(sprintf('Fold %d Accuracy %f',fold,accuracy(fold)));
end
% act_model holds, per fold, the nIter stumps and alpha values of each
% per-activity binary classifier; feature_weight() reads this file.
  save('act_model.mat','act_model');

%% Display the results.

disp('Results with Adaboost');

disp(sprintf('Accuracy over 5 folds'));

disp(accuracy);

disp(sprintf('Overall Accuracy over 5 folds %f ',(sum(accuracy)/5)));

% Display Confusion Matrix

disp(sprintf('Confusion Matrix'));

disp(cfmatrix)

% Per-class precision and recall from the pooled confusion matrix.
[p r] = computepr(cfmatrix);
% Display Precision

disp(sprintf('Precision'));
disp(p);

% Display Recall
disp(sprintf('Recall'));
disp(r);

% Aggregate which features the boosted stumps selected across folds.
feature_weight();

function feature = computestatfeatures(data)
% COMPUTESTATFEATURES  Statistical/spectral features of one sample window.
%
% Input:
%   data - N x D matrix, one column per sensor axis (the caller passes
%          D = 6: two 3-axis accelerometers side by side).
% Output:
%   feature - 1 x (4*D + D*(D-1)/2) row vector laid out as
%             [mean(1xD) var(1xD) pairwise-corr(1xD*(D-1)/2)
%              energy(1xD) spectral-entropy(1xD)].
%
% Generalized from the original hand-written 6-column version: the
% correlation pairs and entropy columns now adapt to any D, while the
% feature order for D = 6 is unchanged. corrcoef (base MATLAB) replaces
% corr, removing the Statistics Toolbox dependency.

% Per-axis mean and variance.
m = mean(data);
v = var(data);

% Pairwise Pearson correlations, upper triangle in row-wise order --
% c(1,2..D), c(2,3..D), ... -- matching the original explicit layout.
% Extracting the strict lower triangle of the TRANSPOSE column-major
% yields exactly that row-wise order.
c = corrcoef(data);
ct = c';
cvals = ct(tril(true(size(c)),-1))';

% Magnitude spectrum with the DC component removed.
fftdata = abs(fft(data));
fftdata = fftdata(2:end,:);

% Per-axis energy (mean squared spectral magnitude).
e = sum(fftdata.^2)/size(fftdata,1);

% Per-axis spectral entropy with the original 1 + |F|/N smoothing.
fftdata = 1 + fftdata/size(data,1);
probs = bsxfun(@rdivide, fftdata, sum(fftdata,1));
ent = -sum(probs.*log(probs),1);

feature = [m v cvals e ent];
return;

function rawhist = computerawhist(data)
% Concatenated, normalized histograms of the raw X/Y/Z accelerometer
% readings. Bin centers are fixed per axis to the sensor's value range,
% so feature indices are comparable across samples.
    centers = {100:450, 0:700, 250:600};  % X, Y, Z bin centers
    rawhist = [];
    for axis = 1:3
        counts = hist(data(:,axis), centers{axis});
        % Normalize each axis histogram to sum to 1 before appending.
        rawhist = [rawhist counts/sum(counts)]; %#ok<AGROW>
    end
return;

function derivhist = computefirstderivativehist(data)
% Concatenated, normalized histograms of the first difference of each
% axis. The sign convention is sample(i) - sample(i+1), i.e. the negated
% forward difference, kept from the original implementation.
    centers = -50:50;
    d = -diff(data,1,1);  % equals data(1:end-1,:) - data(2:end,:)
    derivhist = [];
    for axis = 1:3
        counts = hist(d(:,axis), centers);
        derivhist = [derivhist counts/sum(counts)]; %#ok<AGROW>
    end
return;

function cfmatrix = computeconfusionmatrix(ytest,yest)
% COMPUTECONFUSIONMATRIX  Confusion matrix from true and predicted labels.
%
% Inputs:
%   ytest - vector of actual class labels (positive integers)
%   yest  - vector of estimated class labels, same length as ytest
% Output:
%   cfmatrix - square matrix; rows index the true class, columns the
%              predicted class, entry (i,j) counts samples of class i
%              predicted as class j.
%
% Fixes: the old loop bound size(ytest,1) silently processed only one
% element when the labels arrived as a ROW vector; numel handles either
% orientation. The matrix is now sized by the largest label seen in
% EITHER vector, so a prediction outside the true-label range can no
% longer cause an out-of-bounds indexing error.

nclass = max(max(ytest(:)), max(yest(:)));
cfmatrix = zeros(nclass, nclass);

for i = 1 : numel(ytest)
    cfmatrix(ytest(i), yest(i)) = cfmatrix(ytest(i), yest(i)) + 1;
end
    
return;

function [p r] = computepr(cfmatrix)
% Per-class precision and recall from a confusion matrix whose rows are
% true classes and columns are predicted classes.
%   precision(i) = tp(i) / (tp(i) + fp(i))   (column-wise)
%   recall(i)    = tp(i) / (tp(i) + fn(i))   (row-wise)
% A class with no predictions / no samples yields NaN via 0/0, exactly
% as the element-wise divisions in the original loop did.
    tp = diag(cfmatrix)';        % correct predictions per class
    fp = sum(cfmatrix,1) - tp;   % everything else in the column
    fn = sum(cfmatrix,2)' - tp;  % everything else in the row
    p = tp./(tp+fp);
    r = tp./(tp+fn);
function [feat_wts] = feature_weight()
% FEATURE_WEIGHT  Aggregate how often AdaBoost selected each feature.
%
% Loads the per-fold boosted models saved by the main script
% (act_model.mat), counts the feature index ('ind') chosen by every
% decision stump of each per-action binary classifier, averages the
% counts over the 5 folds, normalizes each action's column and saves
% the result to feat_wts.mat.
%
% Output:
%   feat_wts - 39 x 5 matrix of normalized feature-selection weights,
%              one column per action. 39 is the stat-feature length for
%              6 input columns (6 means + 6 vars + 15 corrs + 6 energies
%              + 6 entropies).
load act_model.mat
fields = {'act1','act2','act3','act4','act5'};
feat_wts = zeros(39, 5);

for fold = 1:5
    model = cell2struct(act_model{fold},fields,2); %#ok<USENS>
    nstumps = [length(model.act1) length(model.act2) length(model.act3) ...
               length(model.act4) length(model.act5)];
    % Rows beyond a classifier's stump count stay 0, as before; those
    % zeros take part in hist's automatic binning below.
    ind_mat = zeros(max(nstumps), 5);
    for a = 1:5
        stumps = model.(fields{a});
        % BUG FIX: the original guarded each copy-pasted block with
        % "if i < length(model.actK)", which skipped the LAST stump of
        % every classifier. Iterating over all stumps counts them all.
        for i = 1:length(stumps)
            ind_mat(i,a) = stumps{i}.ind;
        end
    end

    % 39-bin histogram of selected feature indices, per action column,
    % averaged over the 5 folds.
    feature_support = hist(ind_mat,39);
    feat_wts = feat_wts + feature_support.*1/5;
end
feat_wts = [normalize(feat_wts(:,1)) normalize(feat_wts(:,2)) normalize(feat_wts(:,3)) normalize(feat_wts(:,4)) normalize(feat_wts(:,5))];
save feat_wts.mat feat_wts;
return;