% ML TAU 2013 final project script
%
% Trains several pairwise-difference models on stacked histogram data,
% scores them by 3-fold CV and a simulated test set, and writes final
% predictions for the best action to y.mat.
function final



    % NOTE(review): dropped 'clear all' — inside a function it cannot clear
    % the caller's workspace anyway, but it does flush breakpoints and
    % compiled functions (mlint warning CLALL), so it only slowed startup.
    close all; clc; tic;

%
% Helper functions
%
    
    function [Xsub,idx]=licols(X,tol)
    %Extract a linearly independent set of columns of a given matrix X
    %
    %    [Xsub,idx]=licols(X,tol)
    %
    %in:
    %
    %  X: The given input matrix
    %  tol: A rank estimation tolerance. Default=1e-14
    %
    %out:
    %
    % Xsub: The extracted columns of X
    % idx:  The indices (into X) of the extracted columns
    %
    % Fixes: idx is now actually returned (the original declared only Xsub
    % despite documenting idx); the help-text default matches the code
    % (1e-14); the single-row/column case takes abs(R(1)) so a negative
    % leading R entry no longer makes the rank estimate come out empty.
     if ~nnz(X) %X has no non-zeros and hence no independent columns
         Xsub=[]; idx=[];
         return
     end
     if nargin<2, tol=1e-14; end
       % Column-pivoted economy-size QR: |diag(R)| is non-increasing, so
       % counting entries above tol*|R(1,1)| estimates the rank.
       [~, R, E] = qr(X,0);
       if ~isvector(R)
        diagr = abs(diag(R));
       else
        diagr = abs(R(1));
       end
       %Rank estimation
       r = find(diagr >= tol*diagr(1), 1, 'last');
       idx=sort(E(1:r));
       Xsub=X(:,idx);
    end

    
    % Optional feature-pruning hook: the intended version (commented out)
    % removed linearly dependent rows via licols on the transpose; the
    % active version is a no-op identity.
    %lirows = @(X) licols(X')';
    lirows = @(X) X;



    % Accuracy: y - true labels (+/-1), p - predicted scores. A prediction
    % counts as correct when its sign matches the label's sign.
    getresults = @(y,p) sum(y.*p>0)/size(y,1);

    % Element-wise difference of a sample pair (no summing here).
    % NOTE(review): this handle shadows MATLAB's builtin diff() for the
    % remainder of this function.
    diff = @(X1,X2) (X1-X2);

    % Single feature per sample pair: sum of absolute per-bin differences.
    sumabsdiff = @(X1,X2) sum(abs(diff(X1,X2)));

    % Per-histogram summary statistics (see histstat) differenced between
    % the two samples; the *log variant applies log(1+x) first.
    meanstddiff = @(X1,X2) lirows(diff(histstat(X1),histstat(X2)));
    meanstddifflog = @(X1,X2) meanstddiff(log(X1+1),log(X2+1));
    % Normalize each histogram block to unit sum, then difference.
    normhist = @(X) bsxfun(@rdivide,X,areamat(X));
    normhistdiff = @(X1,X2) lirows(diff(normhist(X1),normhist(X2)));
    
    
     function A = sum6part(X)
        % Collapse each 61-bin histogram block into 6 coarse bins of 10
        % fine bins each (bin 61 of every block is dropped, as before).
        % Generalized: block count derived from size(X,1) instead of the
        % hard-coded 102; identical output for the original 6222-row data.
        nblocks = size(X,1)/61;
        A = zeros(nblocks*6,size(X,2));
        for block=0:nblocks-1
            for part=0:5
                % coarse bin = sum of 10 consecutive fine bins
                A(6*block+part+1,:) = sum(X(61*block+10*part+1:61*block+10*part+10,:));
            end
        end
    end

    
    function A = histstat(X)
        % Per-histogram summary statistics: mean, std, skewness, kurtosis
        % of every 61-row histogram block, stacked 4 rows per block.
        % BUG FIX: rows were indexed 2*block+1..2*block+4, so consecutive
        % blocks overwrote each other's skewness/kurtosis rows and the
        % second half of A stayed zero; indexing is now 4*block+1..4*block+4.
        % Block count generalized from the hard-coded 102.
        % NOTE(review): skewness/kurtosis require the Statistics Toolbox.
        nblocks = size(X,1)/61;
        A = zeros(nblocks*4,size(X,2));
        for block=0:nblocks-1
            seg = X(61*block+1:61*(block+1),:);   % one histogram block
            A(4*block+1,:) = mean(seg);
            A(4*block+2,:) = std(seg);
            A(4*block+3,:) = skewness(seg);
            A(4*block+4,:) = kurtosis(seg);
        end
    end
        
    
    % Coarse 6-bin and log(1+x) feature variants.
    % NOTE(review): naivediff is not defined anywhere in this file; these
    % handles resolve it at call time, so a naivediff function must exist
    % on the MATLAB path for the 'naive*' actions to run — verify.
    naive6diff = @(X1,X2) naivediff(sum6part(X1),sum6part(X2));
    norm6histdiff = @(X1,X2) normhistdiff(sum6part(X1),sum6part(X2));
    % Model builders: linear regression over features (transposed so rows
    % become samples), and an SVM via the svmtrain/svmpredict interface.
    linmodel = @(X,y) LinearModel.fit((X)',y,'linear');
    svmmodel = @(X,y) svmtrain(y,X,[]);
    % Predictors share the signature (X1,X2,model,prepare[,y]); lmpredict
    % never reads y, so MATLAB permits calls that omit it.
    svmpred = @(X1, X2, model, prepare)  svmpredict(prepare(X1,X2),model);
    lmpredict = @(X1, X2, model, prepare, y)  predict(model,prepare(X1,X2)');
    logdiff = @(X1,X2) naivediff(log(X1+1),log(X2+1));
    lognormdiff = @(X1,X2) normhistdiff(log(X1+1),log(X2+1));

 
% calculate a matrix of per-histogram sums of counts (used for
% normalization): dividing X element-wise by areamat(X) gives
% unit-sum histograms within each of the 102 blocks
    function A = areamat(X)
        nblocks = 102;
        rows_per_block = size(X,1)/nblocks;
        A = zeros(size(X));
        for b = 1:nblocks
            rows = (b-1)*rows_per_block+1 : b*rows_per_block;
            % every row of the block carries that block's column totals
            A(rows,:) = repmat(sum(X(rows,:)), rows_per_block, 1);
        end
    end

    function [] = comphist(h,s)
        % Show histogram h of sample s for both training sets side by side.
        % Reads X1train/X2train from the enclosing function's workspace.
        sets = {X1train, X2train};
        for side = 1:2
            subplot(1,2,side)
            hshow(sets{side},h,s)
        end
    end
    

    function [] = hshow(X,h,s)
       % Plot the distribution of histogram h (0-based) for sample s,
       % i.e. rows 61*h+1 .. 61*(h+1) of column s.
       block = X(61*h+1:61*(h+1),s);
       hist(block);
    end

%--------------------------------------------------------------------------------------------------------%
% Main patch panel
%--------------------------------------------------------------------------------------------------------%

% add lines here to mix and match pre process/normalize stuff with different models
% columns of the cell array are
% 1 - id string for reporting
% 2 - preprocess (normalize etc)
% 3 - training (model building)
% 4 - predictions based on model
% columns 5-7 are appended during execution:
% 5 - mean 3-fold CV accuracy on training data
% 6 - resulting model (value left over from the last CV fold)
% 7 - simulated test accuracy




actions = { 
    % single variable per sample
  'abs sum of diffs 1var-linear model', sumabsdiff, linmodel, lmpredict; %
 
 
  'mean and std for 102 histograms: 204-vars-linear model', meanstddiff, linmodel, lmpredict; 
  'mean and std for 102 histograms LOG: 204-vars-linear model', meanstddifflog, linmodel, lmpredict; 
  'normhistdiff linear model', normhistdiff, linmodel, lmpredict;  
  'naive6 linear model', naive6diff, linmodel, lmpredict; % naive 
  'norm6histdiff linear model', norm6histdiff, linmodel, lmpredict;  
  'naive log linear model', logdiff, linmodel, lmpredict; % naive 
  'normhistdiff log linear model', lognormdiff, linmodel, lmpredict;  
  'meanstd', meanstddiff, linmodel,lmpredict;
    % so far got 50.7% 
    % 102 variables per sample
    

 % 'lognormhistdiff linear model', lognormhistdiff, linmodel, defaultpredict;
};


%----------------------------------------------------------------------------------------------------%
%   Execution 
%----------------------------------------------------------------------------------------------------%

%
% Initialization
%

if ~exist('dataforproject.mat','file')
    error('Data file not found in current directory')
end;
% Saving memory. Loading vars on a need to load basis only throughout
load('dataforproject.mat','X1train','X2train','gidtrain','ytrain');
fprintf('Training Data successfully loaded\n');



% Mechanics: 3-fold cross validation, folds defined by group id gidtrain

for p=1:size(actions,1)   
    results = zeros(3,1);

    for k=1:3
        % split data three fold by group. items with group id (gid) = k are in
        % current test set of the 3-fold cross validation
        %fprintf('Splitting data for kfold cross validation k=%d\n',k);        
        X1 = X1train(:,(gidtrain~=k));
        X2 = X2train(:,(gidtrain~=k));
        y = ytrain(gidtrain~=k);        
        % normalize
        X = actions{p,2}(X1,X2);    
        % train. NOTE: column 6 is overwritten every fold, so after the
        % loop it holds the model from the k=3 split only.
        actions{p,6} = actions{p,3}(X,y);    
        % prepare cv test set 
        X1 = X1train(:,(gidtrain==k));
        X2 = X2train(:,(gidtrain==k));
            
        % predict
        % NOTE(review): y still holds the TRAINING-fold labels here; a
        % predictor that actually reads its y argument would see the wrong
        % vector (lmpredict ignores it) — verify before adding one.
        predictions =  actions{p,4}(X1,X2,actions{p,6},actions{p,2},y); % some prediction functions require y used for training
        y = ytrain(gidtrain==k); % true results for cv set
        % store results
        results(k,1) = getresults(y,predictions);
    end;
    % mean accuracy over the three folds
    actions{p,5} = mean(results);
    
    fprintf('3-fold CV Training Results for %s are %f\n',char(actions(p,1)), actions{p,5});
end;
% perform some algorithm on 2 folds and test against the third. 
% average results and print result
clear X1;
clear X2;
clear X1train;
clear X2train;
clear gidtrain;
[best_train_result, best_train_index]  = max(vertcat(actions{:,5}));
fprintf('Best Train Results achieved for %s are %f\n',char(actions(best_train_index,1)), best_train_result);
toc
% processing extra data: create test scenario — build an artificial labelled
% set of xtra_samples pairs from the extra training pool
tic
xtra_samples = 1000;
fprintf('Loading Extra Y..');
load('dataforproject.mat','yyextratrain');
rng(1);  % fixed seed so the simulated test set is reproducible
X1ind = randsample(size(yyextratrain,1),xtra_samples);
% create desired results artificially
xtra_y = random('bino',1,0.5,[1,xtra_samples])'; % random order not really necessary here
xtra_y(xtra_y==0)=-1;
X2ind = zeros(xtra_samples,1);
% this could probably be implemented better
fprintf('populating..');
for i=1:xtra_samples
    % BUG FIX: labels were already mapped 0 -> -1 above, so the original
    % test 'if (xtra_y(i))' was always true (-1 is nonzero/truthy) and
    % every pair — including the "negative" ones — was drawn from the same
    % identity. Test the sign explicitly instead.
    if (xtra_y(i) > 0)
       candidates = find(yyextratrain == yyextratrain(X1ind(i)));        
    else
       candidates = find(yyextratrain ~= yyextratrain(X1ind(i)));         
    end;    
    % NOTE(review): re-seeding inside the loop resets the generator before
    % every draw — presumably for reproducibility, but it biases partner
    % selection toward the same relative position in candidates; verify.
    rng(2);
    X2ind(i) =  randsample(candidates(candidates ~= X1ind(i)),1);
    if (mod(i,100)==0) 
        fprintf('.');
    end;   
end;
fprintf('Done.\nLoading Extra X..');
load('dataforproject.mat','XXextratrain');
fprintf('loaded. populating..');
X1 = XXextratrain(:,X1ind);
X2 = XXextratrain(:,X2ind);
clear XXextratrain;
fprintf('Done\n');
% score every action on the simulated test set (the 4-argument call works
% because lmpredict never reads its trailing y argument)
for p=1:size(actions,1)
    xtra_p =  actions{p,4}(X1,X2,actions{p,6},actions{p,2});
    actions{p,7} = getresults(xtra_y,xtra_p);
    fprintf('Test Results using %s are %f\n', char(actions(p,1)) ,actions{p,7});
end   
[best_test_result, best_test_index]  = max(vertcat(actions{:,7}));
fprintf('Best Test Results achieved for %s are %f\n',char(actions(best_test_index,1)), best_test_result);
if (best_test_index ~= best_train_index)
    fprintf('Although Training Best Results achieved for %s are %f\n',char(actions(best_train_index,1)), best_train_result);
end
save('actions.mat','actions');
%------------------------------------------------
% Generating final prediction
%--------------------------------------------------
fprintf('Loading Final test X..');
load('dataforproject.mat','X1test','X2test');
fprintf('loaded. predicting..');
% NOTE(review): the model used here is whatever column 6 holds after the CV
% loop, i.e. the model trained with the k=3 split held out — it is never
% retrained on the full training set; verify this is intended.
y =  actions{best_test_index,4}(X1test,X2test,actions{best_test_index,6},actions{best_test_index,2});
save('y.mat','y');
clear X1test;
clear X2test;
fprintf('Good morning Itay its Monday\n');
toc
%clear;
end
