function loores = ecogSpiderTSLeaveOneOut(ecog1,ecog2,c,crit)
% loores = ecogSpiderTSLeaveOneOut(ecog1,ecog2,c,crit) SVM leave-one-out cross validation with prior feature selection
%
% PURPOSE:  Perform a LOO crossvalidation of an SVM trained on two ecog
%           datasets. Only selected channels will be included. The baseline
%           interval is excluded from the analysis.
%
% INPUT:
% ecog1:    The first dataset.
% ecog2:    The second dataset. Make sure the same channels are selected as
%           in ecog1.
% c:        OPTIONAL: The penalty parameter. If omitted an estimate is
%           derived from the data.
% crit:     A criterion for t-value feature selection; features with
%           |t| > crit are kept. Defaults to 0 (include everything) if
%           omitted.
%
% OUTPUT:
% loores:   A structure containing results. Fields are
%           superThreshIdx: A cell array containing the indices in the time
%           series included for the classification of each trial (column 1
%           for class-1 trials, column 2 for class-2 trials). CAREFUL:
%           the baseline was removed and only the selected channels were
%           included when these indices were calculated.
%           w: A cell array of feature weights learned during training. The
%           indices in superThreshIdx map the weights into the original
%           dataspace.
%           prediction: The class predictions on the test sets
%           predAcc: The proportion of correct predictions
%           rec1: The recall in class 1
%           rec2: The recall in class 2
%           prec1: The precision in class 1
%           prec2: The precision in class 2
%           errorOnTrainset: The average error on the training set
%           sVs: The average proportion of support vectors
%           N1: The number of trials available in class 1
%           N2: The number of trials available in class 2

% 20110224 JR included feature weights in output structure
% TODO:

% make sure the spider toolbox is on the path
if exist('svm','file')~=2
    if exist('use_spider','file')~=2
        error('Add the spider toolbox to the matlab path or download it')
    else
        use_spider
    end
end

if nargin<3
    c=[];
end

if nargin<4
    crit=0;
end

% construct input data
% put data in a matrix with dimensions repetitions*features
% one row in features should be ch1(1),...,ch1(end),ch2(1),...,ch2(end),...
% MAY REQUIRE TOO MUCH MEMORY. LOOP AROUND TRIALS IF NECESSARY

% exclude baseline, only use selected channels
data1TrialLength=numel(ecog1.data(ecog1.selectedChannels,ecog1.nBaselineSamp+1:end,1));
nRepSet1=size(ecog1.data,3);
nRepSet2=size(ecog2.data,3);
% BUGFIX: preallocate nRepSet1+nRepSet2 rows (was nRepSet1+nRepSet1, which
% left ghost all-zero trials when nRepSet2<nRepSet1 and silently grew the
% matrix when nRepSet2>nRepSet1)
dataTS=zeros(nRepSet1+nRepSet2,data1TrialLength);
%the first dataset
for k=1:nRepSet1
    tmp=ecog1.data(ecog1.selectedChannels,ecog1.nBaselineSamp+1:end,k)';
    dataTS(k,:)=tmp(:);
end
%the second dataset
for k=1:nRepSet2
    tmp=ecog2.data(ecog2.selectedChannels,ecog2.nBaselineSamp+1:end,k)';
    dataTS(nRepSet1+k,:)=tmp(:);
end
% Here we have all trials as row vectors in a matrix
objective=ones(size(dataTS,1),1);
objective(nRepSet1+1:end)=-1;   %the labels

nTrials=size(dataTS,1);
predictions=zeros(nTrials,1);
loores.losses=zeros(nTrials,1);   %training error per fold
sVs=zeros(nTrials,1);             %collects the proportion of support vectors
superThreshIdx=cell(nTrials,2);   %selected feature indices per fold
loores.w=cell(nTrials,1);

% set classification algorithm
alg = svm;
alg.C = c;
alg.optimizer='libsvm';

g1TrialsIdx=find(objective>0);
g2TrialsIdx=find(objective<0);
nG1=length(g1TrialsIdx);

% leave one out loop
for k=1:nTrials
    % Nice output: one dot per trial, the trial number every 10th trial
    if(mod(k,10)==0), fprintf('%i',k); else fprintf('%s','.'); end
    if(mod(k,50)==0), fprintf('%s\n',''); end

    %remove the current trial from its class' training index list
    idx1=g1TrialsIdx;
    idx2=g2TrialsIdx;
    if k<=nG1
        idx1(k)=[];
        col=1;              %class-1 trial -> store indices in column 1
    else
        idx2(k-nG1)=[];
        col=2;              %class-2 trial -> store indices in column 2
    end
    curIdx=[idx1;idx2];

    % select features via two-sample t-values on the training trials only
    tVal=getTValues(mean(dataTS(idx1,:),1),var(dataTS(idx1,:),0,1),length(idx1),...
        mean(dataTS(idx2,:),1),var(dataTS(idx2,:),0,1),length(idx2));
    % BUGFIX: was find(abs(tVal>crit)) -- abs of a logical is a no-op, so
    % features with large *negative* t-values were never selected
    superThreshIdx{k,col}=find(abs(tVal)>crit);

    %all but the current trial as input data
    tmp=dataTS(curIdx,:);
    d = data(tmp(:,superThreshIdx{k,col}),objective(curIdx));

    % train and classify
    % suppress output
    %evalc('[tr resAlg] = train(alg,d)');
    [tr resAlg] = train(alg,d);
    loores.losses(k) = sum(tr.X~=tr.Y)/length(tr.X);
    sVs(k)=sum(abs(resAlg.alpha)>0)/length(tr.X);
    loores.w{k}=get_w(resAlg);

    % classify the held-out trial using the features selected on this fold
    tstTmp=dataTS(k,:);
    tstD=data(tstTmp(superThreshIdx{k,col}),objective(k));
    tst=test(resAlg, tstD);
    predictions(k)=tst.X;
end

% gather results
loores.prediction = predictions;
loores.predAcc = mean(predictions==objective);
loores.rec1 = sum(predictions==objective&objective>0)/sum(objective>0);
loores.rec2 = sum(predictions==objective&objective<0)/sum(objective<0);
loores.prec1 = sum(predictions>0&objective>0)/...
    (sum(predictions>0&objective>0)+sum(predictions>0&objective<0));
loores.prec2 = sum(predictions<0&objective<0)/...
    (sum(predictions<0&objective<0)+sum(predictions<0&objective>0));
loores.errorOnTrainset = mean(loores.losses);
loores.sVs=mean(sVs);
loores.N1 = nRepSet1;
loores.N2 = nRepSet2;
loores.superThreshIdx=superThreshIdx;

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%Helper functions

function tVals=getTValues(mu1,var1,n1,mu2,var2,n2)
% tVals = getTValues(mu1,var1,n1,mu2,var2,n2) elementwise two-sample t-values
%
% Computes pooled-variance t statistics from summary statistics: group
% means (mu1,mu2), group variances (var1,var2) and group sizes (n1,n2).
% Mean/variance inputs may be vectors; all operations are elementwise.
df1=n1-1;
df2=n2-1;
pooledVar=(df1*var1 + df2*var2)./(df1+df2);     %pooled sample variance
seMeanDiff=sqrt(pooledVar).*sqrt(1/n1 + 1/n2);  %SE of the mean difference
tVals=(mu1-mu2)./seMeanDiff;

