function loores = ecogSpiderTSLeaveOneOut(ecog1,ecog2,C)
% ECOGSPIDERTSLEAVEONEOUT  Leave-one-out SVM classification of two ecog datasets.
%
%   loores = ecogSpiderTSLeaveOneOut(ecog1, ecog2, C)
%
% INPUT:
% ecog1:    The first dataset (struct with fields .data and .selected).
% ecog2:    The second dataset. Make sure the same channels are selected as
%           in ecog1.
% C:        SVM penalty parameter (optional). Default: 1/mean squared norm
%           of the training vectors (see Joachims).
%
% OUTPUT (struct loores):
%   .prediction      - leave-one-out prediction for each trial
%   .pred_acc        - leave-one-out accuracy
%   .rec1 / .rec2    - recall for class +1 (ecog1) / class -1 (ecog2)
%   .prec1 / .prec2  - precision for class +1 / class -1
%   .losses          - per-fold training error
%   .err_on_trainset - mean training error over folds
%   .SV              - mean fraction of support vectors per fold
%   .N1 / .N2        - number of negative / positive trials
%
% Requires the spider toolbox (svm, data, train, test, use_spider).

% Make sure the spider toolbox is on the path; this is the single check
% (a second, redundant check with a machine-specific hardcoded path was
% removed).
if exist('svm','file')~=2
    if exist('use_spider','file')~=2
        error('Add the spider toolbox to the matlab path or download it')
    else
        use_spider
    end
end

% construct input data
% get the data in a matrix with dimensions repetitions*features
% one row in features should be ch1(1),...,ch1(end),ch2(1),....ch2(end),...
data1 = ecog1.data(ecog1.selected,:,:);
% BUGFIX: the original read ecog1 twice here; the second dataset must come
% from ecog2 (with its own channel selection).
data2 = ecog2.data(ecog2.selected,:,:);

% Flatten each trial into one feature row, ordered ch1(1..end), ch2(1..end),
% ... as described above.
% NOTE(review): assumes .data is channels x samples x trials -- TODO confirm
% against callers.
TRAIN = [flattenTrials(data1); flattenTrials(data2)];
% Class labels: +1 for trials of ecog1, -1 for trials of ecog2.
OBJECTIVE = [ones(size(data1,3),1); -ones(size(data2,3),1)];

% Discard unlabeled (label == 0) trials. None are produced by the labeling
% above, but keep the guard for robustness.
d_idx = find(OBJECTIVE == 0);
if ~isempty(d_idx)
    OBJECTIVE(d_idx)=[];
    TRAIN(d_idx,:)=[];
    fprintf('%i datapoints discarded from trainset.\n',length(d_idx));
end

numberData = length(OBJECTIVE);
numberNeg = sum(OBJECTIVE<0);
numberPos = sum(OBJECTIVE>0);

% Default penalty parameter according to Joachims: C = 1/mean(||x_k||^2).
% (nargin < 3 also covers the original nargin == 2 case.)
if nargin < 3 || isempty(C)
    scalar_prod = zeros(1,numberData);
    for k = 1:numberData
        scalar_prod(k) = TRAIN(k,:)*TRAIN(k,:)';
    end
    C = 1/mean(scalar_prod);
end

predictions = zeros(size(OBJECTIVE));
loores.losses = zeros(size(OBJECTIVE));
SVs = zeros(size(OBJECTIVE));

% set classification algorithm
alg = svm;
alg.C = C;
alg.optimizer = 'libsvm';

% --------------------------------------
% leave one out loop
for k = 1:numberData
    % progress indicator: one dot per fold, the fold number every 10 folds,
    % newline every 50
    if mod(k,10)==0, fprintf('%i',k); else fprintf('%s','.'); end
    if mod(k,50)==0, fprintf('%s\n',''); end
    % train on all trials except trial k
    curTRAIN = TRAIN;
    curOBJECTIVE = OBJECTIVE;
    curTRAIN(k,:) = [];
    curOBJECTIVE(k) = [];
    d = data(curTRAIN,curOBJECTIVE);
    % evalc suppresses spider's console output
    evalc('[tr res_alg] = train(alg,d)');
    % training error and support-vector fraction of this fold
    loores.losses(k) = sum(tr.X~=tr.Y)/length(tr.X);
    SVs(k) = sum(abs(res_alg.alpha)>0)/length(tr.X);
    % classify the held-out trial
    tst_d = data(TRAIN(k,:),OBJECTIVE(k));
    tst = test(res_alg, tst_d);
    predictions(k) = tst.X;
end

loores.prediction = predictions;
loores.pred_acc = mean(predictions==OBJECTIVE);
% per-class recall and precision (class 1 = +1, class 2 = -1)
loores.rec1 = sum(predictions==OBJECTIVE&OBJECTIVE>0)/sum(OBJECTIVE>0);
loores.rec2 = sum(predictions==OBJECTIVE&OBJECTIVE<0)/sum(OBJECTIVE<0);
loores.prec1 = sum(predictions>0&OBJECTIVE>0)/...
            (sum(predictions>0&OBJECTIVE>0)+sum(predictions>0&OBJECTIVE<0));
loores.prec2 = sum(predictions<0&OBJECTIVE<0)/...
            (sum(predictions<0&OBJECTIVE<0)+sum(predictions<0&OBJECTIVE>0));
loores.err_on_trainset = mean(loores.losses);
loores.SV = mean(SVs);
loores.N1 = numberNeg;
loores.N2 = numberPos;

function X = flattenTrials(d)
% FLATTENTRIALS  Reshape channels x samples x trials into trials x features,
% each row ordered ch1(1..end), ch2(1..end), ...
X = reshape(permute(d,[2 1 3]), [], size(d,3))';