function R = ecogMultiClassCV( N,TRAIN, OBJECTIVE, varargin)
% R = ecogMultiClassCV( N,TRAIN, OBJECTIVE,...)
% N-fold cross-validated multiclass classification of trial data.
%
% N - number of CV folds (scalar), OR a per-trial fold-assignment vector
%     whose length is length(OBJECTIVE) plus the number of zero-labeled
%     (discarded) trials; entries are fold indices
% TRAIN - the train data; format: TRAIN(trial,featureNumber)
%         (arrays with more than 2 dims are flattened via megReshapeTrain)
% OBJECTIVE - the corresponding class labels; trials labeled 0 are
%             discarded before cross-validation
% options:  'C' - penalty parameter (optional) default: see Joachims
%           'nfeat' - number of features to use default: all features
%           'featsel' - feature selection method 'tval'(ANOVA) 'pca'
%           'functname' - name of classifier algorithm
%                         'one_vs_one' (default)
%                         'one_vs_rest', (spider)
%                         'mc_svm', (spider)
%                         'nbayes'
%                         'oneVsRest' - linear svm,
%                         'oneVsOne' - linear svm,
%                         'smlr' - sparse multinomial logistic regression
%           'param' - parameter struct (additionally or alternatively to
%                     other options)
%                     possible fields:
%                       'nfeat' - number of features to use default: all features
%                       'featselect' - feature selection method 'none'(default),
%                                      'tval' (ANOVA),
%                                      'csp' (currently only pairwise in oneVsRest and oneVsOne)
%                                      'pca'
%                       'cspFeat'='var'; %'filter' or 'var'
%                       'nSpatialFilter' - number of spatial filter
%
% Returns struct R with fields (as built below):
%   prediction      - cross-validated predicted label per trial
%   pred_acc        - overall CV accuracy (mean over trials)
%   recall/precision/N - per-class measures, ordered like unique(OBJECTIVE)
%   losses          - per-fold error on the fold's training portion
%   foldPredAcc     - per-fold accuracy on the held-out portion
%   err_on_trainset - mean of losses
%   classifier/classPair/pairWisePredAcc/pairWiseRecall - only for the
%                     'oneVsOne'/'oneVsRest' methods (see below)
%
% example:
% prm.featselect='CSP';
% prm.cspFeat='var'; %'filter' or 'var'
% prm.nSpatialFilter=4;
% Rmulti = ecogMultiClassCV(5,TRAIN, LABELS,'functname','oneVsRest','C',1000,'trialorder','cont','param',prm);


% parse name/value options (and optional 'param' struct) into one struct
prm=args2Param(varargin);

% NOTE(review): the spider toolbox is required unconditionally, even for
% the non-spider methods ('nbayes', 'smlr', ...) - confirm this is wanted
if exist('svm','file')~=2,
    error('Spider toolbox not installed!');
end

% flatten e.g. trial x channel x time arrays to trial x feature
if ndims(TRAIN)>2,
    [TRAIN, prm.reshape] = megReshapeTrain(TRAIN,false);
end

% trials labeled 0 are excluded from the whole analysis
d_idx = find(OBJECTIVE == 0);
if ~isempty(d_idx),
    OBJECTIVE(d_idx)=[];
    TRAIN(d_idx,:)=[];
    fprintf('%i datapoints discarded from trainset.\n',length(d_idx));
end

% create multiclass objective for spider:
% OBJmat(trial,c) is +1 for the trial's class, -1 otherwise
classes = unique(OBJECTIVE);
nclasses=length(classes);
OBJmat = -ones(length(OBJECTIVE),length(classes));
for c_i = 1:length(classes),
    OBJmat(OBJECTIVE==classes(c_i),c_i) = 1;
end

% logical feature mask; all-true until 'tval' selection narrows it per fold
feat_bin = ones(1,size(TRAIN,2))>0;

predictions=zeros(size(OBJECTIVE));
loores.losses=zeros(1,max(N));
loores.foldPredAcc=zeros(1,max(N));
doFeatRed = true;
% set classification algorithm
% mc_alg is either a spider object or, for the in-house methods, the
% method name as a string; cmeth then dispatches inside the fold loop
alg = svm;
alg.optimizer='libsvm';
%alg.child = kernel(kStr, kernel_param);
funcHandle=str2func(prm.functName);
if strcmp(prm.functName,'mc_svm'),
    mc_alg = mc_svm;
elseif strcmp(prm.functName,'nbayes'),
    mc_alg = prm.functName;
    cmeth=1;
elseif strcmpi(prm.functName,'oneVsOne'),
    mc_alg = prm.functName;
    cmeth=2;
    % one row per class pair, signed decision per trial
    pairWisePred = zeros(nclasses*(nclasses-1)/2,length(OBJECTIVE));
    doFeatRed = false; % feature reduction within oVsO framework
elseif strcmpi(prm.functName,'oneVsRest'),
    mc_alg = prm.functName;
    cmeth=3;
    % one row per class, signed decision per trial
    pairWisePred = zeros(nclasses,length(OBJECTIVE));
    doFeatRed = false; % feature reduction within oVsR framework
elseif strcmp(prm.functName,'smlr'),
    mc_alg = prm.functName;
    cmeth=4;
elseif strcmp(prm.functName,'logreg'),
    mc_alg = prm.functName;
    cmeth=5;
    doFeatRed = false;
else
    % any other name is taken as a spider wrapper around the linear svm
    mc_alg = feval(funcHandle,alg);
end
% --------------------------------------
%  loop
% create subsets: selector(trial) = fold index in 1..N
if length(N)==1,
    if findstr('rand',prm.trialorder)==1,
    %     r_idx = ;
        % random assignment of trials to N (roughly) equal folds
        selector = ceil((1:length(OBJECTIVE))/(length(OBJECTIVE)/N));
        selector = selector(randperm(length(OBJECTIVE)));
    elseif findstr('cont',prm.trialorder)==1,
    %     r_idx = 1:length(OBJECTIVE);
        % contiguous blocks of trials per fold (keeps temporal order)
        selector = ceil((1:length(OBJECTIVE))/(length(OBJECTIVE)/N));
    else
        error('trialorder not defined. Check arguments');
    end
elseif length(N)==length(OBJECTIVE)+length(d_idx),
    % caller supplied a per-trial fold assignment (incl. discarded trials)
    N(d_idx)=[];
    selector=N;
    N=max(N);
else
    error('Length of selector N doesn''t match number of trials');
end

for k=1:N,
    fprintf('CV step #%i\n',k);
    left_idx = find(selector==k);                     % held-out trials
    used_idx = setdiff(1:length(OBJECTIVE),left_idx); % training trials
    if prm.doBalance,
        % equalize class frequencies within the training portion
        labelIdx = getBalancedTrainset(OBJECTIVE(used_idx));
        used_idx(~labelIdx) = [];
    end

    curTRAIN=TRAIN(used_idx,:);
    curOBJECTIVE=OBJECTIVE(used_idx);
    curOBJmat = OBJmat(used_idx,:);
    % select features (fit on the training fold only, applied to the test fold)
    if doFeatRed && ~isempty(strfind(prm.featselect,'tval')) && prm.nFeat>0,
        % ANOVA
        [feat_bin] = ecogFeatSelect(curTRAIN,curOBJECTIVE,prm);%prm.nFeat,prm.featselect);
        curTRAIN=curTRAIN(:,feat_bin);
        curTEST=TRAIN(left_idx,feat_bin);
    elseif strcmpi(prm.featselect,'pca'),
        % PCA
        if prm.nFeat>0,
            % keep a fixed number of components
            [curTRAIN,param]=kneu_pca(curTRAIN',[],'components',prm.nFeat);
            curTEST = kneu_pca(TRAIN(left_idx,:)',param,'method','apply');
            curTRAIN=curTRAIN';
            curTEST=curTEST';
        else
            [curTRAIN,param]=kneu_pca(curTRAIN');
            curTEST = kneu_pca(TRAIN(left_idx,:)',param,'method','apply');
            % select most significant components (one-way ANOVA, p < 0.01)
            Pval = zeros(1,size(curTRAIN,1));
            for v_i = 1:length(Pval),
                Pval(v_i) = anova1(curTRAIN(v_i,:),curOBJECTIVE,'off');
            end
            curTRAIN=curTRAIN(Pval<0.01,:)';
            curTEST=curTEST(Pval<0.01,:)';
        end
    else
        % no per-fold selection here; NOTE(review): feat_bin persists
        % across folds, but it is only narrowed when the 'tval' branch
        % above runs, whose condition is constant over folds
        curTEST = TRAIN(left_idx,feat_bin);
    end
    % classification
    if ischar(mc_alg), % no spider objects
        if cmeth==1, % nbayes
            nbc=nbayes_train(curTRAIN,curOBJECTIVE);
            CL= nbayes_test(nbc,curTRAIN);
            loores.losses(k) = sum(curOBJECTIVE~=CL)/length(CL); % train error
            CL= nbayes_test(nbc,curTEST);
            predictions(left_idx)=CL;
        elseif cmeth==2, % one vs one
            oVsO = megTrainOneVsOne(curTRAIN,curOBJECTIVE,'param',prm);%'C',prm.C,'balance',doBalance);
            loores.classifier{k}=oVsO;
            tst = megTestOneVsOne(oVsO,curTRAIN,'validation','sumsquare','param',prm);
            loores.losses(k) = sum(curOBJECTIVE~=tst.prediction)/length(curOBJECTIVE);
            tst = megTestOneVsOne(oVsO,TRAIN(left_idx,:),'validation','sumsquare','param',prm);
            predictions(left_idx)=tst.prediction;
            % signed pairwise decisions, used for pairwise accuracies below
            pairWisePred(:,left_idx)=sign(tst.distW2');
        elseif cmeth==3, % one vs Rest
            oVsR = megTrainOneVsRest(curTRAIN,curOBJECTIVE,'param',prm); %,'C',prm.C,'balance',doBalance,'param',prm);
            loores.classifier{k}=oVsR;
            tst = megTestOneVsRest(oVsR,curTRAIN,'validation','max','param',prm);
            loores.losses(k) = sum(curOBJECTIVE~=tst.prediction)/length(curOBJECTIVE);
            tst = megTestOneVsRest(oVsR,TRAIN(left_idx,:),'validation','max','param',prm);
            predictions(left_idx)=tst.prediction;
            pairWisePred(:,left_idx)=sign(tst.distW');
        elseif cmeth==4, % Sparse Multinomial Logistic Regression
            smlr=kneu_train_smlr(curTRAIN,curOBJECTIVE,prm);
            tst= kneu_test_smlr(smlr, curTRAIN);
            loores.losses(k) = sum(curOBJECTIVE~=tst.prediction)/length(curOBJECTIVE);
            tst= kneu_test_smlr(smlr,curTEST);
            predictions(left_idx)=tst.prediction;
        elseif cmeth==5, % (penalized) Logistic Regression
            lr=kneu_train_logreg(curTRAIN,curOBJECTIVE,'param',prm);
            tst= kneu_test_logreg(lr, curTRAIN);
            loores.losses(k) = sum(curOBJECTIVE~=tst.prediction)/length(curOBJECTIVE);
            tst= kneu_test_logreg(lr,curTEST);
            predictions(left_idx)=tst.prediction;
        end
    else %  Spider objects
        if isempty(prm.C),
            % determine default C according to Joachims
            mc_alg.C=ecogGetDefC(curTRAIN);
        else
            mc_alg.C=prm.C;
        end
        d = data(curTRAIN,curOBJmat);
        % suppress output
        evalc('[tr res_alg] = train(mc_alg,d)');
        loores.losses(k) = sum(sum(tr.X~=tr.Y))/numel(tr.X);
        tst_d=data(curTEST,OBJmat(left_idx,:));
        tst=test(res_alg, tst_d);
        % map spider's +/-1 output matrix back to class labels
        % NOTE(review): assumes exactly one positive entry per test row -
        % ties or all-negative rows would error here
        for p=1:length(left_idx),
            predictions(left_idx(p))=classes(tst.X(p,:)>0);
        end
    end
    loores.foldPredAcc(k)=sum(predictions(left_idx)==OBJECTIVE(left_idx))/length(left_idx);
end
% performance measures
% pairwise accuracy/recall per class pair; classPair is taken from the
% last fold's classifier, decisions were collected across all folds
if ischar(mc_alg)&&cmeth==2,
    loores.classPair=oVsO.classPair;
    for k=1:size(oVsO.classPair,1),
        c_1=classes(oVsO.classPair(k,1));
        c_2=classes(oVsO.classPair(k,2));
        idx = OBJECTIVE==c_1|OBJECTIVE==c_2;
        pred = zeros(sum(idx),1);
        pred(pairWisePred(k,idx)>0)=c_1;
        pred(pairWisePred(k,idx)<0)=c_2;
        loores.pairWisePredAcc(k)=mean(pred==OBJECTIVE(idx));
        loores.pairWiseRecall(k,:)=...
            [sum(pred==OBJECTIVE(idx)&OBJECTIVE(idx)==c_1)/sum(OBJECTIVE(idx)==c_1);...
            sum(pred==OBJECTIVE(idx)&OBJECTIVE(idx)==c_2)/sum(OBJECTIVE(idx)==c_2)];

    end
end
% per-class detector accuracy/recall for one-vs-rest
if ischar(mc_alg)&&cmeth==3,
    for k=1:length(classes),
        pred = zeros(length(OBJECTIVE),1);
        pred(pairWisePred(k,:)>0)=classes(k);
        pred(pairWisePred(k,:)<0)=0;
        loores.pairWisePredAcc(k)=mean(pred==OBJECTIVE|(pred==0&OBJECTIVE~=classes(k)));
        loores.pairWiseRecall(k,:)=...
            [sum(pred==OBJECTIVE&OBJECTIVE==classes(k))/sum(OBJECTIVE==classes(k));...
            sum(pred==0&OBJECTIVE~=classes(k))/sum(OBJECTIVE~=classes(k))];

    end
end
% overall cross-validated performance
loores.prediction = predictions;
loores.pred_acc = mean(predictions==OBJECTIVE);
for c_i=1:length(classes),
    loores.recall(c_i) = sum(predictions==OBJECTIVE&OBJECTIVE==classes(c_i))/sum(OBJECTIVE==classes(c_i));
    loores.precision(c_i) = sum(predictions==classes(c_i)&OBJECTIVE==classes(c_i))/...
        (sum(predictions==classes(c_i)&OBJECTIVE==classes(c_i))+...
        sum(predictions==classes(c_i)&OBJECTIVE~=classes(c_i)));
    loores.N(c_i) = sum(OBJECTIVE==classes(c_i));
end
loores.err_on_trainset = mean(loores.losses);
R=loores;
% ------------------------------------------------------------
function param = args2Param(args)
% ARGS2PARAM Convert a name/value cell array into a parameter struct.
%
% param = args2Param(args)
%   args - cell array of name/value pairs, e.g. {'C',1000,'balance',true}.
%          The special name 'param' takes a struct whose fields are all
%          copied into the result. Pairs are processed left to right, so
%          later pairs override fields set by an earlier 'param' struct
%          (and a later 'param' struct overrides earlier pairs).
%
% Unrecognized names are silently ignored (kept for backward
% compatibility with existing callers).
%
% Raises an error if a name is not a string or the list has odd length.

% default values
param.C = [];                 % [] -> default C chosen per fold (Joachims)
param.featselect = 'none';
param.nFeat = 0;              % 0 -> use all features
param.trialorder = 'rand';
param.doBalance = false;
param.functName = 'one_vs_one';
param.penalty = 50;

% an odd-length list means a name without a value; fail loudly instead of
% silently dropping the trailing argument
if mod(length(args),2)~=0,
    error('arguments must come in name/value pairs');
end

for k = 1:2:length(args)-1,
    if ~ischar(args{k}),
        error('argument name must be a string');
    end
    val = args{k+1};
    switch lower(args{k})
        case 'param'
            % copy every field of the supplied struct into param
            % (dynamic field names instead of eval: safer and faster)
            fNames = fieldnames(val);
            for p = 1:length(fNames),
                param.(fNames{p}) = val.(fNames{p});
            end
        case 'c'
            param.C = val;
        case 'nfeat'
            param.nFeat = val;
        case 'featsel'
            param.featselect = val;
        case 'functname'
            param.functName = val;
        case 'trialorder'
            param.trialorder = val;
        case 'balance'
            param.doBalance = val;
        otherwise
            % unknown names are ignored, matching the original behavior
    end
end
