% This is how I do the cross validation and training of data. If you look for the function online you will find their description and more details.

%create the partitions for the crossvalidation
%     CVO = cvpartition(classes,'k',8);
%     
%     disp(CVO);
%     %computes the misclassification error using Bayes
%     nbGauClassFun = @(xtrain,ytrain,xtest)...
%                (predict(NaiveBayes.fit(xtrain,ytrain), xtest));
% 
%     nbGauCVErr  = crossval('mcr',trainingData,classes,...
%               'predfun', nbGauClassFun,'partition',CVO,'mcreps',64);
%     
%     %computes the misclassification error using Decision Trees
%     dtClassFun = @(xtrain,ytrain,xtest)(eval(classregtree(xtrain,ytrain),xtest));
%     dtCVErr  = crossval('mcr',trainingData,classes, ...
%           'predfun', dtClassFun,'partition',CVO,'mcreps',64);
%        
%     crosValMCR = [nbGauCVErr, dtCVErr];


% For cross-validation with PCA you will need to define your own function handle. Because you will do the PCA on the 7 training folds and then project the test fold onto the axes you already obtained, you need to do something a little more complex.

% You define a function handle like so:
% 
% f = @(xtr,ytr,xte,yte)getMsePCA(xtr,ytr,xte,yte);
% 
% xtr and ytr are the training set you use, and respectively the classes for the training set. They have to have row correspondence. xtr contains observations row-wise and features column-wise. ytr has the class correspondence row-wise. xte and yte are the test set and the class correspondence for the test set.
% 
% You do PCA like so: [pc,score,latent,tsquare] = princomp(moments); score represents the representation of moments in PCA space. It also returns the variance for each component so you do not have to compute that yourself. (http://www.mathworks.co.uk/help/toolbox/stats/princomp.html)
% 
% So getMsePCA has to do PCA on xtr, project xte onto the resulting axes, and then return the misclassification error for xte by using yte.
% 
% I think that's it.
% 
% George
% Cross-validated mean-square error for a linear regression on the
% Fisher iris data.
%
% NOTE(review): the original script created the cvpartition BEFORE y was
% defined, which errors on a fresh workspace; the load and the definition
% of y must come first, so the statements are reordered below.

% Load the data and build the regression problem:
%   response  y : sepal length (column 1 of meas)
%   predictors x: intercept column plus the remaining three measurements
load('fisheriris');
y = meas(:,1);
x = [ones(size(y,1),1) meas(:,2:4)];

% For cross-fold validation, sub-divide the data into parts.
% NOTE(review): with a continuous y, cvpartition treats each unique value
% as a stratification group; for plain k-fold on n observations,
% cvpartition(numel(y),'k',4) is the usual form — confirm intent.
c = cvpartition(y, 'k', 4);

% Prediction function: fit coefficients on the training fold with
% regress, then predict the test fold by linear combination.
regf = @(xtrain, ytrain, xtest)(xtest * regress(ytrain, xtrain));

% Compute mean-square error using cross-validation (crossval defaults to
% 10-fold when no explicit partition is supplied). Left unsuppressed so
% the result is displayed.
cvMse = crossval('mse',x,y,'predfun',regf)

%phi = [ones(size(x,1),1),dataVector ];

