

%  load '../SVM_DATA/ARData2';
  % Feature matrix: rows = samples; columns 1..end-1 = AR/Burg features,
  % last column = class label (assumed from the indexing in the CV loop below — confirm).
  % NOTE(review): `clear` takes variable names, not struct fields — `clear state.FV_burg`
  % likely does not free the field (and may error on some MATLAB versions); consider
  % `state = rmfield(state, 'FV_burg');` instead. Left unchanged pending confirmation.
  data = state.FV_burg; clear state.FV_burg;
%  load SVM_Trained_Model;

%% SVM configuration
    %------------------------------------------------------------------
    % Classification - LibSVM
    %------------------------------------------------------------------
    % libsvm_options reference (from the LibSVM README):
    % -s svm_type : set type of SVM (default 0)
    %    0 -- C-SVC              (multi-class classification)
    %    1 -- nu-SVC             (multi-class classification)
    %    2 -- one-class SVM
    %    3 -- epsilon-SVR        (regression)
    %    4 -- nu-SVR             (regression)
    % -t kernel_type : set type of kernel function (default 2)
    %    0 -- linear: u'*v
    %    1 -- polynomial: (gamma*u'*v + coef0)^degree
    %    2 -- radial basis function: exp(-gamma*|u-v|^2)
    %    3 -- sigmoid: tanh(gamma*u'*v + coef0)
    %    4 -- precomputed kernel (kernel values in training_instance_matrix)
    % -d degree : set degree in kernel function (default 3)
    % -g gamma : set gamma in kernel function (default 1/num_features)
    % -r coef0 : set coef0 in kernel function (default 0)
    % -c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)
    % -n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)
    % -p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)
    % -m cachesize : set cache memory size in MB (default 100)
    % -e epsilon : set tolerance of termination criterion (default 0.001)
    % -h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)
    % -b probability_estimates : whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)
    % -wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)
    % -v n : n-fold cross validation mode
    % -q : quiet mode (no outputs)

    %---------------------------------------------------------------------------------------------------
    % Active configuration: nu-SVC (-s 1) with RBF kernel (-t 2), nu = 0.1.
    % NOTE: under -s 1 the -c (C-SVC/SVR cost) and -p (epsilon-SVR loss) flags
    % are ignored, and -d / -r are unused by the RBF kernel; they are kept here
    % only to make the chosen values explicit.
    % FIX: removed the malformed '-wi 1' — LibSVM expects '-w<label> <weight>'
    % (e.g. '-w1 2'); the literal 'i' parsed as class label 0 with neutral
    % weight 1, i.e. a copy-paste no-op from the help text above.
    libsvm_options = '-s 1 -t 2 -d 3 -r 0 -c 1 -n 0.1 -p 0.1 -m 100 -e 0.0001 -h 1 -b 0 -q';
    Result = 0;      % accumulator for the confusion matrix over all CV runs
    %---------------------------------------------------------------------------------------------------

    R = size(data,1);   % number of samples (unused below; cleared at end)
    C = size(data,2);   % number of columns: C-1 features + 1 label column
    
% 10 repetitions of stratified 10-fold cross-validation (100 train/test runs).
n_reps  = 10;
n_folds = 10;
err_pct = zeros(n_folds, n_reps);   % preallocated per-fold test error (%)
                                    % (renamed from 'error', which shadowed
                                    % MATLAB's built-in error function)
for rep = 1:n_reps
    % Stratified fold assignment based on the class labels (last column).
    fold_id = crossvalind('Kfold', data(:,C), n_folds);

    for fold = 1:n_folds
        test_idx  = find(fold_id == fold);   % held-out fold
        train_idx = find(fold_id ~= fold);   % remaining folds

        feature_training = data(train_idx, 1:C-1);
        feature_testing  = data(test_idx,  1:C-1);
        class_training   = data(train_idx, end);
        class_testing    = data(test_idx,  end);

        % --- SVM training -------------------------------------------------
        disp('training');
        % Scale each feature to [0,1] using TRAINING-set statistics only,
        % then apply the same mapping to the test set (no test-set leakage).
        [feature_training, ps] = mapminmax(feature_training', 0, 1);
        feature_training = feature_training';
        feature_testing  = mapminmax('apply', feature_testing', ps)';
        model = svmtrain(class_training, feature_training, libsvm_options);
%       save SVM_Trained_Model model;

        % --- SVM prediction -----------------------------------------------
        disp('testing');
        TestPredict = svmpredict(class_testing, sparse(feature_testing), model);
        err_pct(fold, rep) = sum(TestPredict ~= class_testing) ...
                             / length(class_testing) * 100;
        % Accumulate the 3-class confusion matrix over all runs
        % (confmat_svm / kappa are project helpers — not in any toolbox).
        confm_i = confmat_svm(TestPredict, class_testing, 3);
        Result  = Result + confm_i;
        disp((rep-1)*n_folds + fold);   % progress: run 1..100
    end
end

% Summary statistics over all 100 runs.
% mean(x(:)) / std(x(:)) are exact replacements for mean2 / std2, dropping
% an unnecessary Image Processing Toolbox dependency.
MeanError = mean(err_pct(:));
StdError  = std(err_pct(:));
Kappa = kappa(Result);
Result_Percentage = (Result ./ sum(Result(:))) * 100;

clear R C rep fold n_reps n_folds fold_id train_idx test_idx err_pct confm_i ps ...
      feature_training feature_testing class_training class_testing TestPredict model;
 