function [error, stddev] = cv_knn(k, kernel, Y, cv_ind, test_ind, n_folds)
% CV_KNN  k-fold cross-validation error for a kernel k-nearest-neighbour classifier.
%
% Usage:
%   [error, stddev] = cv_knn(k, kernel, Y, cv_ind, test_ind)
%   [error, stddev] = cv_knn(k, kernel, Y, cv_ind, test_ind, n_folds)
%
% Inputs:
%   k        - number of nearest neighbours used by knn_test.
%   kernel   - kernel over all points, indexed as kernel(rows, cols) below
%              (presumably a precomputed Gram matrix; it is also passed
%              whole to knn_test for the test-set evaluation).
%   Y        - labels of all examples (training and test).
%   cv_ind   - indices (into Y / kernel) of the cross-validation pool.
%   test_ind - indices (into Y / kernel) of the held-out test set.
%   n_folds  - optional number of CV folds; defaults to 8.
%
% Outputs:
%   error  - 1x2 vector: mean over folds of [cv_fold_error, test_error].
%   stddev - 1x2 vector: standard deviation over folds of the same.
%
% SEE ALSO
%   MAKE_CV_PARTITION

% Default fold count kept at the original hard-coded value.
if nargin < 6
    n_folds = 8;
end

n = size(cv_ind, 2);

% Partition the CV pool into n_folds folds (fold id per point).
part = make_cv_partition(n, n_folds);

% Per-fold errors: column 1 = CV fold error, column 2 = test-set error.
err = zeros(n_folds, 2);

Ycv = Y(cv_ind);

% Hoist loop-invariant work out of the fold loop: the CV block of the
% kernel, and the test-set prediction/error, are identical on every
% iteration (assumes kernel lookup and knn_test are deterministic).
Kcv = kernel(cv_ind, cv_ind);
test_labels = knn_test(Y, k, kernel, cv_ind, test_ind);
test_err = rank_err(test_labels, Y(test_ind));

for i = 1:n_folds

    fprintf('Currently training on fold #%d ... \n', i);

    % Indices of the training set (all folds ~= i) and the
    % validation set (fold == i) within the CV pool.
    not_ith_ind = find(part ~= i);
    ith_ind = find(part == i);

    % True labels of fold i.
    ith_labels_true = Ycv(ith_ind);

    % Predict fold i's labels using the remaining folds as training data.
    ith_labels = knn_test(Ycv, k, Kcv, not_ith_ind, ith_ind);

    % rank_err compares predictions against true labels and returns the
    % misclassification rate (fraction of misclassified points).
    err(i, 1) = rank_err(ith_labels, ith_labels_true);
    err(i, 2) = test_err;
end

error = mean(err);
stddev = std(err);