function scores = knn_test(Y, k, kernel, train_ind, test_ind)
% KNN_TEST  k-nearest-neighbor class scores from a similarity kernel.
%
% Usage:
%
%   SCORES = KNN_TEST(Y, K, KERNEL, TRAIN_IND, TEST_IND);
%
%   Y         - vector of class labels (values in 1..10) for the training
%               examples, aligned with the columns selected by TRAIN_IND.
%   K         - number of nearest neighbors considered per class.
%   KERNEL    - function handle; KERNEL(TEST_IND, TRAIN_IND) returns an
%               N_test x N_train similarity matrix.
%   TRAIN_IND - 1 x N_train training indices passed to KERNEL.
%   TEST_IND  - 1 x N_test test indices passed to KERNEL.
%
% SCORES is N_test x 10; each row sums to 1 and larger values indicate
% more likely classes (a class whose first K occurrences appear earlier in
% the similarity ranking scores higher).
%
% This function SHOULD NOT DO ANY TRAINING. This code needs to run in under
% 5 minutes. Therefore, you should train your model BEFORE submission, save
% it in a .mat file, and load it here.

fprintf('Now looking at %d neighbors \n', k)

num_classes = 10;                 % fixed by the project's label set

N = size(test_ind, 2);            % number of test examples
N_train = size(train_ind, 2);     % number of training examples

% test-vs-training similarities (test along rows, training along cols)
D = kernel(test_ind, train_ind);

% column indices of training examples, most similar first
[~, D_sorted] = sort(D, 2, 'descend');

% map each sorted neighbor index to its class label, then reshape back to
% N x N_train (transpose trick keeps row-major neighbor order intact)
D_classes = Y( reshape( D_sorted', numel(D_sorted), 1 ) );
D_classes = transpose( reshape( D_classes, N_train, N ) );

% For each class, average the rank positions of its first k occurrences;
% a small average index means the class dominates the nearest neighbors.
avg_ind = zeros(N, num_classes);

% i is the class, j is the test example
for i = 1:num_classes
    logical_compare = (D_classes == i);
    for j = 1:N
        ind = find(logical_compare(j,:), k);
        if isempty(ind)
            % Class i never appears among this example's neighbors (e.g.
            % the label is absent from Y). The original mean([]) produced
            % NaN and poisoned the whole score row; use the worst possible
            % average index instead.
            avg_ind(j,i) = N_train;
        else
            avg_ind(j,i) = mean(ind, 2);
        end
    end
end

% normalize rows so the average indices form a distribution
scores = bsxfun(@rdivide, avg_ind, sum(avg_ind, 2));

% invert: lowest average index = highest probability, then renormalize
scores = 1 - scores;
scores = bsxfun(@rdivide, scores, sum(scores, 2));

end
