function [Result, TestingProb, FeaturesTraining, FeaturesTesting] = Ddavid_MLkLNN(SampledTrueLabelTraining, TrueLabelTesting, K, TrainingKNNList, TestingKNNList)

% [Result, TestingProb, FeaturesTraining, FeaturesTesting] = Ddavid_MLkLNN(SampledTrueLabelTraining, TrueLabelTesting, K, TrainingKNNList, TestingKNNList)
%
% Multi-label kNN classifier: scores each (testing instance, label) pair
% with a Laplace-smoothed naive-Bayes product of a prior and discrete
% kNN-derived feature likelihoods, then ranks the labels.
%
% <Input>
% SampledTrueLabelTraining: [n*k], values in {-1, 1}, the sampled labels
%                                  of the training data, k is the number of
%                                  labels
% TrueLabelTesting: [n'*k], values in {-1, 1}, the real answer of labels
%                           of the testing data (only its row count n' is
%                           used in this function)
% K: The K value of KLNN, K < n
% TrainingKNNList: [n*(n-1)], The KNN List between training points and
%                             other training points (for saving time)
% TestingKNNList: [n'*n], The KNN List between testing points and training
%                         points (for saving time)
%
% <Output>
% Result: [n'*k], the predicted rank of testing data (labels sorted by
%                 descending probability)
% TestingProb: [n'*k], the naive-Bayes score per testing instance and label
% FeaturesTraining, FeaturesTesting: feature matrices of the LAST label
%                 processed in the loop (exposed for inspection/debugging)

TrainingSize = size(SampledTrueLabelTraining, 1);
TestingSize = size(TrueLabelTesting, 1);
LabelSize = size(SampledTrueLabelTraining, 2);

TestingProb = ones(TestingSize, LabelSize);

s = 1; % Laplace smoothing constant

% Training
%%% TrainingLabeledSize: [1*LabelSize], number of positively-labeled
%%% training instances per label; feeds the smoothed prior probability

TrainingLabeledSize = Ddavid_get_training_labeled_size(SampledTrueLabelTraining);
PriorProb = (s + TrainingLabeledSize) / (2 * s + TrainingSize);

%%% Define features. Each enabled feature family occupies a contiguous
%%% column range [start, start + size - 1] of the feature matrices.

FeatureNumber = 0;

FKNN = true;
if(FKNN == true)
    FKNNStart = FeatureNumber + 1;
    FKNNSize = 1;
    FeatureNumber = FeatureNumber + FKNNSize;
end
FLabeledRanks = false;
if(FLabeledRanks == true)
    FLabeledRanksStart = FeatureNumber + 1;
    FLabeledRanksSize = K;
    FeatureNumber = FeatureNumber + FLabeledRanksSize;
end

% BUG FIX: zeros(FeatureNumber) allocates a FeatureNumber-by-FeatureNumber
% square matrix; the intended shape is a [1*FeatureNumber] vector holding
% the number of distinct values each feature can take.
FeaturesRangeSize = zeros(1, FeatureNumber);

%%% Per-feature value-range sizes (used in the Laplace denominator)
if(FKNN == true)
    % The KNN-count feature takes values 0..K, i.e. K+1 possibilities
    FeaturesRangeSize(FKNNStart:(FKNNStart + FKNNSize - 1)) = ones(1, FKNNSize) * (K + 1);
end
if(FLabeledRanks == true)
    % Rank features take values 1..TrainingSize plus the padding value
    FeaturesRangeSize(FLabeledRanksStart:(FLabeledRanksStart + FLabeledRanksSize - 1)) = ones(1, FLabeledRanksSize) * (TrainingSize + 1);
end

for LabelCounter = 1:LabelSize
    FeaturesTraining = zeros(TrainingLabeledSize(LabelCounter), FeatureNumber);
    FeaturesTesting = zeros(TestingSize, FeatureNumber);

    SampledTrueLabelTrainingOfSingleLabel = SampledTrueLabelTraining(:, LabelCounter);

    % Training

    %%% Filter the positive sorted training KNN list and transform it to
    %%% the labeled list (one row per positively-labeled training instance)

    FilteredTrainingKNNLabeledList = Ddavid_get_filtered_knn_labeled_list(SampledTrueLabelTrainingOfSingleLabel, TrainingKNNList);

    %%% Feature: number of positive neighbors among the first K

    if(FKNN == true)
        FeaturesTraining(:, FKNNStart:(FKNNStart + FKNNSize - 1)) = sum(FilteredTrainingKNNLabeledList(:, 1:K) == 1, 2);
    end

    %%% Feature: ranks of the first FLabeledRanksSize positive neighbors,
    %%% padded with TrainingSize+1 when fewer positives exist

    if(FLabeledRanks == true)
        for SizeCounter = 1:TrainingLabeledSize(LabelCounter)
            TempList = find(FilteredTrainingKNNLabeledList(SizeCounter, :) == 1);
            % NOTE(review): this assumes every positive training instance
            % sees exactly (labeled count - 1) positives in its filtered
            % KNN list, i.e. length(TempList) == TempListSize. If the
            % helper can return fewer, use size(TempList, 2) here (as the
            % testing branch below does) to avoid a concatenation size
            % mismatch. Preserved as-is pending confirmation.
            TempListSize = TrainingLabeledSize(LabelCounter) - 1; % TempListSize is equal to (the number of instances with the label) - 1
            if(TempListSize > FLabeledRanksSize)
                FeaturesTraining(SizeCounter, FLabeledRanksStart:(FLabeledRanksStart + FLabeledRanksSize - 1)) = TempList(1:FLabeledRanksSize);
            else
                FeaturesTraining(SizeCounter, FLabeledRanksStart:(FLabeledRanksStart + FLabeledRanksSize - 1)) = [TempList ones(1, FLabeledRanksSize - TempListSize) * (TrainingSize + 1)];
            end
        end
    end

    % Testing

    TestingKNNLabeledList = Ddavid_get_knn_labeled_list(SampledTrueLabelTrainingOfSingleLabel, TestingKNNList);

    %%% Feature: KNN count, same definition as the training side

    if(FKNN == true)
        FeaturesTesting(:, FKNNStart:(FKNNStart + FKNNSize - 1)) = sum(TestingKNNLabeledList(:, 1:K) == 1, 2);
    end

    %%% Feature: labeled ranks, padded like the training side but with the
    %%% measured number of positives

    if(FLabeledRanks == true)
        for SizeCounter = 1:TestingSize
            TempList = find(TestingKNNLabeledList(SizeCounter, :) == 1);
            TempListSize = size(TempList, 2);
            if(TempListSize > FLabeledRanksSize)
                FeaturesTesting(SizeCounter, FLabeledRanksStart:(FLabeledRanksStart + FLabeledRanksSize - 1)) = TempList(1:FLabeledRanksSize);
            else
                FeaturesTesting(SizeCounter, FLabeledRanksStart:(FLabeledRanksStart + FLabeledRanksSize - 1)) = [TempList ones(1, FLabeledRanksSize - TempListSize) * (TrainingSize + 1)];
            end
        end
    end

    %%% Score: prior times the Laplace-smoothed per-feature likelihoods
    %%% P(feature value | label), estimated from the positive training set

    for SizeCounter = 1:TestingSize
        TestingProb(SizeCounter, LabelCounter) = TestingProb(SizeCounter, LabelCounter) * PriorProb(LabelCounter);
        for FeatureCounter = 1:FeatureNumber
            Denominator = TrainingLabeledSize(LabelCounter) + FeaturesRangeSize(FeatureCounter) * s;
            % Vectorized match count (replaces the former element-by-element
            % loop): how many positive training instances share this
            % testing instance's feature value, plus the smoothing term
            Numerator = s + sum(FeaturesTraining(:, FeatureCounter) == FeaturesTesting(SizeCounter, FeatureCounter));
            TestingProb(SizeCounter, LabelCounter) = TestingProb(SizeCounter, LabelCounter) * Numerator / Denominator;
        end
    end
end

% Rank labels per testing instance by descending probability
[~, Result] = sort(TestingProb, 2, 'descend');
