function [RecoveredResult] = Ddavid_kNN_recover_multi_label_ver3(AllDataTraining, SampledLabel, Option, KNNList, CTable, TrueLabel)

% [RecoveredResult] = Ddavid_kNN_recover_multi_label_ver3(AllDataTraining, SampledLabel, Option, KNNList, CTable, TrueLabel)
%
% Two-stage recovery of multi-label annotations from positively-sampled
% labels: stage 1 runs a binary recoverer once per label; stage 2
% re-thresholds labels whose stage-1 result looks over-recovered (high
% missing rate) using several top-k heuristics, and picks one of them
% (Option.MethodName) as the final answer.
%
% <Input>
% AllDataTraining: [n*l], the training data, where n is the number of
%                  instances, l is the number of features
% SampledLabel: [n*m], the value is {-1, 1}, the sampled labels of all
%               instances, where n is the number of instances, m is the
%               number of labels, -1 means unknown and 1 means the
%               instance has the label
% Option: structure, includes the options required of this algorithm
%   BaseClassifier: 'WkNN' or 'WeightedSVM'
%   E: double, the range is (0.5, 1), the threshold
%   C: double, the range is (0, 0.5), the updating range
%   T: the max number of iterations
%   K: the K value of KNN
%   CombineLabel: set to true to combine the original labels
%   MissingThreshold: the missing threshold used in stage 2
%   MethodName: which top-k variant yields the final result in stage 2
%               WkNNRVer3Max
%               WkNNRVer3MinM_R
%               WkNNRVer3MinM_S
%               WkNNRVer3MinM_S2
%   ValidRate: the size rate of the validation set for calculating the
%              sampling rate constant
% KNNList: The KNN List (for saving time)
% CTable: [1*m], The Sampling Rate Table
% TrueLabel: [n*m], the value is {-1, 0, 1}, the true labels of all
%            instances
%
% <Output>
% RecoveredResult: structure
%   RecoveredLabel: [n*m], the value is {-1, 0, 1}, all recovered labels
%                   of all instances
%   kNNResult: [n*m], the value is [-1, 1], the scores of all instances
%   Weight: [n*m], the value is [-1, 1], the final weights of all instances
%   OriHammingLoss: double, the hamming loss of the SampledLabel
%   HammingLoss: double, the hamming loss of the RecoveredLabel
%   CondInfo1: struct, the information of the triggered times of each
%             condition in the 1st stage
%     HighConfidence: [T*m]
%     UnimportantTrueLabel: [T*m]
%     Normal: [T*m]
%     ProcessedTable: [n*T*m]
%     ClassResultTable: [n*(T+1)*m]
%     ConfusionMatrix: [4*m]
%     HammingLoss: double
%
% NOTE(review): this function also has a plotting side effect — it writes
% one 'Feature<i>.png' per label to the current directory via saveas.

N = size(SampledLabel, 1);    % number of instances
M = size(SampledLabel, 2);    % number of labels
RecoveredResult.RecoveredLabel = zeros(N, M);

RecoveredResult.SingleLabelHammingLoss = zeros(1, M);
RecoveredResult.OriSingleLabelHammingLoss = zeros(1, M);
RecoveredResult.kNNResult = zeros(N, M);
RecoveredResult.Weight = zeros(N, M);
RecoveredResult.CondInfo1.HighConfidence = zeros(Option.T, M);
RecoveredResult.CondInfo1.UnimportantTrueLabel = zeros(Option.T, M);
RecoveredResult.CondInfo1.Normal = zeros(Option.T, M);
RecoveredResult.CondInfo1.ProcessedTable = zeros(N, Option.T, M);
RecoveredResult.CondInfo1.ClassResultTable = zeros(N, Option.T + 1, M);
% BUGFIX: char == compares element-wise and errors when the string lengths
% differ (e.g. 'WkNN' vs 'WeightedSVM'); strcmp is the correct test.
if(strcmp(Option.BaseClassifier, 'WeightedSVM'))
    RecoveredResult.C = zeros(1, M);
    RecoveredResult.G = zeros(1, M);
end


% 1st stage: basic recovering, one independent binary problem per label
for i = 1:M
    % Map the unknown marker -1 to 0 for the binary recoverer
    Label = SampledLabel(:, i);
    Label(Label == -1) = 0;

    [RecoveredResult.RecoveredLabel(:, i), RecoveredResult.kNNResult(:, i), RecoveredResult.Weight(:, i), CondInfo] = Ddavid_kNN_recover_binary(AllDataTraining, Label, Option, KNNList);

    RecoveredResult.CondInfo1.HighConfidence(:, i) = CondInfo.HighConfidence;
    RecoveredResult.CondInfo1.UnimportantTrueLabel(:, i) = CondInfo.UnimportantTrueLabel;
    RecoveredResult.CondInfo1.Normal(:, i) = CondInfo.Normal;
    RecoveredResult.CondInfo1.ProcessedTable(:, :, i) = CondInfo.ProcessedTable;
    RecoveredResult.CondInfo1.ClassResultTable(:, :, i) = CondInfo.ClassResultTable;

    % BUGFIX: strcmp instead of char == (see above)
    if(strcmp(Option.BaseClassifier, 'WeightedSVM'))
        RecoveredResult.C(1, i) = CondInfo.C;
        RecoveredResult.G(1, i) = CondInfo.G;
    end

    % Binarize the recovered scores into {-1, 1}
    RecoveredResult.RecoveredLabel(RecoveredResult.RecoveredLabel(:, i) > 0, i) = 1;
    RecoveredResult.RecoveredLabel(RecoveredResult.RecoveredLabel(:, i) <= 0, i) = -1;
    RecoveredResult.SingleLabelHammingLoss(i) = Hamming_loss(RecoveredResult.RecoveredLabel(:, i)', TrueLabel(:, i)');
    RecoveredResult.OriSingleLabelHammingLoss(i) = Hamming_loss(SampledLabel(:, i)', TrueLabel(:, i)');
end
% Keep a snapshot of the stage-1 result before stage 2 modifies it
RecoveredResult.CondInfo1.RecoveredLabel = RecoveredResult.RecoveredLabel;

% Calculate the size, missing and recovery rate of each label
RecoveredResult.CondInfo1.SampledLabelSize = sum(SampledLabel == 1, 1);
RecoveredResult.CondInfo1.PreLabelSize = sum(RecoveredResult.CondInfo1.RecoveredLabel == 1, 1);
% Missing = sampled positives that stage 1 failed to keep positive
RecoveredResult.CondInfo1.Missing = sum((SampledLabel == 1) & (RecoveredResult.CondInfo1.RecoveredLabel == -1), 1);

% Output information of stage 1
RecoveredResult.CondInfo1.HammingLoss = Hamming_loss(RecoveredResult.CondInfo1.RecoveredLabel', TrueLabel');

% 2nd stage: improve the result according to missing
RecoveredResult.CondInfo2.RecoveryRate = zeros(1, M);
RecoveredResult.CondInfo2.AvgRecoveryRate = 0.0;
RecoveredLabelNumber = 0;

for i = 1:M
    % All missing (includes recovering nothing)
    if(RecoveredResult.CondInfo1.Missing(i) == RecoveredResult.CondInfo1.SampledLabelSize(i))
        % Stage 1 lost every sampled positive -> fall back to the
        % sampled training labels only
        RecoveredResult.RecoveredLabel(:, i) = SampledLabel(:, i);
    else
        % Calculate the recovery rate of each recovered label
        % (ratio of recovered positives to sampled positives)
        if(RecoveredResult.CondInfo1.SampledLabelSize(i) ~= 0 && RecoveredResult.CondInfo1.PreLabelSize(i) ~= 0)
            RecoveredResult.CondInfo2.RecoveryRate(i) = RecoveredResult.CondInfo1.PreLabelSize(i) / RecoveredResult.CondInfo1.SampledLabelSize(i);
            RecoveredResult.CondInfo2.AvgRecoveryRate = RecoveredResult.CondInfo2.AvgRecoveryRate + RecoveredResult.CondInfo2.RecoveryRate(i);
            RecoveredLabelNumber = RecoveredLabelNumber + 1;
        end
    end
end

% Calculate the average recovery rate of recovered labels
if(RecoveredLabelNumber ~= 0)
    RecoveredResult.CondInfo2.AvgRecoveryRate = RecoveredResult.CondInfo2.AvgRecoveryRate / RecoveredLabelNumber;
end

% The sampling rate constant C is precomputed per label and passed in as
% CTable (previously computed here via Ddavid_get_sampling_rate_C_by_MLkNN)
RecoveredResult.CondInfo2.C = CTable;

% Each variant starts from the stage-1/fallback result
RecoveredResult.CondInfo2.RecoveredLabelMax = RecoveredResult.RecoveredLabel;
RecoveredResult.CondInfo2.RecoveredLabelMinM_R = RecoveredResult.RecoveredLabel;
RecoveredResult.CondInfo2.RecoveredLabelMinM_S = RecoveredResult.RecoveredLabel;
RecoveredResult.CondInfo2.RecoveredLabelMinM_S2 = RecoveredResult.RecoveredLabel;

RecoveredResult.CondInfo2.TopKMissing = zeros(1, M);
RecoveredResult.CondInfo2.TopKRecoveryRate = zeros(1, M);
RecoveredResult.CondInfo2.TopKSampledRate = zeros(1, M);

% The debug information for finding the best top-k by sorting the result
RecoveredResult.CondInfo1.TrueLabelSize = sum(TrueLabel == 1, 1);
SortedResultTable = zeros(N, M);

% Cumulative precision/recall curves over the score-sorted instances,
% against the true labels (RecallTable/PrecisionTable) and against the
% sampled labels (RecallSTable/PrecisionSTable)
RecallTable = zeros(N, M);
PrecisionTable = zeros(N, M);
RecallSTable = zeros(N, M);
PrecisionSTable = zeros(N, M);
RecoveredResult.CondInfo2.PrecisionRate = zeros(1, M);
RecoveredResult.CondInfo2.RecallRate = zeros(1, M);

for i = 1:M
    % Only the sort order is needed; the sorted values are unused
    [~, SortIndex] = sort(RecoveredResult.kNNResult(:, i), 'descend');

    SortedResultTable(:, i) = TrueLabel(SortIndex, i);

    TPCounter = 0;      % true positives w.r.t. TrueLabel, within top-j
    TPSCounter = 0;     % true positives w.r.t. SampledLabel, within top-j
    for j = 1:N
        if(SampledLabel(SortIndex(j), i) == 1 && RecoveredResult.kNNResult(SortIndex(j), i) > 0.0)
            TPSCounter = TPSCounter + 1;
        end
        if(TrueLabel(SortIndex(j), i) == 1 && RecoveredResult.kNNResult(SortIndex(j), i) > 0.0)
            TPCounter = TPCounter + 1;
        end
        % NOTE(review): TrueLabelSize/SampledLabelSize can be 0 here,
        % producing NaN/Inf curve values — confirm this is acceptable
        RecallTable(j, i) = TPCounter / RecoveredResult.CondInfo1.TrueLabelSize(i);
        PrecisionTable(j, i) = TPCounter / j;
        RecallSTable(j, i) = TPSCounter / RecoveredResult.CondInfo1.SampledLabelSize(i);
        PrecisionSTable(j, i) = TPSCounter / j;
    end
    RecoveredResult.CondInfo2.PrecisionRate(i) = PrecisionSTable(N, i) / PrecisionTable(N, i);
    RecoveredResult.CondInfo2.RecallRate(i) = RecallTable(N, i) / RecallSTable(N, i);

    % Debug plot of the four curves, saved as Feature<i>.png
    plot(1:N, RecallTable(:, i), 'g', ...
        1:N, PrecisionTable(:, i), 'r', ...
        1:N, RecallSTable(:, i), 'c', ...
        1:N, PrecisionSTable(:, i), 'b');
    title({'x Green  Recall_T    = TP_T / # of Total True Labels   '; ...
        'x Red    Precision_T = TP_T / K                        '; ...
        'o Cyan   Recall_S    = TP_S / # of Total Sampled Labels'; ...
        'o Blue   Precision_S = TP_S / K                        '}, ...
        'fontname', 'Courier New');
    xlabel('Top-K Instances Sorted by Probability');
    saveas(gcf, ['Feature' num2str(i)], 'png');
end

for i = 1:M
    % Calculate TopKs
    % NOTE(review): if SampledLabelSize(i) == 0 this MissingRate is 0/0 =
    % NaN and the top-k values become NaN — confirm such labels never
    % reach the branches below (the Missing < SampledLabelSize guards
    % are false for NaN, so they are currently skipped)
    MissingRate = RecoveredResult.CondInfo1.Missing(i) / RecoveredResult.CondInfo1.SampledLabelSize(i);
    RecoveredResult.CondInfo2.TopKMissing(i) = ceil(RecoveredResult.CondInfo1.PreLabelSize(i) * (1 - MissingRate));
    RecoveredResult.CondInfo2.TopKRecoveryRate(i) = ceil(RecoveredResult.CondInfo1.SampledLabelSize(i) * RecoveredResult.CondInfo2.AvgRecoveryRate);
    RecoveredResult.CondInfo2.TopKSampledRate(i) = ceil(RecoveredResult.CondInfo1.SampledLabelSize(i) * RecoveredResult.CondInfo2.C(1, i));

    % WkNNRVer3Max: Use max of TopKMissing and TopKRecoveryRate
    if((RecoveredResult.CondInfo1.Missing(i) < RecoveredResult.CondInfo1.SampledLabelSize(i)) && ...
            (RecoveredResult.CondInfo1.Missing(i) > RecoveredResult.CondInfo1.SampledLabelSize(i) * Option.MissingThreshold))
        % High missing but not total missing
        if(RecoveredResult.CondInfo2.RecoveryRate(i) > RecoveredResult.CondInfo2.AvgRecoveryRate)
            % High missing and high recovery rate -> recover too much:
            % keep only the top-k highest-scoring instances positive
            TopKMax = max(RecoveredResult.CondInfo2.TopKMissing(i), RecoveredResult.CondInfo2.TopKRecoveryRate(i));
            [~, SortIndex] = sort(RecoveredResult.kNNResult(:, i), 'descend');
            RecoveredResult.CondInfo2.RecoveredLabelMax(:, i) = -1;
            RecoveredResult.CondInfo2.RecoveredLabelMax(SortIndex(1:TopKMax), i) = 1;
        end

        % Combine the sampled training labels
        RecoveredResult.CondInfo2.RecoveredLabelMax(SampledLabel(:, i) == 1, i) = 1;
    end

    % WkNNRVer3MinM_R: Use min of TopKMissing and TopKRecoveryRate
    if((RecoveredResult.CondInfo1.Missing(i) < RecoveredResult.CondInfo1.SampledLabelSize(i)) && ...
            (RecoveredResult.CondInfo1.Missing(i) > RecoveredResult.CondInfo1.SampledLabelSize(i) * Option.MissingThreshold))
        % High missing but not total missing
        if(RecoveredResult.CondInfo2.RecoveryRate(i) > RecoveredResult.CondInfo2.AvgRecoveryRate)
            % High missing and high recovery rate -> recover too much
            TopKMinM_R = min(RecoveredResult.CondInfo2.TopKMissing(i), RecoveredResult.CondInfo2.TopKRecoveryRate(i));
            [~, SortIndex] = sort(RecoveredResult.kNNResult(:, i), 'descend');
            RecoveredResult.CondInfo2.RecoveredLabelMinM_R(:, i) = -1;
            RecoveredResult.CondInfo2.RecoveredLabelMinM_R(SortIndex(1:TopKMinM_R), i) = 1;
        end

        % Combine the sampled training labels
        RecoveredResult.CondInfo2.RecoveredLabelMinM_R(SampledLabel(:, i) == 1, i) = 1;
    end

    % WkNNRVer3MinM_S: Use min of TopKMissing and TopKSampledRate
    if((RecoveredResult.CondInfo1.Missing(i) < RecoveredResult.CondInfo1.SampledLabelSize(i)) && ...
            (RecoveredResult.CondInfo1.Missing(i) > RecoveredResult.CondInfo1.SampledLabelSize(i) * Option.MissingThreshold))
        % High missing but not total missing
        if(RecoveredResult.CondInfo2.C(1, i) ~= 0 && RecoveredResult.CondInfo2.RecoveryRate(i) > RecoveredResult.CondInfo2.C(1, i))
            % High missing and high recovery rate -> recover too much
            TopKMinM_S = min(RecoveredResult.CondInfo2.TopKMissing(i), RecoveredResult.CondInfo2.TopKSampledRate(i));
            [~, SortIndex] = sort(RecoveredResult.kNNResult(:, i), 'descend');
            RecoveredResult.CondInfo2.RecoveredLabelMinM_S(:, i) = -1;
            RecoveredResult.CondInfo2.RecoveredLabelMinM_S(SortIndex(1:TopKMinM_S), i) = 1;
        end

        % Combine the sampled training labels
        RecoveredResult.CondInfo2.RecoveredLabelMinM_S(SampledLabel(:, i) == 1, i) = 1;
    end

    % WkNNRVer3MinM_S2: start from the MinM_S result for this label, and
    % additionally recover more when the missing rate is low.
    % BUGFIX: the original re-copied the whole MinM_S matrix into MinM_S2
    % on every iteration and then wrote the low-missing updates back into
    % RecoveredLabelMinM_S, contaminating the plain MinM_S result and
    % dropping the last label's update from MinM_S2. The copy is now
    % per-column and the updates target RecoveredLabelMinM_S2 only.
    RecoveredResult.CondInfo2.RecoveredLabelMinM_S2(:, i) = RecoveredResult.CondInfo2.RecoveredLabelMinM_S(:, i);
    if(RecoveredResult.CondInfo1.Missing(i) < RecoveredResult.CondInfo1.SampledLabelSize(i) * Option.MissingThreshold)
        % Low missing
        % NOTE(review): the comment below says "low recovery rate" but the
        % condition tests RecoveryRate > C, same as the high-missing
        % branch — confirm the intended inequality direction
        if(RecoveredResult.CondInfo2.C(1, i) ~= 0 && RecoveredResult.CondInfo2.RecoveryRate(i) > RecoveredResult.CondInfo2.C(1, i))
            % Low recovery rate -> recover too less
            TopKMinM_S = RecoveredResult.CondInfo2.TopKSampledRate(i);
            [~, SortIndex] = sort(RecoveredResult.kNNResult(:, i), 'descend');
            RecoveredResult.CondInfo2.RecoveredLabelMinM_S2(:, i) = -1;
            RecoveredResult.CondInfo2.RecoveredLabelMinM_S2(SortIndex(1:TopKMinM_S), i) = 1;
        end

        % Combine the sampled training labels
        RecoveredResult.CondInfo2.RecoveredLabelMinM_S2(SampledLabel(:, i) == 1, i) = 1;
    end
end

% Select the variant requested by Option.MethodName as the final result
switch(Option.MethodName)
    case 'WkNNRVer3Max'
        RecoveredResult.RecoveredLabel = RecoveredResult.CondInfo2.RecoveredLabelMax;
    case 'WkNNRVer3MinM_R'
        RecoveredResult.RecoveredLabel = RecoveredResult.CondInfo2.RecoveredLabelMinM_R;
    case 'WkNNRVer3MinM_S'
        RecoveredResult.RecoveredLabel = RecoveredResult.CondInfo2.RecoveredLabelMinM_S;
    case 'WkNNRVer3MinM_S2'
        RecoveredResult.RecoveredLabel = RecoveredResult.CondInfo2.RecoveredLabelMinM_S2;
    otherwise
        RecoveredResult.RecoveredLabel = RecoveredResult.CondInfo2.RecoveredLabelMinM_S;
end

% Output information of stage 2
RecoveredResult.CondInfo2.HammingLossMax = Hamming_loss(RecoveredResult.CondInfo2.RecoveredLabelMax', TrueLabel');
RecoveredResult.CondInfo2.HammingLossMinM_R = Hamming_loss(RecoveredResult.CondInfo2.RecoveredLabelMinM_R', TrueLabel');
RecoveredResult.CondInfo2.HammingLossMinM_S = Hamming_loss(RecoveredResult.CondInfo2.RecoveredLabelMinM_S', TrueLabel');
% BUGFIX: the original overwrote HammingLossMinM_S with the MinM_S2 loss
% and never stored HammingLossMinM_S2
RecoveredResult.CondInfo2.HammingLossMinM_S2 = Hamming_loss(RecoveredResult.CondInfo2.RecoveredLabelMinM_S2', TrueLabel');

% final Output information
RecoveredResult.OriHammingLoss = Hamming_loss(SampledLabel', TrueLabel');
RecoveredResult.HammingLoss = Hamming_loss(RecoveredResult.RecoveredLabel', TrueLabel');
[RecoveredResult.Micro, RecoveredResult.Macro] = Ddavid_get_micro_macro_prf(TrueLabel, RecoveredResult.RecoveredLabel);