function [RecoveredLabel, ClassResult, Weight, CondInfo] = Ddavid_kNN_recover_binary(Data, Label, Option, KNNList)

% [RecoveredLabel, ClassResult, Weight, CondInfo] = Ddavid_kNN_recover_binary(Data, Label, Option, KNNList)
%
% Iteratively recovers binary labels from partially tagged data by
% alternating a weighted base classifier with a per-instance weight
% update rule until Option.T iterations have run.
%
% <Input>
% Data: [n*l], the training data, where n is the number of instances, l is
%              the number of features
% Label: [n*1], the value is {0, 1}, the labels of all instances, where
%        0 means unknown and 1 means the instance has the label
% Option: structure, includes the options required of this algorithm
%   BaseClassifier: 'WkNN' or 'WeightedSVM'
%   E: double, the range is (0.5, 1), the threshold
%   C: double, the range is (0, 0.5), the updating range
%   T: the max number of iterations
%   CombineLabel: set to true to combine the original labels
%   'WkNN' options:
%   K: the K value of KNN
%   FinalWeightedKNN: set to true to do the final weighted KNN, or set to
%                     false to use the last weight as the result
%   WeightMethod: use different weight optimization
%                 1 - the method in the paper "Multi-View Learning from
%                     Imperfect Tagging"
%                 2 - the same as 1, but set the weights of unimportant
%                     points to 0
%   'WeightedSVM' options:
%
% KNNList: The KNN List (for saving time)
%
% <Output>
% RecoveredLabel: [n*1], the value is {-1, 0, 1}, the recovered labels
%                 of all instances
% ClassResult: [n*1], the value is [-1, 1], the scores of all instances
% Weight: [n*1], the value is [-1, 1], the final weights of all instances
% CondInfo: struct, the information of the triggered times of each
%           condition
%   HighConfidence: [T*1]
%   UnimportantTrueLabel: [T*1]
%   Normal: [T*1]
%   ProcessedTable: [n*T]
%   ClassResultTable: [n*(T + 1)]

% Unknown (0) entries of Label are initialized as negative instances;
% the initial weights are the initial labels themselves.
InitialLabel = Ddavid_kNN_recover_binary_initialize_negative_instances(Label);
Weight = InitialLabel;

N = size(InitialLabel, 1);
LastWeight = Weight;
CondInfo.HighConfidence = zeros(Option.T, 1);
CondInfo.UnimportantTrueLabel = zeros(Option.T, 1);
CondInfo.Normal = zeros(Option.T, 1);
CondInfo.ProcessedTable = zeros(N, Option.T);
CondInfo.ClassResultTable = zeros(N, Option.T + 1);
CondInfo.ClassResultTable(:, 1) = InitialLabel;

for IterNum = 1:Option.T
    disp(['Ddavid_kNN_recover_binary Iteration ' num2str(IterNum)]);

    [ClassResult, CondInfo] = Ddavid_kNN_recover_binary_run_classifier(Data, Label, InitialLabel, Weight, Option, KNNList, CondInfo, N);
    CondInfo.ClassResultTable(:, IterNum + 1) = ClassResult;

    % Score range over initially-positive instances that were classified
    % negative; used by WeightMethod 1 to rescale their weights.
    Misclassified = (ClassResult < 0) & (InitialLabel > 0);
    MaxR = max(ClassResult(Misclassified));
    MinR = min(ClassResult(Misclassified));

    % Any WeightMethod other than 1 or 2 leaves all weights untouched
    % (same as the original switch with no matching case).
    if(Option.WeightMethod == 1 || Option.WeightMethod == 2)
        for i = 1:N
            if(sign(ClassResult(i)) == sign(LastWeight(i)) && abs(ClassResult(i)) > Option.E && abs(LastWeight(i)) > Option.E)
                % Two consecutive confident, agreeing predictions: lock
                % the weight at full magnitude.
                Weight(i) = sign(ClassResult(i)) * 1.0;
                CondInfo.ProcessedTable(i, IterNum) = 1;
                CondInfo.HighConfidence(IterNum, 1) = CondInfo.HighConfidence(IterNum, 1) + 1;
            elseif(sign(ClassResult(i)) == -1 && Label(i) == 1)
                % Originally tagged positive but classified negative: an
                % "unimportant" true-label point.
                if(Option.WeightMethod == 1)
                    % Rescale the score into (0, Option.C] by its position
                    % within the misclassified-positive score range.
                    if(MaxR > MinR)
                        Weight(i) = Option.C * (ClassResult(i) - MinR) / (MaxR - MinR);
                    else
                        Weight(i) = Option.C;
                    end
                else
                    % WeightMethod 2: drop unimportant points entirely.
                    % (The original had a dead if/else assigning 0 in
                    % both branches.)
                    Weight(i) = 0;
                end
                CondInfo.ProcessedTable(i, IterNum) = -1;
                CondInfo.UnimportantTrueLabel(IterNum, 1) = CondInfo.UnimportantTrueLabel(IterNum, 1) + 1;
            else
                % Normal case: the classifier score becomes the weight.
                Weight(i) = ClassResult(i);
                CondInfo.Normal(IterNum, 1) = CondInfo.Normal(IterNum, 1) + 1;
            end
            % Show debug CondInfo
            % if(CondInfo.ProcessedTable(i, IterNum) == 1)
            %     disp(['Iter ' num2str(IterNum) ' High Confidence Point ' num2str(i) ': ' num2str(CondInfo.ClassResultTable(i, IterNum)) ' -> ' num2str(CondInfo.ClassResultTable(i, IterNum + 1))]);
            % end
        end
    end

    LastWeight = Weight;
end

if(Option.FinalWeightedKNN)
    % One more classification pass with the converged weights.
    % (Sharing the helper also fixes the original final-pass bug that
    % assigned CondInfo.g but trained with CondInfo.G.)
    [ClassResult, CondInfo] = Ddavid_kNN_recover_binary_run_classifier(Data, Label, InitialLabel, Weight, Option, KNNList, CondInfo, N);
else
    ClassResult = Weight;
end
RecoveredLabel = sign(ClassResult);
if(Option.CombineLabel == true)
    % Instances that carried an original positive tag always keep it.
    RecoveredLabel(Label == 1) = 1;
end

function [ClassResult, CondInfo] = Ddavid_kNN_recover_binary_run_classifier(Data, Label, InitialLabel, Weight, Option, KNNList, CondInfo, N)
% Run the configured base classifier once; return scores in [-1, 1].
% Shared by the iteration loop and the optional final pass so the two
% call sites cannot drift apart.
switch(Option.BaseClassifier)
    case 'WeightedSVM'
        % Fixed SVM hyper-parameters, recorded in CondInfo for reference.
        CondInfo.C = 1.0;
        CondInfo.G = 1.0 / N;
        WeightP = abs(Weight);
        Ddavid_call_SVM_training_single_label(Data, Label, CondInfo.C, CondInfo.G, WeightP);
        [ClassResult] = Ddavid_call_SVM_testing_single_label(Data, Label);
        % Map the SVM output into [-1, 1]; presumably the tester returns
        % scores in [0, 1] -- TODO confirm against the SVM wrapper.
        ClassResult = ClassResult * 2.0 - 1.0;
    otherwise
        % 'WkNN' and any unrecognized value both fall back to weighted
        % kNN, matching the original switch's otherwise branch.
        ClassResult = Ddavid_weighted_kNN_binary_classification(Weight, InitialLabel, Option.K, KNNList);
end
