%% Experiment configuration
% Datasets to run, ordered by importance / size.
DatasetList = { ...
    'Ddavid_yeast', ...
    };
% Full candidate set (swap in entries above to run them):
% DatasetList = { ...
%     'Ddavid_CAL500_normal', ...
%     'Ddavid_yeast', ...
%     'Ddavid_emotions_normal', ...
%     'Ddavid_scene', ...
%     'Ddavid_mediamill_small', ...
%     'Ddavid_birds' ...
%     'Ddavid_NUS-WIDE-cVLADplus' ...
%     'Ddavid_EUR-Lex', ...
%     };
DatasetListSize = numel(DatasetList);

% Available methods:
%   MLkNNFullLabeled
%   MLkNNSampledLabeledAndUnlabeled
%   MLkNNSampledOnlyLabeled
%   WkNNRVer3Max
%   WkNNRVer3MinM_R
%   WkNNRVer3MinM_S
%   WkNNRVer3MinM_S2
%   WeightedSVM
%   MPUWithoutStacking
%   MPU
MethodName = 'WeightedSVM';

% Number of repeated runs per fold.
ExperimentTimes = 1;

% --- Parameters for recovery Ver1 and later ---
Recover_Option.E = 0.8;   % threshold; valid range (0.5, 1)
Recover_Option.C = 0.2;   % updating range; valid range (0, 0.5)
Recover_Option.T = 5;
Recover_Option.K = 10;    % default K; the sweep loop overwrites this per run
Recover_Option.IncludeInstanceItself = true;
Recover_Option.CombineLabel = false;
Recover_Option.FinalWeightedKNN = true;
MLkNN_K = 10;
% WeightMethod selects the weight-optimization variant:
%   1 - the method in the paper "Multi-View Learning from Imperfect Tagging"
%   2 - same as 1, but the weights of unimportant points are forced to 0
Recover_Option.WeightMethod = 2;

% --- Parameters for recovery Ver2 and later ---
Recover_Option.MissingThreshold = 0.5;
Recover_Option.TopKMethod = 3;

% --- Parameters for recovery Ver3 and later ---
Recover_Option.MethodName = MethodName;
% Recover_Option.ValidRate = 0.2;

% --- Parameters for MPU ---
MPU_Option.IterN = 5;
MPU_Option.Alpha = 0.1;
MPU_Option.StackingLevel = 2;

% --- Experiment range ---
RootResultFolderName = 'D:\School\Meeting\Program\Matlab\Result\kNN_Recover_Ver3_4';
% UsedKList = [3, 11, 19, 27];
UsedKList = 19;
PercentLabeledDataList = 20;
PercentKeepingLabelsList = 25;
FoldNumberList = 1;
FoldNumber = numel(FoldNumberList);

% Tag appended to result filenames so runs with different parameter
% settings can be told apart.
Parameters = 'WM2';

% Enter the results root; every folder and file below is created relative
% to this directory (we cd into a per-dataset/per-K folder and back out).
cd(RootResultFolderName);

for NameC = 1:DatasetListSize
    Dataset = DatasetList{NameC};
    
    disp(Dataset);
    
    % Different K for the recovering
    for l = UsedKList
        Recover_Option.K = l;  % sweep value overrides the default K set above
        NewFolderName = [Dataset '_K' int2str(Recover_Option.K)];
        % NOTE(review): mkdir only warns (does not error) when the folder
        % already exists, so reruns reuse the same folder.
        mkdir(NewFolderName);
        cd(NewFolderName);

        % Percent of labeled data
        for i = PercentLabeledDataList

            % Percent of keeping labels
            for j = PercentKeepingLabelsList
                Filename = [Dataset '_' int2str(i) '_' int2str(j)];

                % Initialize avg hamming scores of all folds
                FinalHamTrain = 0.0;
                FinalHamTest = 0.0;
                
                for k = FoldNumberList
                    FilenameWithFold = [Filename '_Fold' int2str(k)];
                    % Injects the fold's pre-computed variables into the
                    % workspace; presumably AllDataTraining, AllDataTesting,
                    % TrueLabelTraining, SampledTrueLabelTraining, the
                    % FullKNNList* matrices and MLkNNCTable used below --
                    % TODO confirm against the data-preparation script.
                    load(FilenameWithFold);
                    
                    %%% Build KNNLists: keep only the first K neighbour
                    %%% columns of the full training-to-training list.
                    % NOTE(review): whether this truly excludes the instance
                    % itself depends on how FullKNNListTrainingToTraining was
                    % generated (i.e. whether column 1 is already a genuine
                    % neighbour) -- verify in the KNN-list builder.
                    ExcludeInstanceItselfKNNList = FullKNNListTrainingToTraining(:, 1:(Recover_Option.K));

                    %%% Initialize avg hamming scores of single fold
                    AvgHamTrain = 0.0;
                    AvgHamTest = 0.0;
                    
                    %%% Recover and Test
                    for CurrentExperimentTimes = 1:ExperimentTimes
                        % Methods with no recovery stage keep this at 0.
                        ResultTrain.HammingLoss = 0.0;
                        
                        switch(MethodName)
                            % MLkNN baselines. These do not depend on the
                            % recovery K, so they run only for the first
                            % entry of UsedKList.
                            % NOTE(review): for later K values ResultTest
                            % keeps the value from the previous K iteration
                            % and is re-averaged/re-saved; if it was never
                            % set, the accumulation below errors on an
                            % undefined variable.
                            case 'MLkNNFullLabeled'
                                %%% Full labeled training data MLkNN
                                if(l == UsedKList(1))
                                    [ResultTest] = Ddavid_MLkNN_caller(AllDataTraining, TrueLabelTraining, MLkNN_K, FullKNNListTrainingToTraining, AllDataTesting, TrueLabelTesting, FullKNNListTestingToTraining);
                                end
                            case 'MLkNNSampledLabeledAndUnlabeled'
                                %%% All Sampled training data MLkNN
                                if(l == UsedKList(1))
                                    [ResultTest] = Ddavid_MLkNN_caller(AllDataTraining, SampledTrueLabelTraining, MLkNN_K, FullKNNListTrainingToTraining, AllDataTesting, TrueLabelTesting, FullKNNListTestingToTraining);
                                end
                            case 'MLkNNSampledOnlyLabeled'
                                %%% Only Sampled training data MLkNN
                                if(l == UsedKList(1))
                                    ResultTrain.OriHammingLoss = 0.0;
                                    [ResultTest] = Ddavid_MLkNN_caller(SampledOnlyDataTraining, SampledOnlyTrueLabelTraining, MLkNN_K, FullKNNListSampTrainingToSampTraining, AllDataTesting, TrueLabelTesting, FullKNNListTestingToSampTraining);
                                end
                                
                            % WkNN Recover variants. Stage 1 recovers the
                            % training labels; stage 2 trains/tests MLkNN on
                            % the recovered labels. The variants differ only
                            % in which recovered-label field of
                            % ResultTrain.CondInfo2 feeds stage 2.
                            case 'WkNNRVer3Max'
                                % ver 3 Recover training labels without II
                                [ResultTrain] = Ddavid_kNN_recover_multi_label_ver3(AllDataTraining, SampledTrueLabelTraining, Recover_Option, ExcludeInstanceItselfKNNList, MLkNNCTable, TrueLabelTraining);

                                % Ver 3 Max Test Stage 2 recovered training labels without II
                                [ResultTest] = Ddavid_MLkNN_caller(AllDataTraining, ResultTrain.CondInfo2.RecoveredLabelMax, MLkNN_K, FullKNNListTrainingToTraining, AllDataTesting, TrueLabelTesting, FullKNNListTestingToTraining);
                            case 'WkNNRVer3MinM_R'
                                % ver 3 Recover training labels without II
                                [ResultTrain] = Ddavid_kNN_recover_multi_label_ver3(AllDataTraining, SampledTrueLabelTraining, Recover_Option, ExcludeInstanceItselfKNNList, MLkNNCTable, TrueLabelTraining);

                                % Ver 3 Min Missing Rate and Recovered Rate Test Stage 2 recovered training labels without II
                                [ResultTest] = Ddavid_MLkNN_caller(AllDataTraining, ResultTrain.CondInfo2.RecoveredLabelMinM_R, MLkNN_K, FullKNNListTrainingToTraining, AllDataTesting, TrueLabelTesting, FullKNNListTestingToTraining);
                            case 'WkNNRVer3MinM_S'
                                % ver 3 Recover training labels without II
                                [ResultTrain] = Ddavid_kNN_recover_multi_label_ver3(AllDataTraining, SampledTrueLabelTraining, Recover_Option, ExcludeInstanceItselfKNNList, MLkNNCTable, TrueLabelTraining);

                                % Ver 3 Min Missing Rate and Sampled Rate Test Stage 2 recovered training labels without II
                                [ResultTest] = Ddavid_MLkNN_caller(AllDataTraining, ResultTrain.CondInfo2.RecoveredLabelMinM_S, MLkNN_K, FullKNNListTrainingToTraining, AllDataTesting, TrueLabelTesting, FullKNNListTestingToTraining);
                            case 'WkNNRVer3MinM_S2'
                                % ver 3 Recover training labels without II
                                [ResultTrain] = Ddavid_kNN_recover_multi_label_ver3(AllDataTraining, SampledTrueLabelTraining, Recover_Option, ExcludeInstanceItselfKNNList, MLkNNCTable, TrueLabelTraining);

                                % Ver 3 Min Missing Rate and Sampled Rate + recover more Test Stage 2 recovered training labels without II
                                [ResultTest] = Ddavid_MLkNN_caller(AllDataTraining, ResultTrain.CondInfo2.RecoveredLabelMinM_S2, MLkNN_K, FullKNNListTrainingToTraining, AllDataTesting, TrueLabelTesting, FullKNNListTestingToTraining);
                                
                            % WeightedSVM: stage 1 recovers the labels, then
                            % a weighted SVM is trained on the recovered
                            % labels with an all-ones instance-weight matrix
                            % and the C/G parameters returned by stage 1.
                            case 'WeightedSVM'
                                Recover_Option.BaseClassifier = 'WeightedSVM';
                                
                                % ver 3 Recover training labels without II
                                [ResultTrain] = Ddavid_kNN_recover_multi_label_ver3(AllDataTraining, SampledTrueLabelTraining, Recover_Option, ExcludeInstanceItselfKNNList, MLkNNCTable, TrueLabelTraining);
                                [ResultTest] = Ddavid_Weighted_SVM_caller(AllDataTraining, ResultTrain.RecoveredLabel, ResultTrain.C, ResultTrain.G, ones(size(SampledTrueLabelTraining, 1), size(SampledTrueLabelTraining, 2)), AllDataTesting, TrueLabelTesting);
                                
                            % MPU (same first-K-only guard as the MLkNN
                            % baselines above).
                            case 'MPUWithoutStacking'
                                % MPU without Stacking
                                if(l == UsedKList(1))
                                    [ResultTest] = Ddavid_MPU(AllDataTraining, AllDataTesting, SampledTrueLabelTraining, TrueLabelTesting, MPU_Option, MLkNNCTable);
                                end
                                
                            % NOTE(review): an unrecognized MethodName falls
                            % through silently here; ResultTest then keeps a
                            % stale value (or is undefined on the very first
                            % pass, erroring at the accumulation below).
                            otherwise
                        end
                        
                        % Accumulate this run's Hamming losses for the
                        % per-fold average.
                        AvgHamTrain = AvgHamTrain + ResultTrain.HammingLoss;
                        AvgHamTest = AvgHamTest + ResultTest.HammingLoss;
                    
                        %%% Save results of this single run to a .mat file.
                        save([FilenameWithFold '_' MethodName '_' Parameters '_Results_T' int2str(CurrentExperimentTimes)], ...
                            'ResultTrain', ...
                            'ResultTest' ...
                            );
                    end

                    %%% Average hamming scores over the repeated runs of
                    %%% this single fold.
                    AvgHamTrain = AvgHamTrain / ExperimentTimes;
                    AvgHamTest = AvgHamTest / ExperimentTimes;
                    
                    % Accumulate the per-fold averages.
                    FinalHamTrain = FinalHamTrain + AvgHamTrain;
                    FinalHamTest = FinalHamTest + AvgHamTest;
                end

                % Average the accumulated per-fold scores over all folds.
                FinalHamTrain = FinalHamTrain / FoldNumber;
                FinalHamTest = FinalHamTest / FoldNumber;

                % Write a small human-readable summary of the averaged
                % training/testing Hamming losses.
                fid = fopen([Filename '_' MethodName '_' Parameters '_Final_Result.txt'], 'w');
                
                fprintf(fid, '[Training]\n');
                fprintf(fid, '%s\n', ...
                    MethodName ...
                    );
                fprintf(fid, '%.4f\n' , ...
                    FinalHamTrain ...
                    );
                fprintf(fid, '\n');
                fprintf(fid, '[Testing]\n');
                fprintf(fid, '%s\n', ...
                    MethodName ...
                    );
                fprintf(fid, '%.4f\n' , ...
                    FinalHamTest ...
                    );
                
                fclose(fid);

            end
        end

        % Back to the results root for the next K value / dataset.
        cd('..');
    end
end

% Audible notification that the whole experiment run has finished.
DonePhase = 2 * pi * 25 * (1:1000) / 400;
sound(sin(DonePhase));
