% Suppress all warnings for this run (%#ok<WNOFF> silences the mlint note
% about turning warnings off globally).
warning off %#ok<WNOFF>
clear;clc

% Make the project and all of its sub-folders visible on the path.
addpath(genpath('.'));
starttime = datestr(now,0); % wall-clock start time, recorded for logging

% Load the multi-label 'birds' dataset. fullfile builds the path with the
% correct separator, so the script also runs on non-Windows systems.
load(fullfile('data','birds.mat'));
if exist('train_targets','var')==1&&exist('test_targets','var')==1
    % Some dataset files store targets as instances-by-labels; transpose to
    % the labels-by-instances layout used throughout this script.
    test_target=test_targets';
    train_target=train_targets';
    clear train_targets test_targets
end 

% Convert a {-1,+1} label encoding to the {0,1} encoding expected below.
train_target(train_target==-1)=0;
test_target(test_target==-1)=0;
%% Optimization Parameters
% Regularization weights passed to JLCLS / JLCLS_adaptive_validate.
optmParameter.alpha   = 2^-4;
optmParameter.beta    = 2^-3;
optmParameter.gamma   = 1.0;
optmParameter.theta   = 2^-5;

% Iteration limits and stopping tolerances for the optimizer.
optmParameter.maxIter           = 10;
optmParameter.maxIter2          = 100;
optmParameter.epsilon           = 10^-3;
optmParameter.minimumLossMargin = 10^-5;

% Penalty (ADMM-style) parameters: rho grows by factor p up to maxrho.
optmParameter.maxrho  = 10^10;
optmParameter.rho     = 10^-6;
optmParameter.p       = 1.1;

optmParameter.searchPara = 1; % indicate whether tuning the parameters, {0:not,1:yes}
optmParameter.tuneParaOneTime = 1; % indicate that tuning the parameter one time or tuning it in each fold. {0: each fold,1: only one time}

% for large scale dataset, search ranges for alpha and beta should be set to large values,
optmParameter.alpha_searchrange = 2.^(-6:-1);
optmParameter.beta_searchrange  = 2.^(-5:-1);
optmParameter.gamma_searchrange = 1; % gamma is kept fixed (1.^[0] was a no-op)
% optmParameter.theta_searchrange = 10.^[-4,-6];

optmParameter.bQuiet            = 1;

%% Model Parameters
% Switches that control the experimental protocol.
modelparameter.crossvalidation = 1;  % 1: run k-fold cross validation, 0: fixed split
modelparameter.cv_num          = 10; % number of folds
modelparameter.L2Norm          = 1;  % 1: L2-normalize each instance row, 0: raw features
modelparameter.deleteData      = 1;  % 1: clear the raw data matrix after normalization

%% Train and Test
if modelparameter.crossvalidation ~= 0
   %% cross validation
    if exist('train_data','var')==1
        % Merge the predefined split; folds are re-drawn randomly below.
        data=[train_data;test_data];
        target=[train_target,test_target];
        clear train_data test_data train_target test_target
    end
    data     = double(data);
    num_data = size(data,1);
    if modelparameter.L2Norm == 1
        % L2-normalize every instance (row). A zero-norm row produces NaN,
        % in which case all features are shifted by eps and normalized again.
        temp_data = data;
        temp_data = temp_data./repmat(sqrt(sum(temp_data.^2,2)),1,size(temp_data,2));
        if sum(sum(isnan(temp_data)))>0
            temp_data = data+eps;
            temp_data = temp_data./repmat(sqrt(sum(temp_data.^2,2)),1,size(temp_data,2));
        end
    else
        temp_data = data;
    end
    if modelparameter.deleteData
        clear data % free the raw copy; only temp_data is used from here on
    end

    randorder = randperm(num_data);
    % 15 evaluation metrics per fold (see EvaluationAll); preallocate the
    % result matrix that is actually filled in the loop (the original code
    % preallocated an unused Result_zwj and grew Result_JLCLS instead).
    Result_JLCLS = zeros(15,modelparameter.cv_num);

    for j = 1:modelparameter.cv_num
        fprintf('\n Running Fold - %d/%d \n',j,modelparameter.cv_num);

       %% the training and test parts are generated by fixed spliting with the given random order
        [cv_train_data,cv_train_target,cv_test_data,cv_test_target ] = generateCVSet( temp_data,target',randorder,j,modelparameter.cv_num );
        cv_train_target=cv_train_target';
        cv_test_target=cv_test_target';

       %% Tune the parameters
        % Tune once overall (tuneParaOneTime==1, first fold only) or once
        % per fold (tuneParaOneTime==0); both branches ran identical code.
        if optmParameter.searchPara == 1
            if (optmParameter.tuneParaOneTime == 1 && exist('BestResult','var')==0) || optmParameter.tuneParaOneTime == 0
                fprintf('\n-  parameterization for JLCLS by cross validation on the training data  -');
                [optmParameter, BestResult ] = JLCLS_adaptive_validate( cv_train_data, cv_train_target, optmParameter);
            end
        end

       %% If we don't search the parameters, we will run JLCLS with the fixed parameters
        [model_2] = JLCLS(cv_train_data, cv_train_target', optmParameter);
        % Project both splits into the learned feature space via model_2.W.
        training_instance_matrix = cv_train_data*model_2.W;
        testing_instance_matrix  = cv_test_data*model_2.W;

              %%  SVM 
        num_labels = size(cv_train_target,1);
        svm.type='Linear';
        svm.para=[];
        % Build the LIBSVM option string; '-b 1' requests probability estimates.
        switch svm.type
            case 'RBF'
                gamma=num2str(svm.para);
                str=['-t 2 -g ',gamma,' -b 1'];
            case 'Poly'
                gamma=num2str(svm.para(1));
                coef=num2str(svm.para(2));
                degree=num2str(svm.para(3));
                str=['-t 1 ','-g ',gamma,' -r ', coef,' -d ',degree,' -b 1'];
            case 'Linear'
                str='-t 0 -b 1';
            otherwise
                 error('SVM types not supported, please type "help LIFT" for more information');
        end

         num_test=size(cv_test_target,2);
         % Preallocate per-label outputs instead of growing them in the loop.
         Pre_Labels = zeros(num_labels,num_test);
         Outputs    = zeros(num_labels,num_test);
         Models     = cell(num_labels,1);

         for i=1:num_labels

            % One binary SVM per label, trained on the projected features.
            training_label_vector=cv_train_target(i,:)';

            Models{i,1}=svmtrain(training_label_vector,training_instance_matrix,str);     
            
            testing_label_vector=cv_test_target(i,:)';
        
            [predicted_label,accuracy,prob_estimates]=svmpredict(testing_label_vector,testing_instance_matrix,Models{i,1},'-b 1'); %#ok<ASGLU>
            
            if(isempty(predicted_label))
                % svmpredict returned nothing (e.g. degenerate training
                % labels): fall back to a constant prediction taken from the
                % first training example of this label.
                predicted_label=cv_train_target(i,1)*ones(num_test,1);
                if(cv_train_target(i,1)==1)
                    Prob_pos=ones(num_test,1);
                else
                    Prob_pos=zeros(num_test,1);
                end
            else
                % Column of prob_estimates corresponding to the positive class.
                pos_index=find(Models{i,1}.Label==1);
                Prob_pos=prob_estimates(:,pos_index);
            end
            Outputs(i,:)    = Prob_pos';
            Pre_Labels(i,:) = predicted_label';
         end      
       %% evaluation of JLCLS
        Result_JLCLS(:,j) = EvaluationAll(Pre_Labels,Outputs,cv_test_target);

    end

   %% the average results of JLCLS
    Avg_Result = zeros(15,2);
    Avg_Result(:,1)=mean(Result_JLCLS,2);    % mean over folds
    Avg_Result(:,2)=std(Result_JLCLS,1,2);   % population std over folds
    fprintf('\nResults of JLCLS\n');
    PrintResults(Avg_Result);

end
endtime = datestr(now,0); % wall-clock end time