function [selected_lists] = F_Score(class_cell)
% F_SCORE  Rank features by a Fisher-score criterion, then greedily select a
% feature subset that maximizes size-weighted pairwise cross-validated SVM
% accuracy.
%
%   selected_lists = F_Score(class_cell)
%
%   Input:
%     class_cell - 1-by-K cell array; class_cell{i} is an Ni-by-F matrix of
%                  observations (rows) by features (columns) for class i.
%   Output:
%     selected_lists - column indices (into the original feature matrix) of
%                      the selected feature subset.
%
%   Requires LIBSVM's svmtrain on the MATLAB path (-v cross-validation mode).

    % Rows drawn from each class when building a pairwise training set.
    % NOTE(review): hard-coded to the expected per-class sizes of this data
    % set (wake, N1, N2, N3/N4, REM) -- must not exceed actual class sizes.
    train_number = [800 400 1000 600 800];

    folds = 5;   % cross-validation folds passed to svmtrain (-v)

    % Pre-tuned (C, gamma) pairs, one row per class pair, in the order the
    % pairwise loops below visit the pairs.
    c_g = [32768, 0.0020;
           2,     0.0020;
           8192,  1.2207e-04;
           32768, 1.2207e-04;
           32,    0.1250;
           8,     0.0313;
           8192,  0.0020;
           8192,  0.0313;
           8,     0.0313;
           8,     0.1250];

    class_number = size(class_cell, 2);        % number of classes
    feature_number = size(class_cell{1}, 2);   % number of features

    % Observation count per class and overall.
    data_size = zeros(1, class_number);
    for i = 1:class_number
        data_size(i) = size(class_cell{i}, 1);
    end
    total_size = sum(data_size);

    % Per-class feature means (one row per class).
    data_mean = zeros(class_number, feature_number);
    for i = 1:class_number
        data_mean(i, :) = mean(class_cell{i});
    end

    % Center each class about its own mean.
    center_data = cell(1, class_number);
    for i = 1:class_number
        center_data{i} = class_cell{i} - repmat(data_mean(i, :), data_size(i), 1);
    end

    % Fisher-style score per feature: between-class scatter (sum of squared
    % pairwise mean differences) over within-class scatter (squared deviation
    % normalized by class size). Stored in `score` rather than a variable
    % shadowing the function name.
    score = zeros(1, feature_number);
    for feature_index = 1:feature_number
        S_W = 0;
        S_B = 0;
        for j = 1:class_number
            for i = j + 1:class_number
                diff_ji = data_mean(j, feature_index) - data_mean(i, feature_index);
                S_B = S_B + diff_ji * diff_ji;
            end
            % BUGFIX: original wrote size(center_data(j), 1) -- () indexing
            % yields a 1x1 cell so the divisor was always 1. Use {} so the
            % within-class scatter is normalized by the actual class size.
            S_W = S_W + center_data{j}(:, feature_index)' * center_data{j}(:, feature_index) ...
                  / size(center_data{j}, 1);
        end
        score(feature_index) = S_B / S_W;
    end

    % Feature columns ordered best-first by score.
    [~, IJ] = sort(score, 'descend');

    % Greedy forward selection over the ranked features: commit a feature if
    % it improves accuracy by >0.2% relative; revert it if accuracy drops by
    % >0.4% relative; otherwise keep it tentatively in backup_lists.
    best_accu_rate = 0;
    selected_lists = [];
    backup_lists = [];
    for selected_number = 1:feature_number
        backup_lists = [backup_lists selected_number]; %#ok<AGROW>

        % Restrict every class to the candidate feature subset.
        train_data = cell(1, class_number);
        train_lable = cell(1, class_number);
        for i = 1:class_number
            train_data{i} = class_cell{i}(:, IJ(backup_lists));
            train_lable{i} = i * ones(data_size(i), 1);
        end

        k = 0;           % row index into c_g, advanced once per class pair
        total_acc = 0;   % size-weighted average pairwise CV accuracy (0..1)
        for j = 1:class_number - 1
            for i = class_number:-1:j + 1
                k = k + 1;
                train_time = 5;   % repetitions to average out random sampling
                acc = 0;
                for m = 1:train_time
                    % BUGFIX: original used length(), which returns the
                    % largest matrix dimension; use size(...,1) so rows
                    % (observations) are permuted regardless of shape.
                    random_data_i = train_data{i}(randperm(size(train_data{i}, 1)), :);
                    random_data_j = train_data{j}(randperm(size(train_data{j}, 1)), :);
                    train_i = random_data_i(1:train_number(i), :);
                    train_j = random_data_j(1:train_number(j), :);
                    model_train_data = [train_j; train_i];
                    label = [train_lable{j}(1:train_number(j)); train_lable{i}(1:train_number(i))];
                    % LIBSVM in -v mode returns CV accuracy as a percentage.
                    acc = acc + svmtrain(label, model_train_data, ...
                        sprintf('-s 0 -c %f -g %f -v %d', c_g(k, 1), c_g(k, 2), folds));
                end
                % BUGFIX: original divided the 5-run sum by 10, halving the
                % reported accuracy. Average over the actual repetition
                % count. (Selection is unaffected: the old factor was
                % uniform across all candidates, so comparisons against
                % best_accu_rate scale identically.)
                cv_acc_best = acc / train_time;
                % Weight each pair's accuracy by that pair's share of the data.
                total_acc = total_acc + cv_acc_best / 100 ...
                    * (size(train_data{j}, 1) + size(train_data{i}, 1)) / 2 / total_size;
            end
        end

        % Progress report (intentionally unsuppressed, as in the original).
        selected_number
        total_acc

        if total_acc > best_accu_rate * 1.002
            % Meaningful improvement: commit the candidate subset.
            best_accu_rate = total_acc;
            selected_lists = backup_lists;
        elseif total_acc < best_accu_rate * 0.996
            % Clear regression: drop the most recently added feature.
            backup_lists = selected_lists;
        end
    end

    % Map subset positions (ranks) back to original feature column indices.
    selected_lists = IJ(selected_lists);
end




