function results = ensemble(settings)
% -------------------------------------------------------------------------
% Ensemble Classification | June 2011 | public version 1.0
% -------------------------------------------------------------------------
% Contact: jan@kodovsky.com
% -------------------------------------------------------------------------
% References:
% [1] - J. Kodovsky, J. Fridrich, and V. Holub. Ensemble classifiers for
% steganalysis of digital media. IEEE Transactions on Information Forensics
% and Security. Currently under review.
% -------------------------------------------------------------------------
% settings
%   .cover - cover feature file(s); a string or a cell array (example_4.m)
%   .stego - stego feature file(s); a string or a cell array (example_4.m)
%   .seed_trntst - PRNG seed for training/testing set division
%   .seed_subspaces (default = random) - PRNG seed for random subspace
%         generation
%   .seed_bootstrap (default = random) - PRNG seed for bootstrap samples
%         generation
%   .ratio (default = 0.5) - relative number of training images
%   .d_sub (default = 'automatic') - random subspace dimensionality; either
%         an integer (e.g. 200) or the string 'automatic' is accepted; in
%         the latter case, an automatic search for the optimal subspace
%         dimensionality is performed, see [1] for more details
%   .L (default = 'automatic') - number of random subspaces / base
%         learners; either an integer (e.g. 50) or the string 'automatic'
%         is accepted; in the latter case, an automatic stopping criterion
%         is used, see [1] for more details
%   .output (default = './output/date_x.log') - log file where both the
%         progress and the results of the classification are stored
%   .bootstrap (default = 1) - turn on/off bootstrap sampling of the
%         training set for training of individual base learners; this
%         option will be automatically turned on when either the search
%         for d_sub or the automatic stopping criterion for L is to be
%         performed, because both techniques rely on out-of-bag (OOB)
%         error estimates, which in turn require bootstrapping; see [1]
%         for more details
%    .verbose (default = 1) - turn on/off screen output
%    .keep_cov (default = 0) - a memory-demanding speed-up of the search
%         for d_sub; by default turned off; turn on only when the search
%         for d_sub is to be performed, and only if your system has enough
%         memory; otherwise keep this option off
%    .ignore_warnings (default = 1) - ignore 'MATLAB:nearlySingularMatrix'
%         warning during the FLD training => speed-up; ignoring these
%         warnings had no effect on performance in our experiments; if the
%         value is set to 0, warnings will not be ignored; in that case,
%         the diagonal of the ill-conditioned covariance matrix will be
%         iteratively weighted with increasing weights until the matrix is
%         well conditioned (see the code for details)
%
% Parameters for the search for d_sub (when .d_sub = 'automatic'):
%
%    .k_step (default = 200) - initial step for d_sub when searching from
%         the left (stage 1 of Algorithm 2 in [1])
%    .Eoob_tolerance (default = 0.02) - the relative tolerance for the
%         minimality of OOB within the search, i.e. specifies the stopping
%         criterion for stage 2 of Algorithm 2
%
% Both default parameters work well for most steganalysis scenarios.
%
% Parameters for the automatic stopping criterion for L (when
% .L = 'automatic'); see [1] for more details:
%
%    .L_kernel (default = ones(1,5)/5) - the averaging kernel, i.e. over
%         how many of the most recent OOB estimates the moving average is
%         taken
%    .L_min_length (default = 25) - the minimum number of random subspaces
%         that will be generated
%    .L_memory (default = 50) - how many of the last OOB estimates need to
%         stay in the epsilon tube
%    .L_epsilon (default = 0.005) - specification of the epsilon tube
%
% According to our experiments, these values are sufficient for most of the
% steganalysis tasks (different algorithms and features). Nevertheless, any
% of these parameters can be modified before calling the ensemble if
% desired.
%
% NOTE: throughout the code below, settings.k holds the current random
% subspace dimensionality (called d_sub in [1] and in the user-facing
% settings/log messages).
% -------------------------------------------------------------------------

% check settings, set default values, initial screen print
settings = check_initial_setup(settings);
% pre-generate seeds for random subspaces and bootstrap samples
PRNG = generate_seeds(settings);
% create training set (Xc = cover features, Xs = stego features)
[Xc,Xs,settings] = create_training(settings);
% initialization of the search for k (if requested)
[SEARCH,settings] = initialize_search(settings);
% search_counter counts the tried values of k; MIN_OOB tracks the lowest
% OOB error seen so far; OOB.error starts at 1 (worst case) so the first
% finished iteration always becomes the current best (see comparison below)
[search_counter,results,MIN_OOB,OOB.error] = deal(0,[],1,1);
% create structures for caching covariance matrices (indexed per base
% learner; .k records the subspace dimensionality a cached matrix belongs
% to, .sig holds the matrix itself) — used by FLD_training to avoid
% recomputation when settings.keep_cov is on
sigCstored.k = zeros(settings.max_number_base_learners,1,'uint16');
sigCstored.sig = cell(settings.max_number_base_learners,1);
sigSstored.k = zeros(settings.max_number_base_learners,1,'uint16');
sigSstored.sig = cell(settings.max_number_base_learners,1);

if settings.verbose, fprintf('Full dimensionality = %i\n',settings.max_dim); end

% search loop (if search for k is to be executed); when no search is
% requested, SEARCH.in_progress is expected to allow exactly one pass
while SEARCH.in_progress
    search_counter = search_counter+1;

    % initialization: timer for the current k, base-learner counter i,
    % loop flag, screen-output buffer, and per-learner storage
    [SEARCH.start_time_current_k,i,next_random_subspace,TXT,base_learner] = deal(tic,0,1,'',cell(settings.max_number_base_learners,1));

    % loop over individual base learners
    while next_random_subspace
        i = i+1;

        %%% RANDOM SUBSPACE GENERATION
        % reseed the legacy PRNG with the pre-generated per-learner seed so
        % that learner i always draws the same permutation, regardless of k
        % (this makes results reproducible across the search over k)
        rand('state',double(PRNG.subspaces(i)));
        base_learner{i}.subspace = randperm(settings.max_dim);
        % the random subspace = first k indices of the permutation
        subspace = base_learner{i}.subspace(1:settings.k);

        %%% BOOTSTRAP INITIALIZATION
        OOB = bootstrap_initialization(PRNG,Xc,Xs,OOB,i,settings);

        %%% TRAINING PHASE
        base_learner{i} = FLD_training(Xc,Xs,i,base_learner{i},OOB,subspace,settings,sigCstored,sigSstored);

        %%% OOB ERROR ESTIMATION
        OOB = update_oob_error_estimates(Xc,Xs,base_learner{i},OOB,i,subspace,settings);

        % decide whether another base learner should be added (fixed L or
        % the automatic stopping criterion); MSG is appended to the log line
        [next_random_subspace,MSG] = getFlag_nextRandomSubspace(i,OOB,settings);

        % SCREEN OUTPUT
        CT = double(toc(SEARCH.start_time_current_k));
        if settings.bootstrap
            % with bootstrapping the OOB estimate is available and reported
            TXT = updateTXT(TXT,sprintf(' - d_sub %s : OOB %.4f : L %i : T %.1f sec%s',k_to_string(settings.k),OOB.error,i,CT,MSG),settings);
        else
            TXT = updateTXT(TXT,sprintf(' - d_sub %s : L %i : T %.1f sec%s',k_to_string(settings.k),i,CT,MSG),settings);
        end

    end % while next_random_subspace

    % record the value of k tried in this search iteration
    results.search.k(search_counter) = settings.k;
    if settings.verbose, 
        fprintf('\n'); 
    end

    % keep the ensemble for the best k so far; without bootstrapping there
    % is no OOB estimate (OOB.error stays at its initial value of 1), so
    % the ~settings.bootstrap branch ensures the trained learners are kept
    if OOB.error<MIN_OOB || ~settings.bootstrap
        % found the best value of k so far
        FINAL_BASE_LEARNER = base_learner;
        [MIN_OOB,OPTIMAL_K,OPTIMAL_L] = deal(OOB.error,settings.k,i);
    end

    % advance the search over k (updates settings.k and SEARCH.in_progress)
    [settings,SEARCH] = update_search(settings,SEARCH,OOB.error);
    results = add_search_info(results,settings,search_counter,SEARCH,i,CT);
    % reset per-iteration state before trying the next k
    clear base_learner OOB
    OOB.error = 1;
end % while search_in_progress

% training time evaluation
% NOTE(review): settings.start_time is presumably a tic handle stored by
% check_initial_setup — confirm; the uint64 cast matches tic/toc semantics
results.training_time = toc(uint64(settings.start_time));
TXT = sprintf('training time: %.1f sec',results.training_time);
if settings.verbose, 
    fprintf([TXT '\n']); 
end

% testing phase: free the training features, load the testing set, and
% evaluate the ensemble kept for the optimal k
clear Xc Xs;
[Yc,Ys,settings] = create_testing(settings);
base_learner = FINAL_BASE_LEARNER;
TST_ERROR = calculate_testing_error(Yc,Ys,base_learner,OPTIMAL_L,OPTIMAL_K);

% final output and logging
results = collect_final_results(settings,OPTIMAL_K,OPTIMAL_L,MIN_OOB,base_learner,results);
results.testing_error = TST_ERROR;
if settings.bootstrap
    TXT = sprintf('optimal d_sub %i : OOB %.4f : TST %.4f : L %i : T %.1f sec',OPTIMAL_K,MIN_OOB,TST_ERROR,OPTIMAL_L,results.time);
else
    TXT = sprintf('optimal d_sub %i : TST %.4f : L %i : T %.1f sec',OPTIMAL_K,TST_ERROR,OPTIMAL_L,results.time);
end
if settings.verbose, 
    fprintf([TXT '\n']); 
    fprintf('end of ensemble processing\n');
end