function res = trainModel_libsvm2(stego, cover, ntrain, ntest, loop, verbose, c, g, ncg, kerneltype)
% TRAINMODEL_LIBSVM2 train an SVM model using LIBSVM and report test error.
% Usage:
%   RES = TRAINMODEL_LIBSVM2(STEGO, COVER, NTRAIN, NTEST, LOOP, VERBOSE, C, G, NCG, KERNELTYPE)
%
%  This function splits training sets and testing sets, trains the model,
%  and returns the testing result.
%  Selected stego and cover images are in pairs (the same random indices
%  are used to pick rows from STEGO and COVER).
%  The following calls are equivalent:
%   RES = TRAINMODEL_LIBSVM2(STEGO, COVER, NTRAIN, NTEST)
%   RES = TRAINMODEL_LIBSVM2(STEGO, COVER, NTRAIN, NTEST, 10)
%   RES = TRAINMODEL_LIBSVM2(STEGO, COVER, NTRAIN, NTEST, 10, 0)
%   RES = TRAINMODEL_LIBSVM2(STEGO, COVER, NTRAIN, NTEST, 10, 0, nan, nan, NTRAIN)
%   RES = TRAINMODEL_LIBSVM2(STEGO, COVER, NTRAIN, NTEST, 10, 0, nan, nan, NTRAIN, 0)
%
% Input:
%   STEGO  : an L*N matrix of features of stego images, or the name of a
%            MAT-file containing such a matrix in variable 'res'.
%   COVER  : an M*N matrix of features of cover images (M must equal L
%            because images are paired), or a MAT-file name as above.
%   NTRAIN : size of training set. (one pair of stego/cover images is 2!)
%   NTEST  : size of testing set.
%   LOOP   : number of loops. Default value is 10.
%   VERBOSE: 1 to display a lot of messages. Default value is 0.
%   C, G   : values for LIBSVM. Omit them, or pass 'automatic' or NaN, to
%            make the program search for the best C,G values
%            (and this process is very slow).
%   NCG    : size of training sample when searching for C, G. Default value
%            is the same as NTRAIN.
%   KERNELTYPE : see kernel type for libsvmtrain. Default value is 0.
%
% Output:
%   RES : a struct with the following fields:
%       MEAN_ERR  : mean of error over all loops.
%       (plus FA/MD rates, per-loop errors, timing and the options used)
%
% See also:
%   SVMCG
%   TRAINMODEL_LIBSVM
%   LIBSVMTRAIN
%   LIBSVMPREDICT

% Base LIBSVM options: shrinking heuristics on, 512 MB kernel cache.
cg_opt = '-h 1 -m 512';
train_opt = '-h 1 -m 512';
predict_opt = '';

if nargin < 6
    verbose = 0;
end

if nargin < 9
    ncg = ntrain;
end

if nargin < 10
    kerneltype = 0;
end
cg_opt = ['-t ',num2str(kerneltype),' ',cg_opt];
train_opt = ['-t ',num2str(kerneltype),' ',train_opt];

if ~verbose
    % -q silences LIBSVM's own console output.
    train_opt = ['-q ', train_opt];
    predict_opt = ['-q '];
end

%% load feature matrices (accept either a matrix or a MAT-file name)
if ischar(stego)
    if verbose
        fprintf('loading stego from file %s.\n', stego);
    end
    load(stego,'-mat','res');
    stego = res;
    clear res;
end
[L,N] = size(stego);
if ischar(cover)
    if verbose
        fprintf('loading cover from file %s.\n', cover);
    end
    load(cover,'-mat','res');
    cover = res;
    clear res;
end
[M,N1] = size(cover);
% Both conditions must hold independently: feature dimensions must agree,
% and the sample counts must agree because the same random permutation of
% 1..L indexes rows of both STEGO and COVER below.
% (The original check used &&, which only fired when BOTH mismatched.)
if N ~= N1
    error('trainModel:featureNumNotMatch','Number of features in STEGO and COVER does not match.');
end
if L ~= M
    error('trainModel:sampleNumNotMatch','Number of samples in STEGO and COVER does not match (images must be paired).');
end
clear N1

if nargin < 5
    loop = 10;
elseif loop <= 0
    error('trainModel:invalidInput','Loop must be >0.');
end

%% initialize output value
res = struct('CG_OPT', cg_opt, 'TRAIN_OPT', train_opt, ...
    'PREDICT_OPT', predict_opt, 'STEGO_SAMPLE_NUM', L, ...
    'COVER_SAMPLE_NUM', M, 'FEATURE_DIM', N);
clear M

nSample = L*2;
% label convention: stego(1) | cover(0)
% Decide whether C,G must be searched for. The ischar/isnumeric guards
% keep isnan() from being applied to a char array (non-scalar logical
% would make || raise an error).
need_cg_search = (nargin < 8) ...
    || (ischar(c) && strcmpi(c, 'automatic')) ...
    || (ischar(g) && strcmpi(g, 'automatic')) ...
    || (isnumeric(c) && any(isnan(c))) ...
    || (isnumeric(g) && any(isnan(g)));
if need_cg_search
    fprintf('finding best c and g\n');
    nt = floor(ncg/2);
    p = randperm(L);
    p = p(1:nt);
    train = [stego(p, :); cover(p, :)];
    ltrain = [ones(nt,1);zeros(nt,1)];
    if verbose
        fprintf('train set: stego = cover = %d\n', nt);
    end
    tic;
    % SVMcg performs a (slow) cross-validated grid search over C and G.
    [acc, c, g, cvmat] = SVMcg(ltrain, train, cg_opt);
    if verbose
        disp(cvmat);
    end
    T=toc;
    fprintf('c=%g, g=%g\n', c, g);
    [res.CG_ACC, res.CG_TIME, res.CG_MAT] = deal(acc, T, cvmat);
    clear train ltrain
end

if (ntrain <= 0) || (ntest <= 0) || (ntrain + ntest > nSample)
    error('trainModel:invalidInput', 'NTRAIN + NTEST must not be bigger than total sample numbers.');
end

[res.NUM_TRAIN, res.NUM_TEST, res.NUM_LOOP] = deal(ntrain, ntest, loop);
[res.C, res.G] = deal(c,g);
[res.MEAN_ERR, res.MEAN_FA, res.MEAN_MD, res.MIN_ERR] = deal(nan);

errors_fa = nan(loop, 1); %false alarm: cover (0) said to be stego (1)
errors_md = nan(loop, 1); %miss detect: stego (1) said to be cover (0)
accs = nan(loop, 3);
errors = nan(loop, 1);

% From here on ntrain/ntest count PAIRS (half of the sample counts).
ntrain = floor(ntrain / 2);
ntest = floor(ntest/2);
output = '';

if verbose
    fprintf('trn/tst = %d/%d, loop = %d\n',ntrain, ntest, loop);
    fprintf('train set: %d pairs\n', ntrain);
    fprintf('test set : %d pairs\n', ntest);
end

%% train and test
% NOTE(review): implementation of LIBSVM is not thread safe, so loops run
% sequentially.
es = 0;
tic;
for i = 1:loop
    p = randperm(L);
    if verbose
        fprintf('Round %d/%d\n',i,loop);
    end
    % 1 .. ntrain              => train
    % ntrain+1 .. ntrain+ntest => test
    % ntrain+ntest+1 ...       => skipped
    train = [stego(p(1:ntrain), :); cover(p(1:ntrain), :)];
    ltrain = [ones(ntrain,1);zeros(ntrain,1)];
    %exportSVM(['train_',num2str(i),'.dat'],ltrain,train);
    t1=cputime;
    model = libsvmtrain(ltrain, train, [train_opt, ' -c ',num2str(c),' -g ',num2str(g)]);
    t2=cputime-t1;
    fprintf('time=%g\n',t2);
    clear train ltrain

    test = [stego(p(ntrain+1:ntrain+ntest), :); cover(p(ntrain+1:ntrain+ntest), :)];
    ltest = [ones(ntest,1);zeros(ntest,1)];
    %exportSVM(['test_',num2str(i),'.dat'],ltest,test);
    [tr, acc, ~] = libsvmpredict(ltest, test, model, predict_opt);
    clear test

    % Average of false-alarm and missed-detection rates = balanced error.
    pfa = sum((tr == 1) & (ltest == 0)) / sum(ltest == 0);
    pmd = sum((tr == 0) & (ltest == 1)) / sum(ltest == 1);
    err = (pmd + pfa)/2;
    errors_fa(i) = pfa;
    errors_md(i) = pmd;
    errors(i) = err;
    es = es + err;
    accs(i,:) = acc';
    if ~verbose
        % Erase the previous progress line before printing the new one.
        fprintf(repmat('\b',1,length(output)));
    end
    output = sprintf('%d/%d : acc = %0.4f / %0.4f\n', i, loop, 1-err, 1 - es / i);
    fprintf(output);
end
fprintf('\n');
T = toc;
res.TIME = T;
res.errors_fa = errors_fa;
res.errors_md = errors_md;
res.errors = errors;
res.accuracy = accs;
res.MIN_ERR = min(errors);
res.MEAN_ERR = mean(errors);
res.MEAN_FA = mean(errors_fa);
res.MEAN_MD = mean(errors_md);

if verbose
    disp(res)
    fprintf('Mean Accuracy: %0.2f%%\n',(1-res.MEAN_ERR)*100);
end
end
