% TRAIN_MULTICLASS_HMM train a multiclass HMM classifier
% Usage
%       model = train_multiclass_hmm(data, init_params)
%
% where data is a cell array containing cell arrays of sequences
% for each class, and init_params is a structure of initial
% parameters, containing
%
%       init_params.NumIter    = max no. of EM iterations     
%       init_params.InitType   = 'uniform' or 'random'
%       init_params.NumMix     = no. of components in GMM
%       init_params.NumStates  = no. of states
%
% If init_params is a cell array of structures, it is assumed 
% that it contains separate parameters for each class. Otherwise
% the same parameters are used for each class.
% 
% This is a wrapper for Kevin Murphy's HMM Toolbox for Matlab, which
% must be installed and on the path before running this code. It trains
% a linear-chain HMM with GMM observation models that have diagonal
% covariance. The trained parameters are returned in model.hmm.
%

function model = train_multiclass_hmm(data, init_params)
% Train one HMM per class and attach a class prior to each model.
%
% Inputs:
%   data        - cell array; data{i} is itself a cell array of the
%                 training sequences for class i
%   init_params - struct of initialization parameters (see file header),
%                 or a cell array of such structs, one per class
%
% Output:
%   model       - cell array; model{i} is the trained HMM for class i
%                 with model{i}.prior set to the fraction of training
%                 sequences belonging to that class, or [] when class i
%                 has no training data

NumClass = length(data);

% Total number of sequences across all classes (used for class priors)
total = length([data{:}]);

% Preallocate so the loop does not repeatedly grow the cell arrays
init  = cell(1, NumClass);
model = cell(1, NumClass);

for i = 1:NumClass
    if iscell(init_params)
        init{i} = init_params{i};  % different parameters for each class
    else
        init{i} = init_params;     % same parameters for every class
    end

    if ~isempty(data{i})
        model{i} = train_hmm(data{i}, init{i});
        % Class prior = fraction of all training sequences in this class
        model{i}.prior = length(data{i})/total;
    else
        model{i} = [];  % no data for this class
    end
end

% Train HMM for single class
function model = train_hmm(data, init_params)
% Train a single left-to-right HMM with diagonal-covariance GMM
% observation densities using Kevin Murphy's HMM Toolbox (mhmm_em).
%
% Inputs:
%   data        - cell array of sequences, each a D-by-T matrix
%   init_params - struct with fields NumMix, NumStates, NumIter, InitType
%
% Output:
%   model       - copy of init_params with trained parameters stored in
%                 model.hmm (prior, transmat, mu, sigma, mixmat, LL)

M        = init_params.NumMix;     % no. of mixture components per state
Q        = init_params.NumStates;  % no. of HMM states
max_iter = init_params.NumIter;    % max no. of EM iterations
initType = init_params.InitType;   % 'uniform' or 'random'

fprintf(1, ['\nTraining model with %d states, %d components, ' ...
    'on %d sequences (%d frames).\n\n'], Q, M, length(data), length([data{:}]));

D = size(cell2mat(data),1);   % dimension of observation vector

cov_type = 'diag';  % 'full' is much slower

% Left-to-right chain HMM
%
% NOTE: initType is always assigned above, so the previous
% exist('initType') guard was dead code -- and, lacking the 'var'
% argument, it also searched the MATLAB path for files named initType.

if strcmp(initType,'uniform') % uniform initial parameters
    fprintf('Initializing from uniform parameters\n');
    prior0 = normalise(ones(Q,1));
    % chain with no skip states allowed
    transmat0 = mk_stochastic( diag(ones(Q,1))+diag(ones(Q-1,1),1));
    mixmat0 = ones(Q,M)/M;  % uniform mixture weights (each row sums to 1)
    % Every state/component starts at the global mean, with the
    % diagonal of the global covariance
    mu0 = repmat(mean(cell2mat(data)')',[1 Q M]);
    S = diag(diag(cov(cell2mat(data)')));
    Sigma0 = repmat(S, [1 1 Q M]);
else  % random guess at initial parameters
    fprintf('Initializing from random parameters\n');
    prior0 = normalise(rand(Q,1));
    transmat0 = mk_stochastic(triu(rand(Q,Q)));   % chain with skip states
%   transmat0 = mk_stochastic(rand(Q,Q));         % full transition mtx
    mixmat0 = mk_stochastic(rand(Q,M));
    % k-means initialization of the Gaussian mixture parameters
    [mu0, Sigma0] = mixgauss_init(Q*M, cell2mat(data), cov_type,'kmeans');
    mu0 = reshape(mu0, [D Q M]);
    Sigma0 = reshape(Sigma0, [D D Q M]);
end

if M<=1   % single-gaussian mixtures: mhmm_em expects an empty mixmat
    mixmat0 = [];
end

[LL, prior1, transmat1, mu1, sigma1, mixmat1] = ...
    mhmm_em(data, prior0, transmat0, mu0, Sigma0, mixmat0, ...
	    'max_iter', max_iter, 'verbose', 0, 'cov_type', cov_type);

% Return new classifier state in model.hmm
model = init_params;

model.hmm.prior = prior1;
model.hmm.transmat = transmat1;
model.hmm.mu = mu1;
model.hmm.sigma = sigma1;
model.hmm.mixmat = mixmat1;
model.hmm.LL = LL;
