% CL=nbayes_test(nbc,TEST)
% Classifies the test samples given in TEST based on a gaussian
% distribution by means of a naive bayes classifier.
% TEST must be a 2d-matrix where each row represents 
% a vector of class attributes.
% nbc includes the estimated mean and variances of each class and attribute
% The structure nbc is calculated by nbayes_train
%
% Returns a vector of class labels CL based on the classification result.
%
% see also: nbayes_train

 function CL=nbayes_test(nbc, TEST)
% NBAYES_TEST  Classify samples with a Gaussian naive Bayes classifier.
%
% Input:
%   nbc  - struct array (one element per class) with fields:
%          meanC - row vector of per-attribute means
%          varC  - row vector of per-attribute variances
%          label - class label returned for samples of this class
%   TEST - 2d-matrix, one sample per row, one attribute per column
%
% Output:
%   CL   - column vector of class labels, one per row of TEST

if length(nbc(1).meanC)~=size(TEST,2),
    error('Number of attributes in nbc and TEST must be equal.');
end

nsamples = size(TEST,1);
nclasses = length(nbc);

% Iterate backwards so the first assignment preallocates the full
% struct array nbt instead of growing it one element at a time.
for c=nclasses:-1:1,
    % sC = 2*sigma^2; the normal pdf is exp(-d^2/(2*sigma^2))/sqrt(2*pi*sigma^2)
    % which equals exp(-d.^2./sC)./sqrt(pi.*sC).
    sC = 2 * nbc(c).varC;

    % Vectorized over all samples at once (replaces a per-sample loop):
    % per-attribute Gaussian densities for every sample.
    dC = TEST - repmat(nbc(c).meanC, nsamples, 1);
    nbt(c).pC = exp(-dC.*dC ./ repmat(sC, nsamples, 1)) ...
                ./ repmat(sqrt(pi.*sC), nsamples, 1);

    % Start the running product of conditional densities with attribute 1.
    nbt(c).postsC = nbt(c).pC(:,1);
end

% Multiply in the remaining attributes' densities (naive independence
% assumption), renormalizing on the fly to avoid float under-/overflow
% of the running product.
for k=2:size(TEST,2),
    postsSum=0;
    for c=1:nclasses,
        nbt(c).postsC = nbt(c).postsC .* nbt(c).pC(:,k);
        postsSum = postsSum+nbt(c).postsC;
    end
    % samples whose summed score has drifted out of safe float range
    OUflow = find( postsSum < 1e-24 | postsSum > 1e24);
    % renormalize only the affected samples; relative class scores are
    % unchanged by dividing all classes by the same sum
    if(~isempty(OUflow)),
        for c=1:nclasses,
            nbt(c).postsC(OUflow) = nbt(c).postsC(OUflow) ./ postsSum(OUflow);
        end
    end
end

% Normalize scores so they sum to 1 over the classes (posteriors).
postsSum=0;
for c=1:nclasses,
    postsSum=postsSum+nbt(c).postsC;
end
postsNorm = 1 ./ postsSum;
for c=1:nclasses,
    nbt(c).postsC = nbt(c).postsC .* postsNorm;
end

% Argmax over classes: start by assuming class 1 for every sample,
% then replace wherever a later class has a strictly higher posterior.
postsC = nbt(1).postsC;
CL = ones(size(postsC)).*nbc(1).label;
for c=2:nclasses,
    greater = find(nbt(c).postsC > postsC);
    postsC(greater) = nbt(c).postsC(greater);
    CL(greater) = nbc(c).label;
end