function[meanVecs, covarMats] = trainForMahalanobis(trainingSet, assumption)
% Calculates the mean vector and a covariance matrix for every class,
% where the form of the covariance matrices depends on the assumption.
%
%   INPUT
%   trainingSet.The feature vectors for training. Row 1 holds the class
%               label of each column; rows 2:end hold the features.
%   assumption..Makes assumptions regarding the form of the covariance
%               matrix:
%               1: no special assumptions (one full matrix per class)
%               2: all covariance matrices are the same (pooled)
%               3: the matrices are diagonal (just the variance)
%               4: all covariance matrices are the same and diagonal
%   OUTPUT
%   meanVecs....An array of the mean feature vector of every class
%               (one column per class, in the order of unique labels).
%   covarMats...dim x dim x numClasses array of covariance matrices with
%               the given assumption applied.

    classes = unique(trainingSet(1, :));
    numClasses = size(classes, 2);
    dim = size(trainingSet, 1) - 1;
    meanVecs = zeros(dim, numClasses);
    covarMats = zeros(dim, dim, numClasses);
    % Accumulates the total scatter over all classes (pooled estimate).
    pooledScatter = zeros(dim);
    % Index by class POSITION k, not by the label value itself, so that
    % arbitrary (non-contiguous) label values still index correctly.
    for k = 1 : numClasses
        classVecs = trainingSet(2:end, trainingSet(1, :) == classes(k));
        N = size(classVecs, 2);
        meanVecs(:, k) = sum(classVecs, 2) ./ N;
        xMinusMean = classVecs - (meanVecs(:, k) * ones(1, N));
        % Scatter matrix: sum of outer products of the centered vectors,
        % computed in one matrix product instead of a per-sample loop.
        scatter = xMinusMean * xMinusMean';
        if assumption == 2 || assumption == 4
            % Pooled case: only accumulate; normalize after the loop.
            pooledScatter = pooledScatter + scatter;
        else
            % Per-class case: each class starts from its OWN scatter
            % (unbiased estimate, divisor N - 1).
            covarMat = scatter / (N - 1);
            if assumption == 3
                covarMat = diag(diag(covarMat));
            end
            covarMats(:, :, k) = covarMat;
        end
    end
    if assumption == 2 || assumption == 4
        % Maximum-likelihood pooled estimate: divide by the total number
        % of training samples.
        covarMat = pooledScatter / size(trainingSet, 2);
        if assumption == 4
            covarMat = diag(diag(covarMat));
        end
        covarMats = repmat(covarMat, [1 1 numClasses]);
    end
end
