%*****************************************************
%Question 3
%*****************************************************
% Naive Bayes classifier with a symmetric Dirichlet prior (alpha = all ones,
% i.e. add-one smoothing) over 64 intensity levels and 64 classes, using
% 3 pixel features. Set histogramFlag = false to train/test and write the
% Kaggle submission CSV; true to plot a histogram of earlier predictions.
histogramFlag = true;

if ~histogramFlag
    % Keep histogramFlag: a bare 'clear all' here would also wipe the flag
    % we just set (and clears breakpoints); clearvars is the safe idiom.
    clearvars -except histogramFlag
    clc
    load('imdata.mat', 'x', 'y', 'i')

    y = double(y);
    x = double(x);

    %Total number of training data points
    N = size(x,1);
    %Number of possible intensities per feature
    totalNumIntensities = 64;
    %Number of possible classes
    totalNumClasses = 64;

    %Symmetric Dirichlet parameter set alpha (all ones => add-one smoothing)
    alpha = ones(1, totalNumClasses);
    sumAlpha = sum(alpha);

    %Use 3 attributes (columns) of x as the features
    attributeVector = [x(:,end), x(:,end-34), x(:,end-35)];
    numFeatures = size(attributeVector, 2);

    %Training sets
    xtraining = attributeVector;
    ytraining = y;

    %*******************************************************************
    %Count co-occurrences into a 64x64x3 array:
    %Prck(intensity, class, feature) counts pairs (x_j = k, y = c),
    %so after normalisation P(X_j = k | Y = c) = Prck(k, c, j).
    Prck = zeros(totalNumIntensities, totalNumClasses, numFeatures);

    for j = 1:numFeatures
        for k = 1:N
            %+1 converts the 0-based intensity/class values to MATLAB indices
            r = xtraining(k,j) + 1;
            c = ytraining(k) + 1;
            Prck(r, c, j) = Prck(r, c, j) + 1;
        end
    end
    %*******************************************************************
    %Count the number of target values in each class.
    %Convert the y attributes to a 1-of-M representation.
    [yofm, attributesY] = datato1ofm(y);
    yofmSparse = full(yofm);

    %Class counts Nc (row vector, one entry per class)
    Py = sum(yofmSparse);
    %********************************************************************

    %Turn the counts into smoothed conditional probabilities:
    %P(X_j = k | Y = c) = (1 + count(k,c,j)) / (sumAlpha + Nc)
    for ll = 1:numFeatures
        for cc = 1:totalNumClasses
            %Vectorised over the intensity dimension; equivalent to the
            %element-wise update since the denominator depends only on cc.
            Prck(:, cc, ll) = (1 + Prck(:, cc, ll)) / (sumAlpha + Py(cc));
        end
    end

    %Class prior probabilities given the parameters:
    %P(y=c|theta) = (Nc + 1) / (N + sumAlpha)
    %Here Nc = count of class c, N = total data points.
    Py = (Py + 1) ./ (sumAlpha + N);
    logPy = log(Py);
    %**********************************************************************
    %Testing begins
    load('imtestdata.mat');
    attributeVector = [x(:,end), x(:,end-34), x(:,end-35)];
    xtest = double(attributeVector);
    numTest = size(x,1);

    %Preallocate the full result matrix. (The original allocated only one
    %row, so the matrix silently grew on every loop iteration.)
    standardisedPosterior = zeros(numTest, totalNumClasses);

    for nn = 1:numTest
        %Per-class log-likelihoods log P(X_j = x_j | Y = c) for each of the
        %three features of this test point
        px1y = log(Prck(xtest(nn,1)+1, :, 1));
        px2y = log(Prck(xtest(nn,2)+1, :, 2));
        px3y = log(Prck(xtest(nn,3)+1, :, 3));

        %Unnormalised log posterior: sum of log-likelihoods plus log prior
        numerator = px1y + px2y + px3y + logPy;

        %Log-sum-exp trick to compute the normaliser without underflow.
        %The denominator terms are exactly the numerator terms, so reuse
        %them instead of recomputing the same logs.
        B = max(numerator);
        denom = log(sum(exp(numerator - B))) + B;

        NegLogPosterior = -(numerator - denom);

        %Standardise for submission: shift so the smallest value is 0
        standardisedPosterior(nn,:) = NegLogPosterior - min(NegLogPosterior);
    end

    csvwrite('predictionsOld.csv', standardisedPosterior);
else
    %Plot a histogram of row 40000 of previously computed predictions.
    %NOTE(review): 'predictions1' is not defined anywhere in this file;
    %it must already exist in the workspace (e.g. from a prior run) —
    %confirm before relying on this branch.
    bins = 0:63;
    [Nums, ~] = histc(predictions1(40000,:), bins);
    bar(bins, Nums, 'histc');
end
