%% Modified Naive Bayes
% This generative method uses the given Naive Bayes method
% to classify the data into ratings (1 OR 2) and (4 OR 5).
% Then it performs another Naive Bayes classification on
% each of the previously classified data separately such that
% the data that were classified as (1 OR 2) would be classified
% as 1 and 2, and similarly on (4 OR 5) data.
% Thus, this is like a combination of Naive Bayes and Decision Tree
% such that instead of entropy, Naive Bayes models were used to
% make binary classifications at each node recursively.
% In this specific case, the first-level model splits the data into
% (1 OR 2) versus (4 OR 5), and second-level models then separate
% the individual ratings within each branch.
%
% The output is the RMSE on a held-out split: models are trained on
% categories 7 to 11 and evaluated on categories 1 to 6.

%% Load the data
load ../data/data_no_bigrams.mat;

% Make our own test/training set for cross validation
% Training data = 7 to 11 categories
% Testing data = 1 to 6 categories
%
% bsxfun is unnecessary for an elementwise comparison against a scalar;
% compute each logical mask once and reuse it for both X and Y so the
% feature and label selections cannot drift apart.
trainMask = [train.category] > 6;
testMask  = [train.category] < 7;

Xtrain = make_sparse(train(trainMask));
Xtest  = make_sparse(train(testMask));

Ytrain = double([train(trainMask).rating])';
Ytest  = double([train(testMask).rating])';

% Train the first-level model on the coarse classes [(1 OR 2) (4 OR 5)].
% YminiK columns are indicators for ratings 1, 2, 4, 5 respectively and
% are reused by the second-level models below.
YminiK = bsxfun(@eq, Ytrain, [1 2 4 5]);
Y12 = YminiK(:,1) | YminiK(:,2);
Y45 = YminiK(:,3) | YminiK(:,4);
YminiK_12_45 = [Y12 Y45];
nb_12_45 = nb_train_pk(Xtrain'>0, YminiK_12_45);

% Predict the coarse class of each test document.
% NOTE(review): nb_test_pk appears to return one score per class column
% (the original rounded it to 0/1). Rounding can assign a document to
% BOTH classes (tie at 0.5) or to NEITHER (scores not summing to 1),
% which would double-count or drop documents in the branches below.
% Taking the argmax instead guarantees every test document lands in
% exactly one branch.
[~, coarse] = max(nb_test_pk(nb_12_45, Xtest'>0), [], 2);

% indices for Y(1 OR 2) and Y(4 OR 5)
YminiK_12_idx = find(coarse == 1);
YminiK_45_idx = find(coarse == 2);

% Train the second-level model [1 vs (2 OR 4 OR 5)] on the full
% training set. The "other" class deliberately pools ratings 2, 4 and 5,
% so within the (1 OR 2) branch this acts as a 1-vs-2 decision.
% (The original relied on + binding tighter than >; explicit logical OR
% computes the same indicator without the precedence trap.)
Y1 = YminiK(:,1);
Y2 = YminiK(:,2) | YminiK(:,3) | YminiK(:,4);
YminiK_1_2 = [Y1 Y2];
nb_1_2 = nb_train_pk(Xtrain'>0, YminiK_1_2);

% Classify only the test documents that the first level put in (1 OR 2).
% Use argmax rather than round so each document receives exactly one
% label (round can tie at 0.5 and set both columns, or set neither).
[~, c12] = max(nb_test_pk(nb_1_2, Xtest(YminiK_12_idx,:)'>0), [], 2);

% Indicator vectors over ALL test documents (zero outside this branch).
Yhat_1 = zeros(size(Xtest,1),1);
Yhat_2 = zeros(size(Xtest,1),1);

Yhat_1(YminiK_12_idx) = (c12 == 1);
Yhat_2(YminiK_12_idx) = (c12 == 2);

% Train the second-level model [(1 OR 2 OR 4) vs 5] on the full
% training set. The "other" class pools ratings 1, 2 and 4, so within
% the (4 OR 5) branch this acts as a 4-vs-5 decision.
% (Explicit logical OR replaces the original sum-then-compare form.)
Y4 = YminiK(:,1) | YminiK(:,2) | YminiK(:,3);
Y5 = YminiK(:,4);
YminiK_4_5 = [Y4 Y5];
nb_4_5 = nb_train_pk(Xtrain'>0, YminiK_4_5);

% Classify only the test documents that the first level put in (4 OR 5).
% Use argmax rather than round so each document receives exactly one
% label (round can tie at 0.5 and set both columns, or set neither).
[~, c45] = max(nb_test_pk(nb_4_5, Xtest(YminiK_45_idx,:)'>0), [], 2);

% Indicator vectors over ALL test documents (zero outside this branch).
Yhat_4 = zeros(size(Xtest,1),1);
Yhat_5 = zeros(size(Xtest,1),1);

Yhat_4(YminiK_45_idx) = (c45 == 1);
Yhat_5(YminiK_45_idx) = (c45 == 2);

% Stack the per-rating indicator vectors into one matrix whose columns
% correspond to ratings 1, 2, 4 and 5 in order.
Yhat_1245 = [Yhat_1 Yhat_2 Yhat_4 Yhat_5];

ratings = [1 2 4 5];

% Each row of Yhat_1245 selects its rating's column, so a matrix-vector
% product collapses the indicators into the predicted rating itself
% (equivalent to the elementwise scale-and-sum across columns).
Yhat = Yhat_1245 * ratings';

% Root-mean-squared error of the predictions; no trailing semicolon on
% the last line so the value is printed.
n = numel(Ytest);
rmse = sqrt(sum((Ytest - Yhat) .^ 2) / n)