% Mengzi Zhang
% 17 Nov 2011
% CIS 520
% SVM

addpath libsvm
addpath liblinear

%% CV training / test sets
%
% Split the labeled 'train' struct array by category: one category becomes
% the CV training set, another the CV test set. make_sparse is padded to
% size(X, 2) columns so both matrices share the full vocabulary width.

% Earlier split (categories > 6 train, < 7 test), kept for reference:
%  XcvTrain = make_sparse(train(bsxfun(@gt, [train().category], 6)));
%  YcvTrain = double([train(bsxfun(@gt, [train().category], 6)).rating])';
%
%  XcvTest = make_sparse(train(bsxfun(@lt, [train().category], 7)));
%  YcvTest = double([train(bsxfun(@lt, [train().category], 7)).rating])';

% Run on a smaller subset (one category per set) to check whether the
% out-of-memory failure still occurs.
% A scalar == comparison broadcasts on its own, so bsxfun is unnecessary;
% compute each logical mask once instead of twice.
% NOTE: 'train()' (with parens) is used because a Spider function named
% 'train' also exists; the struct variable shadows it here.
catLabels = [train().category];
isTrainCat = catLabels == 9;
isTestCat  = catLabels == 3;

XcvTrain = make_sparse(train(isTrainCat), size(X, 2));
YcvTrain = double([train(isTrainCat).rating])';

XcvTest = make_sparse(train(isTestCat), size(X, 2));
YcvTest = double([train(isTestCat).rating])';

% Alternative loaders, kept for reference:
% [XcvTrain Y] = make_sparse(traindata, vocab);
% [XcvTest Ytest] = make_sparse(testdata, vocab);

%% Spider
%
% Train and evaluate an SVM using the Spider machine-learning toolbox.
% Requires the Spider classes svm, kernel, and data to be on the path.

% Set up svm object: RBF kernel with width parameter 1, using the
% 'andre' optimizer option.
svm_obj = svm({kernel('rbf',1),'optimizer="andre"'});
% Linear (no explicit kernel) variant, kept for reference:
%svm_obj = svm({'optimizer="andre"'});
% Pass in other kernel objects to use other kernels
%svm_obj.child = kernel;

% Train
% No output Y data for clustering. Create with one var
data_train = data (XcvTrain, YcvTrain);
% Use feval, because supplied data has a var named train. Using train() will
%   point to that var, instead of the Spider fn.
% Returns the training-set predictions and the fitted model object.
[predictions_tr model] = feval ('train', svm_obj, data_train);


% See the predictions (unsuppressed on purpose so it prints to the console)
predictions_tr.X

% See model params
% weights
%model.alpha
% threshold
%model.b0
% support vectors
%model.Xsv


% Test: wrap the held-out set in a Spider data object and score it with
% the trained model (feval again avoids any name shadowing).
data_test = data (XcvTest, YcvTest);
predictions_ts = feval ('test', model, data_test);

% See the predictions (unsuppressed on purpose so it prints to the console)
predictions_ts.X



%% Libsvm from homework 6

% INSTRUCTIONS: Use the KERNEL_LIBSVM function to evaluate each of the
% kernels you mentioned. Then run the line below to save the results to a
% .mat file.

%k = @(x,x2) kernel_poly(x, x2, 1);
%[results.linear, info] = kernel_libsvm( ...
% XcvTrain, YcvTrain, XcvTest, YcvTest, k);

% k = @(x,x2) kernel_poly(x, x2, 2);
% [results.quadratic, info] = kernel_libsvm(X, Y, Xtest, Ytest, k);
% 
% k = @(x,x2) kernel_poly(x, x2, 3);
% [results.cubic, info] = kernel_libsvm(X, Y, Xtest, Ytest, k);
% 
% k = @(x,x2) kernel_gaussian(x, x2, 20);
% [results.gaussian, info] = kernel_libsvm(X, Y, Xtest, Ytest, k);
%  
%k = @(x,x2) kernel_intersection(x, x2);
%[results.intersect, info, yhat] = kernel_liblinear( ...
%  XcvTrain, YcvTrain, XcvTest, YcvTest, k);


% Run liblinear training without an explicit kernel.
% -s 0: L2-regularized logistic regression (required so that '-b 1'
%       probability estimates are available at predict time);
% -c: cost parameter; -e: stopping tolerance; -q: quiet.
% BUG FIX: the original passed '-v 5' here, but with -v liblinear's train()
% returns only the scalar cross-validation accuracy, NOT a model struct,
% so the predict() call below could never work. Run CV separately if
% needed:
%   cv_acc = train(YcvTrain, XcvTrain, sprintf('-s 0 -v 5 -c %g -q -e 1.0', 1.0));
model = train(YcvTrain, XcvTrain, sprintf('-s 0 -c %g -q -e 1.0', 1.0));

% Predict with probability estimates (-b 1). 'vals' holds the per-class
% probabilities, one column per label in model.Label order.
[yhat acc vals] = predict(YcvTest, XcvTest, model, '-b 1');

% Misclassification rate on the held-out category.
% BUG FIX: was a hard-coded placeholder (test_err = 0), and the commented
% version referenced a nonexistent 'Ytest' variable instead of YcvTest.
test_err = mean(yhat ~= YcvTest);

% Possible rating values in this dataset (rating 3 does not occur).
% Kept for reference; the expectation below uses model.Label instead.
ratings = [1 2 4 5];

% Expected rating under the predicted class distribution:
%   E[rating] = sum_k P(class k) * label_k
% BUG FIX: the probabilities live in 'vals' from predict() above, not in
% 'info' (which only exists when the commented-out kernel_libsvm path
% runs). Also, the columns of 'vals' are ordered by model.Label — which
% need not match the hard-coded [1 2 4 5] — so weight by model.Label.
yhat_Exp = vals * double(model.Label);

% Root-mean-squared error of the expected rating against the true ratings.
n = numel (YcvTest);
rmseMini = sqrt (sum ((YcvTest - yhat_Exp) .^ 2) / n);

%% Bar Plot - comparing error rates of different kernels

% Makes a bar chart showing the errors of the different algorithms.
% algs = fieldnames(results);
% for i = 1:numel(algs)
%     y(i) = results.(algs{i});
% end
% bar(y);


