function [test_err info yhat] = lin_liblinear(X, Y, Xtest, Ytest, solver, B)
% LIN_LIBLINEAR Trains a linear classifier with LIBLINEAR, evaluates on test data.
%
% Usage:
%
%   [TEST_ERR INFO YHAT] = LIN_LIBLINEAR(X, Y, XTEST, YTEST, SOLVER)
%   [TEST_ERR INFO YHAT] = LIN_LIBLINEAR(X, Y, XTEST, YTEST, SOLVER, B)
%
% X, Y          training features (sparse) and labels.
% XTEST, YTEST  test features and labels.
% SOLVER        LIBLINEAR solver type, passed through to train via '-s'.
% B             optional bias term, passed through via '-B'; when omitted,
%               no '-B' flag is given (LIBLINEAR's default of no bias).
%
% TEST_ERR      misclassification rate on the test set.
% INFO          struct with fields: vals (decision values from predict),
%               yhat (predicted labels), model (trained model), bestC (the
%               regularization parameter C that was used).
% YHAT          predicted labels, also returned directly for convenience.

% Candidate regularization values; index 6 corresponds to C = 10^0 = 1.
crange = 10.^(-10:2:4);

% NOTE(review): cross-validation over C is currently disabled and C is
% fixed at crange(6) = 1. To re-enable the original search:
%   for i = 1:numel(crange)
%     acc(i) = train(Y, X, sprintf('-s %d -v 10 -e 1.0 -c %g', solver, crange(i)));
%   end
%   [~, bestc] = max(acc);
bestc = 6;

% Train the linear model; '-e 1.0' loosens the stopping tolerance
% (presumably traded accuracy for speed — TODO confirm).
if nargin < 6
  model = train(Y, X, sprintf('-s %d -c %g -e 1.0', solver, crange(bestc)));
else
  model = train(Y, X, sprintf('-s %d -c %g -e 1.0 -B %g', solver, crange(bestc), B));
end

% Evaluate on the held-out test set; test error is the fraction of
% predictions that disagree with the true labels.
[yhat acc vals] = predict(Ytest, Xtest, model);
test_err = mean(yhat~=Ytest);

% Package additional results for the caller.
info.vals = vals;
info.yhat = yhat;
info.model = model;
info.bestC = crange(bestc);