
% A script for learning hyperparameters by gradient methods
% Mark Norrish, 2011

clear all;
addpath('gpml-matlab-v3.1-2010-09-27'); % GPML v3.1 toolbox
startup;                                % GPML path/startup script

% Load training data: one row per example, final column is the class label.
D = load('gp_data');
% D = D(randperm(size(D,1)),:); % shuffle it to randomise trials
% D = D(1:20, :);               % to make it faster
[n, ncols] = size(D);  % n = number of examples
dim = ncols - 1;       % input dimensionality (last column holds the labels)
n_class = max(D(:,dim+1));

range = 2; % hyperparameters per covariance function (hax -- tied to covSEiso)

func = @covSEiso; % restricted for test purposes

% Separate inputs from labels.
X  = D(1:n, 1:dim);
Y1 = D(1:n, dim+1);

% 1-of-n_class target encoding stacked class-by-class into one long vector:
% entry (c-1)*n + i is true iff example i belongs to class c.
class_of_entry = kron((1:n_class)', ones(n,1));
y = class_of_entry == repmat(Y1, n_class, 1);

hyps = zeros(range * n_class, 1); % initialise
% hyps = 0.8*[-1 0 0; 1 -1 0; 0 1 0; 1 1 0; -1 -1 0]';

% One (jitter-stabilised) covariance matrix per class at the initial hyps.
sigma_noise = 1e-6;
K = zeros(n, n, n_class);
for c = 1:n_class
  lo = (c-1)*range + 1;
  hi = c*range;
  K(:,:,c) = func(hyps(lo:hi), X, X) + sigma_noise*eye(n);
end
approxF = alg_3_3(n, n_class, K, y);
uniform = 0;

% l bounds the optimiser's work: max # of line searches/func evals (+/-).
% If minimize exhausts the budget exactly (its == -l), double it and rerun;
% a stupid construct, but it yields the answer AND how many evals it took.
l = -20;
its = -l;
disp('Minimising...');
while its == -l
  l = 2*l;
  [hyps, negloglik, its] = minimize(hyps, @eq_3_44, l, func, n, n_class, X, y, approxF, uniform);
end

disp('The learnt hyperparameters are:');
Hyps = reshape(hyps, range, n_class); % one column of hyps per class
disp(Hyps);
dlmwrite('learnt_hyps', Hyps);