function demogpsimilarity()
% DEMOGPSIMILARITY  Demo: fit GPs to two synthetic 1-D series and compare them.
%   Generates two noisy time series from a known GP prior (GPML toolbox),
%   fits an independent GP regression model to each (after z-score
%   normalisation), then computes a pointwise Bhattacharyya distance and
%   coefficient between the two predictive distributions via gpdist.
%   Requires the GPML toolbox plus the local helpers gptrain and gpdist.

cdata = {[0 1 0],[1 0 1]};              % plot colours for series 1 and 2

%% Generate two different time series from the same GP prior
% params(i,:) = RNG seeds for series i: [latent-draw seed, noise seed]
params = [.1 .105; .1 .1];
meanfunc = {@meanSum, {@meanLinear, @meanConst}}; hyp.mean = [0.5; 1];
covfunc = {@covMaterniso, 3}; ell = 1/4; sf = 1; hyp.cov = log([ell; sf]);
likfunc = @likGauss; sn = 0.1; hyp.lik = log(sn);

n1 = 20;                                % number of data points, series 1
x1 = gpml_randn(0.2, n1, 1);
K = feval(covfunc{:}, hyp.cov, x1);
mu = feval(meanfunc{:}, hyp.mean, x1);
% Sample y ~ N(mu, K), then add i.i.d. observation noise of std exp(hyp.lik)
y1 = chol(K)'*gpml_randn(params(1,1), n1, 1) + mu + exp(hyp.lik)*gpml_randn(params(1,2), n1, 1);

n2 = 20;                                % number of data points, series 2
x2 = gpml_randn(0.3, n2, 1);
K = feval(covfunc{:}, hyp.cov, x2);
mu = feval(meanfunc{:}, hyp.mean, x2);
y2 = chol(K)'*gpml_randn(params(2,1), n2, 1) + mu + exp(hyp.lik)*gpml_randn(params(2,2), n2, 1);

figure(1);
plot(x1,y1,'+','Color',cdata{1}); hold on; plot(x2,y2,'*','Color',cdata{2});
xlabel('x, inputs'); ylabel('y, output'); title('Raw Data');

data{1}.raw = [x1(:) y1(:)];
data{2}.raw = [x2(:) y2(:)];

% Common test grid covering both series, padded by 0.1 on each side
zz = (min([x1(:); x2(:)]) - .1:0.01:max([x1(:); x2(:)]) + .1)';

clear hyp covfunc likfunc meanfunc;

%% Apply Gaussian processes to both time series using the same procedure
k1 = @covSEiso;                         % long term trend
k2 = @covSEiso;                         % short term length-scales
covfunc = {@covSum, {k1, k2}};          % sum up the covariance terms

% Grid-search spec for gptrain, one row per swept hyperparameter:
% [which parameter to optimise, lower end of interval, upper end, step]
params = [1 1 3 .2; ...
          3 -1 1 .2];

for i = 1 : length(data)
    % model columns: [test input zz, predictive mean, predictive variance]
    data{i}.model = zeros(length(zz),3);
    data{i}.model(:,1) = zz;
    x = data{i}.raw(:,1); y = data{i}.raw(:,2);
    y1 = (1/std(y))*(y - mean(y));      % z-score normalisation

    % Initial covariance/likelihood hyperparameters. To evaluate the number
    % of parameters in the covariance function use: feval(covfunc{:})
    hyp.cov = [3 1 1 0]; hyp.lik = -2;

    hyp.cov = gptrain(hyp,covfunc,x,y1,params);   % coarse search over hypers

    % Refine hyperparameters by minimising the negative log marginal likelihood
    [hyp, fX] = ...
        minimize(hyp, @gp, -200, @infExact, [], covfunc, @likGauss, x, y1);
    data{i}.nlml = gp(hyp, @infExact, [], covfunc, @likGauss, x, y1);
    data{i}.hyp = hyp.cov;
    % Predictive mean and variance on the common grid zz
    [data{i}.model(:,2), data{i}.model(:,3)] = gp(hyp, @infExact, [], covfunc, @likGauss, x, y1, zz);
    % Undo the normalisation: the mean scales by std(y) and shifts by mean(y);
    % the predictive VARIANCE scales by std(y)^2 (bug fix: was *std(y), which
    % produced mis-scaled confidence bands and a wrongly-scaled gpdist input).
    data{i}.model(:,2) = (data{i}.model(:,2)*std(y)) + mean(y);
    data{i}.model(:,3) = data{i}.model(:,3)*std(y)^2;
end

% Plot the data and the predictions with +/- 2 std confidence bands
figure(2);
for i = 1 : length(data)
    zz = data{i}.model(:,1); mu = data{i}.model(:,2); s2 = data{i}.model(:,3);
    x = data{i}.raw(:,1); y = data{i}.raw(:,2);
    f = [mu+2*sqrt(s2); flipud(mu-2*sqrt(s2))];
    fill([zz; flipud(zz)], f, cdata{i}); alpha(0.25); hold on;
    plot(zz, mu,'--','Color',cdata{i}); plot(x,y,'*','Color',cdata{i});
end
hold off; xlabel('x, input'); ylabel('y, output');
title('GP-regression');

%% Compute similarity between the two GP posteriors
% gpdist receives [predictive mean, predictive variance] per input point
[dist, bc] = gpdist(data{1}.model(:,2:3),data{2}.model(:,2:3));

figure(3);
subplot(211);
for i = 1 : length(data)
    zz = data{i}.model(:,1); mu = data{i}.model(:,2); s2 = data{i}.model(:,3);
    x = data{i}.raw(:,1); y = data{i}.raw(:,2);
    f = [mu+2*sqrt(s2); flipud(mu-2*sqrt(s2))];
    fill([zz; flipud(zz)], f, cdata{i}); alpha(0.25); hold on;
    plot(zz, mu,'--','Color',cdata{i}); plot(x,y,'*','Color',cdata{i});
end
hold off; xlabel('x, input'); ylabel('y, output');
title('GP-regression');

subplot(212);
[AX,H1,H2] = plotyy(zz,dist,zz,bc,'plot');
set(get(AX(1),'Ylabel'),'String','Bhat. Distance')
set(get(AX(2),'Ylabel'),'String','Bhat. Coeff.')
xlabel('x, input');
set(H1,'LineStyle','--')
set(H2,'LineStyle','-')