function [w, error_log] = blr_train(data, class_label)
% BLR_TRAIN  Online Bayesian linear regression, used as a one-vs-rest classifier.
%
%   [W, ERROR_LOG] = BLR_TRAIN(DATA, CLASS_LABEL) sweeps the rows of DATA
%   once, predicting each point's class from the posterior mean learned on
%   the preceding points, then updating the posterior with that point.
%
%   DATA columns:
%     1:3    xyz coordinates (not used by the classifier)
%     4      point id        (not used by the classifier)
%     5      integer class label
%     6:end  feature vector
%
%   Known label codes in this dataset (for reference):
%     VEG = 1004, WIRE = 1100, POLE = 1103, GROUND = 1200, FACADE = 1400
%
%   Outputs:
%     W         posterior mean weight vector after all points (numFeatures x 1)
%     ERROR_LOG logical vector; true where the online prediction disagreed
%               with the true +1/-1 target.
%
%   Run loadData before calling this function to build DATA.

label    = data(:, 5);
features = data(:, 6:end);

numFeatures = size(features, 2);
numPoints   = length(label);

% One-vs-rest targets: +1 for the requested class, -1 for everything else.
% (E.g. split all points into VEG and not-VEG.)
y = -ones(size(label));
y(label == class_label) = 1;
y_guess = zeros(size(label));

% Observation noise variance (fixed hyperparameter of the model).
noise_var = 0.001;

% Natural (information-form) parameters of the Gaussian posterior over the
% weight vector: precision matrix P and information vector J. Starting from
% zeros corresponds to a flat (improper) prior, matching the original code.
P  = zeros(numFeatures);
J  = zeros(numFeatures, 1);
mu = zeros(numFeatures, 1);

for i = 1:numPoints
    % Predict with the posterior mean estimated from the PREVIOUS points
    % only (for i == 1 this is the zero prior mean, so y_guess(1) = 0).
    y_guess(i) = mu' * features(i, :)';

    % Information-form Bayesian update for observation (x_i, y_i):
    %   J <- J + y_i * x_i / var
    %   P <- P + x_i * x_i' / var
    % NOTE: the loop now starts at i = 1 so the first point's evidence is
    % included (the original started at i = 2 and silently dropped point 1).
    x = features(i, :)';
    J = J + y(i) * x / noise_var;
    P = P + (x * x') / noise_var;

    % Posterior mean mu = P \ J. pinv is used because P is rank-deficient
    % until numFeatures linearly independent points have been seen under
    % the flat prior; inv() produced Inf/NaN with warnings in that regime.
    mu = pinv(P) * J;
end

% Best estimate of the weighting vector: a Gaussian with mean mu and
% covariance inv(P). Using mu directly also avoids the original's
% undefined-variable error when numPoints < 2.
w = mu;

% Threshold the online predictions into +1/-1 and log the mistakes.
y_guess(y_guess >= 0) = 1;
y_guess(y_guess < 0)  = -1;

error_log = (y ~= y_guess);
% figure, plot(cumsum(error_log)), axis equal
end


