% Load the dataset and visualize the raw training data.
clear;
% ex5data1.mat supplies the training set X, y; the later sections also use
% Xval/yval and Xtest/ytest, which are presumably loaded from this same
% file (nothing else in this script defines them -- verify against the .mat).
load('ex5data1.mat');
m = size(X, 1);  % number of training examples

% Scatter plot of water level change vs. outflow.
figure;
plot(X, y, 'rx', 'MarkerSize', 10, 'LineWidth', 1.5);
xlabel('Change in water level (x)');
ylabel('Water flowing out of the dam (y)');
% Sanity-check the regularized cost and gradient at theta = [1; 1]
% with lambda = 1.  The original code computed the cost, reassigned the
% same theta, then recomputed cost AND gradient; a single call to
% linearRegCostFunction returns both values at once.
theta = [1; 1];
[J, grad] = linearRegCostFunction(theta, X, y, 1);

fprintf(['Cost at theta = [1; 1]:%f '...
                        '\n(this value should be about 303.993192)\n'], J);

fprintf(['Gradient at theta = [1; 1]:[%f; %f] '...
                                '\n(this value should be about [-15.303016; 598.250744])\n'], ...
    grad(1), grad(2));

% Fit an unregularized (lambda = 0) linear model and overlay the fitted
% line on the training data.
lambda = 0;
theta = trainLinearReg(X, y, lambda);
figure;
plot(X, y, 'rx', 'MarkerSize', 10, 'LineWidth', 1.5);
xlabel('Change in water level (x)');
ylabel('Water flowing out of the dam (y)');
hold on;
% Prepend the intercept column of ones before applying theta.
plot(X, [ones(m, 1), X] * theta, '--', 'LineWidth', 2);
hold off;

% Learning curve for the linear model: training and cross-validation
% error as a function of the number of training examples used.
lambda = 0;
[error_train, error_val] = learningCurve(X, y, Xval, yval, lambda);
figure;
plot(1:m, error_train, 1:m, error_val);
title('Learning curve for linear regression')
legend('Train', 'Cross Validation')
xlabel('Number of training examples')
ylabel('Error')
axis([0 13 0 150])

fprintf('# Training Examples\tTrain Error\tCross Validation Error\n');

% Echo the same curve values as a text table.
for i = 1:m
    fprintf('  \t%d\t\t%f\t%f\n', i, error_train(i), error_val(i));
end

% Map the inputs onto polynomial features up to degree p, then normalize
% every column.  The training set defines mu and sigma; the test and
% validation sets are scaled with those SAME training statistics so all
% three live in a consistent feature space.
p = 8;
X_poly = polyFeatures(X, p);
[X_poly, mu, sigma] = featureNormalize(X_poly);

X_poly_test = polyFeatures(Xtest, p);
X_poly_test = bsxfun(@rdivide, bsxfun(@minus, X_poly_test, mu), sigma);

X_poly_val = polyFeatures(Xval, p);
X_poly_val = bsxfun(@rdivide, bsxfun(@minus, X_poly_val, mu), sigma);

fprintf('Normalized Training Example 1:\n');
fprintf('  %f  \n', X_poly(1, :));

% Fit a degree-p polynomial with regularization (lambda = 3) and plot the
% fitted curve over the training data.
lambda = 3;
theta = trainLinearReg(X_poly, y, lambda);
figure;
plot(X, y, 'rx', 'MarkerSize', 10, 'LineWidth', 1.5);
% plotFit receives mu, sigma, p -- presumably it re-applies the same
% feature mapping/normalization over the x-range before drawing; confirm
% against plotFit.m.
plotFit(min(X), max(X), mu, sigma, theta, p);
xlabel('Change in water level (x)');
ylabel('Water flowing out of the dam (y)');
title (sprintf('Polynomial Regression Fit (lambda = %f)', lambda));

% Learning curve for the polynomial model at this lambda.
figure;
[error_train, error_val] = learningCurve(X_poly, y, X_poly_val, yval, lambda);
plot(1:m, error_train, 1:m, error_val);
title(sprintf('Polynomial Regression Learning Curve (lambda = %f)', lambda));
xlabel('Number of training examples');
ylabel('Error');
axis([0 13 0 100]);
legend('Train', 'Cross Validation');
fprintf('Polynomial Regression (lambda = %f)\n\n', lambda);
fprintf('# Training Examples\tTrain Error\tCross Validation Error\n');

% Echo the curve values as a text table.
for i = 1:m
    fprintf('  \t%d\t\t%f\t%f\n', i, error_train(i), error_val(i));
end

% Validation curve: sweep a range of lambda values (chosen inside
% validationCurve) and plot train vs. cross-validation error to pick the
% regularization strength.
[lambda_vec, error_train, error_val] = validationCurve(X_poly, y, X_poly_val, yval);
% close all;
figure;
plot(lambda_vec, error_train, lambda_vec, error_val);
legend('Train', 'Cross Validation');
xlabel('lambda');
ylabel('Error');
fprintf('lambda\t\tTrain Error\tValidation Error\n');

% Echo the swept values as a text table.
for i = 1:length(lambda_vec)
    fprintf(' %f\t%f\t%f\n', ...
        lambda_vec(i), error_train(i), error_val(i));
end

% Final generalization check on the held-out test set.  X_poly_test was
% already mapped and normalized earlier in the script with the training
% mu/sigma (the original code recomputed it here identically), so it is
% reused directly.  Retrain on the full training set with lambda = 3 and
% report the UNREGULARIZED (lambda = 0) test error.
theta = trainLinearReg(X_poly, y, 3);
error_test = linearRegCostFunction(theta, X_poly_test, ytest, 0);
fprintf('Test Error = %f\n', error_test);
