%% Machine Learning Online Class - Exercise 1: Linear Regression

%     plotData.m
%     gradientDescent.m
%     computeCost.m
%     gradientDescentMulti.m
%     computeCostMulti.m
%     featureNormalize.m
%     normalEqn.m
%
% NOTE: the two lines below are leftover from the original exercise handout;
% this script instead generates a synthetic data set (see below), where
% x is an input sampled on [0,1] and y is a noisy sine of x.


%% Initialization
clear ; close all; clc

%% Generate a synthetic data set: noisy samples of one sine period
fprintf('Generate and plot data sets...\n');
randn('seed',100);                       % fix the legacy generator seed for reproducibility
x = linspace(0,1,20)';                   % 20 evenly spaced inputs on [0,1]
y = sin(2*pi*x) + 0.3*randn(size(x));    % additive Gaussian noise, variance 0.09
X = x;                                   % design matrix with a single feature


%% ======================= Part 1: Plotting =======================
% Expand the single input into a 9th-order polynomial design matrix.
X = [x x.^2 x.^3 x.^4 x.^5 x.^6 x.^7 x.^8 x.^9];% multi-variable
m = length(y); % number of training examples

% Plot the raw training data (column 1 is the original x)
plotData(X(:,1), y);

% Prepend the intercept column and solve for theta via the normal equation
X = [ones(m, 1) X];
theta = normalEqn(X, y);

% Plot the polynomial fit (linear regression in theta, polynomial in x)
hold on; % keep previous plot visible
plot(X(:,2), X*theta, 'b--','LineWidth',3)
% NOTE(review): legend has three entries but only two plot calls are visible
% here — presumably plotData also draws the underlying sine curve; confirm.
legend('training data', 'underlying function','linear regression');
hold off % don't overlay any more plots on this figure


%% =================== Part 3: Gradient descent ===================

%% ============= Part 4: Visualizing J(theta_0, theta_1) =============
fprintf('Visualizing J(theta_0, theta_1) ...\n')

% Build a 100x100 grid centered on the last two fitted coefficients;
% all lower-order coefficients stay fixed at their normal-equation values.
offset = 4;
theta0_vals = linspace(-offset+theta(end-1), offset+theta(end-1), 100);
theta1_vals = linspace(-offset+theta(end), offset+theta(end), 100);

% Preallocate the cost surface
J_vals = zeros(length(theta0_vals), length(theta1_vals));

% Evaluate the cost at every (theta0, theta1) grid point
for ii = 1:length(theta0_vals)
    for jj = 1:length(theta1_vals)
        t = [theta(1:end-2); theta0_vals(ii); theta1_vals(jj)];
        J_vals(ii,jj) = computeCost(X, y, t);
    end
end

% Because of the way meshgrids work in the surf command, we need to 
% transpose J_vals before calling surf, or else the axes will be flipped
J_vals = J_vals';

% Find the grid indices of the minimum cost (passed in the original,
% un-transposed orientation)
[minJ Jax] = findMinAxis(J_vals');

% % surface
% figure(2)
% hold on;
% surf(theta0_vals, theta1_vals, log10(J_vals))
% xlabel('\theta_0'); ylabel('\theta_1');
% title('log10-scaled performance surface')
% hold off;

% contour
figure(3)
% Plot J_vals as 30 contours spaced logarithmically between 0.01 and 1000
contour(theta0_vals, theta1_vals, J_vals, logspace(-2, 3, 30))
xlabel('\theta_0'); ylabel('\theta_1');
hold on;
% Mark the normal-equation solution for the last two coefficients
plot(theta(end-1), theta(end), 'rx', 'MarkerSize', 10, 'LineWidth', 2);
%plot(theta0_vals(Jax(1)), theta1_vals(Jax(2)), 'co', 'MarkerSize', 10, 'LineWidth', 2);
title('error performance contour with different cost functions')
legend('MSE(J)','normal equation');


% ============ Part 6: compare the performance ============
% Append one record per run (feature count and minimum grid cost) to a log
% file so successive runs with different polynomial orders can be compared.
fid = fopen('ex1_record.txt','a+');
if fid == -1
    % fopen returns -1 on failure; writing to it would fail silently
    error('Could not open ex1_record.txt for appending');
end
% Single call, byte-identical output to the original four fprintf calls
fprintf(fid,'feature number : %d\tJmin= %f\n',length(theta),minJ);
fclose(fid);
