% Learning-curve experiment (approach 2) on the 3-cities data set.
% Trains on progressively larger prefixes of the training split, evaluating
% each model on a fixed held-out test split, then plots both error curves
% to diagnose bias vs. variance.

clear;      % plain 'clear' is preferred over 'clear all' (which also wipes breakpoints/caches)
close all;

%read data
%Y: target variable; X: raw feature matrix, one row per example
%NOTE(review): backslash paths are Windows-only -- consider fullfile() for portability
Y = csvread('Data\DataApp2\3cities\Y.csv');
X = csvread('Data\DataApp2\3cities\X.csv');

%number of examples
M = size(X,1);

%70/30 train/test split point
trainLen = round(M * 0.7);

%model hyper-parameter passed to both training and prediction
%(assumed to be a polynomial degree / model order -- confirm against approach2)
degree = 7;

%feature scaling to avoid overflow
%NOTE(review): mu/st are estimated on the FULL data set, so test-set
%statistics leak into the training features. Ideally fit on rows
%1:trainLen only and reuse mu/st on the test rows -- confirm
%featureScaling's contract before changing.
[XS, mu, st] = featureScaling(X);

%split data
%BUG FIX: the test split previously started at row trainLen, so that row
%appeared in both the training and the test set; it now starts at trainLen+1
Xtrain = XS(1:trainLen,:);
Ytrain = Y(1:trainLen,:);

Xtest = XS(trainLen+1:end,:);
Ytest = Y(trainLen+1:end,:);

%preallocate error vectors (one entry per training-set size)
trainingError = zeros(trainLen,1);
testingError = zeros(trainLen,1);

%learning curve: train on the first i examples, test on the fixed test set
for i = 1:trainLen

%     %model 1 training (kept for reference)
%     [Poptim, PCostO, Woptim, WCostO, trainCost] = approach1(Ytrain(1:i,:), Xtrain(1:i,:), 400, 0.1, 1);
%     trainingError(i,1) = trainCost;
%     [H, testCost] = modelPrediction( Ytest, Xtest, Poptim, Woptim);
%     testingError(i,1) = testCost;

    %approach 2 training: 400 iterations, learning rate 0.01, last arg 1
    %(assumed regularization/flag -- confirm against approach2's signature)
    [Woptim, WCostO] = approach2(Ytrain(1:i,:), Xtrain(1:i,:), degree, 400, 0.01, 1);
    trainCost = WCostO(end,1);      %final cost after optimization
    trainingError(i,1) = trainCost;

    %prediction approach 2 on the held-out test set
    [H, testCost] = modelPredictionAPP2(Ytest, Xtest, degree, Woptim);
    testingError(i,1) = testCost;

    disp('i/train error/test error');   %label fixed: loop variable is i, not l
    disp([i, trainCost, testCost]);

end

%plot learning curves: training error (default blue) vs. testing error (red)
figure;
plot(1:trainLen, trainingError(:));
hold on;
plot(1:trainLen, testingError(:), 'Color', 'red');