clear all;
close all;

%set up data
%Xarg/Yarg are read by fitnessfun (separate file) via global scope
global Xarg Yarg;

%read data
%Y: target variable
Y = csvread('Data\2_DataApp1\12cities\Y.csv');

%X: raw training data set
X = csvread('Data\2_DataApp1\12cities\X.csv');


%feature scaling to avoid overflow (mu/st kept for possible de-scaling)
[XS, mu, st] = featureScaling(X);

%get data set size: M samples, N features
[M, N] = size(X);

%split point: first 70% of rows for training, rest for testing
len = round(M * 0.7);

%divide the data set
Xtrain = XS(1:len,:);
Ytrain = Y(1:len,:);

%BUGFIX: original used len:end, which put row `len` in BOTH the training
%and the test set; the test split must start at len+1 to stay disjoint
Xtest = XS(len+1:end,:);
Ytest = Y(len+1:end,:);

%vectors to store errors, indexed by training-subset size `it`
%(only indices 100, 200, ... are filled by the loop below)
trainingError = zeros(len,1);
testingError = zeros(len,1);

%ga optimization
%set up the ga
%get the option structure with default parameters
options = gaoptimset(@ga);

%set some parameters for our problem

%control parameters:
%number of variables. it is required and depends on the fitness function
%(N = 12 features x 2 parameters each = 24, reshaped as Nx2 later)
nvars = 24;
options.PopulationSize = 50;
%elitism: how many individuals from the generation are granted to survive
options.EliteCount = 10;
options.CrossoverFraction = 0.9;
%BUGFIX: removed `options.MutationFraction = 0.1;` — that is not a
%gaoptimset option (it silently added an ignored struct field). The
%mutation rate is configured as a parameter of the MutationFcn instead,
%e.g. options.MutationFcn = {@mutationgaussian 1 1};
options.Generations = 500;
%options.TolFun = 0;
%options.TolCon = 0;
%options.StallGenLimit = Inf;
%options.StallTimeLimit = Inf;

%genetic algorithm operators:
%specify functions to operators. have many  already implemented but you can
%specify one that you like
% options.CreationFcn = @gacreationuniform;
% options.FitnessScalingFcn= @fitscalingrank;
% options.SelectionFcn= @selectionstochunif;
% options.CrossoverFcn= @crossoverscattered;
% options.MutationFcn= {@mutationgaussian  [1]  [1]};
% options.DistanceMeasureFcn = [];

%set up constraints
%if you want to start with your initial population
options.InitialPopulation = [];
%if you want to put constraints on the initialization of an individual.
%has to be a 2xNvariables matrix (row 1 lower bound, row 2 upper bound)
%BUGFIX: the gaoptimset field is named PopInitRange, not InitialRange
%(the old name silently added an ignored struct field)
options.PopInitRange = [];
%IntCon: indices of the integer-constrained variables in the individual
IntCon = 1:24;
%lower bound for each variable
lb = 1 .* ones(1,24);
%upper bound for each variable
ub = 10 .* ones(1,24);

%analysis options:
%options.Display = 'iter';
%options.PlotInterval = 2;
%define a function to be called at the end of each iteration
%options.OutputFcns = [];
%plots already implemented
%options.PlotFcns =  {@gaplotbestf @gaplotbestindiv @gaplotdistance @gaplotrange @gaplotscorediversity @gaplotscores @gaplotselection };
%options.PlotFcns =  {@gaplotbestf};

%define the fitness function y = fitnessfun(x) in another file named
%fitnessfun.m, in the same directory as this script.
%learning-curve loop: train on growing subsets of 100, 200, ... samples
for it=100:100:len
    fprintf('data size: %d\n', it);
    %expose the current training subset to fitnessfun via the globals
    Xarg =  Xtrain(1:it,:);
    Yarg = Ytrain(1:it,:);

    %training
    %start the genetic algorithm and take the training error
    [x,fval,exitflag,output] = ga(@fitnessfun,nvars,[],[],[],[],...
        lb,ub,[],IntCon,options);
    trainingError(it,1) = fval;

    %test error computation
    %calculate features F from Xtest using the evolved parameters x
    [FY, F] = featureProgram(Ytest, Xtest, reshape(x,N,2));
    %linear regression on the test-set features
    [W, wCost] = linearRegressionModel(F, FY,  500, 0.01, 1);
    %model prediction
    %BUGFIX: original called modelPrediction(Y, X, ...) on the FULL RAW
    %(unscaled) data, so the reported "test error" included training rows;
    %evaluate on the held-out split instead
    [H, cost] = modelPrediction(Ytest, Xtest, reshape(x,N,2), W);
    testingError(it,1) = cost;

    disp('l/train error/test error');
    disp([it,fval,cost]);


end

%plot learning curves
%BUGFIX: errors are only recorded at it = 100, 200, ..., so plot just
%those sampled indices — plotting 1:len drew mostly the zero padding
sampleIdx = 100:100:len;
figure, plot(sampleIdx, trainingError(sampleIdx));
hold on;
plot(sampleIdx, testingError(sampleIdx),'color','red');
xlabel('training set size');
ylabel('error');
legend('training error','testing error');


%output: Structure containing output from each generation and other information about algorithm performance
