% Add toolbox and data paths (LIBSVM, randomforest-matlab, ECOS-DENFIS, project src)
addpath('../dataset');
addpath('../libsvm-3.11/','../randomforest-matlab/RF_Reg_C', '../ecos_denfis');
addpath('../src/');

% Load data: columns 2-11 are the input features, column 12 is the target.
data=importdata('steel.csv');
X=data.data(:,2:11);
% NOTE(review): presumably scales each feature into [-1, 1] — confirm
% against scale_data in ../src.
X=scale_data(X, 1, -1, [], []);
Y=data.data(:,12);

% Number of random train/test repetitions. Each *_MSE vector stores one
% test-set MSE per trial; the *2_MSE variants store the MSE after rounding
% predictions to 2 decimal places.
NUM_TRIALS = 50;
RF_MSE      = zeros(NUM_TRIALS,1);
SVR_MSE     = zeros(NUM_TRIALS,1);
ANN_MSE     = zeros(NUM_TRIALS,1);
DENFIS_MSE  = zeros(NUM_TRIALS,1);
RF2_MSE     = zeros(NUM_TRIALS,1);
SVR2_MSE    = zeros(NUM_TRIALS,1);
ANN2_MSE    = zeros(NUM_TRIALS,1);
DENFIS2_MSE = zeros(NUM_TRIALS,1);

% Repeat the train/test split 50 times and record the test MSE of four
% regressors (Random Forest, ANN, SVR, DENFIS) on each split.
for i=1:50
    % partition to trn and testing
    [N D] =size(X);
    % Randomly split: the first TRN_LENGTH=46 rows (after shuffling) are
    % training data; 8 of those are later held out as the ANN validation
    % set (so 38 examples effectively train the ANN); the remaining N-46
    % rows are the test set.
    randvector = randperm(N);
    TRN_LENGTH=46;
    trn_idx=randvector(1:TRN_LENGTH);
    tst_idx=randvector(TRN_LENGTH+1:end);
    X_trn = X(trn_idx,:);
    Y_trn = Y(trn_idx);
    X_tst = X(tst_idx,:);
    Y_tst = Y(tst_idx);
    
    % Pick 8 of the 46 training rows as a validation set; val_idx ends up
    % holding indices into the full X/Y (a subset of trn_idx), as required
    % by 'divideind' below.
    randvector=randperm(TRN_LENGTH);
    val_idx=randvector(1:8);
    val_idx=trn_idx(val_idx);
    
    %% RF
    % Random forest regression with the toolbox defaults (no extra options).
    model = regRF_train(X_trn,Y_trn);
    Y_hat = regRF_predict(X_tst,model);
    RF_MSE(i)=mean((Y_hat-Y_tst).^2);
    % Same predictions rounded to 2 decimal places.
    RF2_MSE(i)=mean((round(Y_hat*100)./100-Y_tst).^2);
    % %% RF advanced
    % extra_options.importance = 1; % calculate importance
    % model = regRF_train(X_trn,Y_trn, 500, 4, extra_options);
    % Y_hat = regRF_predict(X_tst,model);
    % RF_MSE=mean((Y_hat-Y_tst).^2);
    % fprintf('\nMSE rate %f\n', RF_MSE );
    % figure('Name','OOB error rate');
    % plot(model.mse); title('OOB MSE error rate');  xlabel('iteration (# trees)'); ylabel('OOB error rate');
    % figure('Name','Importance Plots')
    % subplot(3,1,1);
    % bar(model.importance(:,end-1));xlabel('feature');ylabel('magnitude');
    % title('Mean decrease in Accuracy');
    %
    % subplot(3,1,2);
    % bar(model.importance(:,end));xlabel('feature');ylabel('magnitude');
    % title('Mean decrease in Gini index');
    % % model.importanceSD
    % subplot(3,1,3);
    % bar(model.importanceSD);xlabel('feature');ylabel('magnitude');
    % title('Std. errors of importance measure');
    
    %% ANN
    % Solve an Input-Output Fitting problem with a Neural Network
    % Script generated by NFTOOL
    % Created Sun Oct 27 16:28:18 SGT 2013
    %
    % This script assumes these variables are defined:
    %
    %   X - input data.
    %   Y - target data.
    
    % The network is given ALL samples (columns = examples); the
    % train/val/test split is enforced below via 'divideind'.
    inputs = X';
    targets = Y';
    
    % Create a Fitting Network
    hiddenLayerSize = 10;
    net = fitnet(hiddenLayerSize);
    
    % Choose Input and Output Pre/Post-Processing Functions
    % For a list of all processing functions type: help nnprocess
    net.inputs{1}.processFcns = {'removeconstantrows','mapminmax'};
    net.outputs{2}.processFcns = {'removeconstantrows','mapminmax'};
    
    
    % Setup Division of Data for Training, Validation, Testing
    % For a list of all data division functions type: help nndivision
    % Use the explicit indices from the split above so the ANN sees exactly
    % the same test set as the other models. NOTE(review): setdiff sorts
    % its output, which changes sample order but not membership.
    net.divideFcn = 'divideind';
    net.divideParam.trainInd = setdiff(trn_idx, val_idx);
    net.divideParam.valInd = val_idx;
    net.divideParam.testInd = tst_idx;
    
    % For help on training function 'trainlm' type: help trainlm
    % For a list of all training functions type: help nntrain
    net.trainFcn = 'trainlm';  % Levenberg-Marquardt
    
    % Choose a Performance Function
    % For a list of all performance functions type: help nnperformance
    net.performFcn = 'mse';  % Mean squared error
    
    % Choose Plot Functions
    % For a list of all plot functions type: help nnplot
    % net.plotFcns = {'plotperform','plottrainstate','ploterrhist', ...
    %   'plotregression', 'plotfit'};
    
    
    % Train the Network
    [net,tr] = train(net,inputs,targets);
    
    % Test the Network
    outputs = net(inputs);
    errors = gsubtract(targets,outputs);
    performance = perform(net,targets,outputs);
    
    % Recalculate Training, Validation and Test Performance
    % The masks zero out samples outside each partition, so each perform()
    % call scores only that partition.
    trainTargets = targets .* tr.trainMask{1};
    valTargets = targets  .* tr.valMask{1};
    testTargets = targets  .* tr.testMask{1};
    trainPerformance = perform(net,trainTargets,outputs);
    valPerformance = perform(net,valTargets,outputs);
    testPerformance = perform(net,testTargets,outputs);
    ANN_MSE(i)=testPerformance;
    % outputs is a 1xN row over ALL samples; select the test columns and
    % transpose to match the column vector Y_tst.
    ANN_pred=outputs(tst_idx)';
    ANN2_MSE(i)=mean((round(ANN_pred*100)./100-Y_tst).^2);
    % View the Network
    % view(net)
    
    % Plots
    % Uncomment these lines to enable various plots.
    %figure, plotperform(tr)
    %figure, plottrainstate(tr)
    %figure, plotfit(net,inputs,targets)
    %figure, plotregression(targets,outputs)
    %figure, ploterrhist(errors)
    
    %% SVR
    % Hyper-parameter grid for the project helper grid_search_SVR.
    % NOTE(review): c_range/e_range are presumably log2 exponents for C and
    % epsilon, and g_range raw gamma values scaled by feature count D —
    % confirm against grid_search_SVR in ../src.
    kernel=2; % rbf
    c_range=-4:4;
    g_range=(1:0.5:2)/D;
    d_range=2;
    e_range=-5:-1;
    [ model, CV_accuracy, best_param, coarse_score_grid, fine_score_grid ] =...
        grid_search_SVR( X_trn, Y_trn, kernel, c_range, g_range, d_range, e_range );
    [ SVR_pred, acc, val ]=svmpredict( Y_tst, X_tst, model );
    SVR_MSE(i)=mean((SVR_pred-Y_tst).^2);
    SVR2_MSE(i)=mean((round(SVR_pred*100)./100-Y_tst).^2);
    
    %% DENFIS
    % NOTE(review): parameter meanings taken on faith from the ECOS-DENFIS
    % toolbox — dthr is presumably the distance threshold for rule-node
    % creation; mlpepochs/mofn control the local model training. Confirm
    % against ../ecos_denfis.
    parameters.trainmode=1;
    parameters.dthr=0.1;
    parameters.mlpepochs=50;
    parameters.mofn=10;
    
    % DENFIS expects the target appended as the last column.
    Tresult=denfis([X_trn, Y_trn], parameters);
    Rresult=denfiss([X_tst, Y_tst], Tresult);
    
    DENFIS_MSE(i)=mean((Rresult.Out'-Y_tst).^2);
    DENFIS2_MSE(i)=mean((round(Rresult.Out'*100)./100-Y_tst).^2);
end