% author: khangvn
% Support vector regression (epsilon-SVR via LIBSVM) for forecasting the
% weekly interbank interest rate. Only technical indicators are used as
% predictors here -- no macroeconomic series (CPI, FDI, ...).

% % add the LIBSVM MATLAB bindings to the path (needed once per session)
% addpath('D:\exim\lib\libsvm-3.18\matlab') 
% addpath('../tools')
clc;
close all;
% [val, text] = xlsread('data_lsLNH.xlsx',1); % alternative: load the weekly data from Excel
load '../data/lsLNH.mat' % loads the weekly data; expected to provide `val` (rate matrix) and `dates` -- TODO confirm variable names in the .mat file

% ==========INIT PARAMS
% ===== lag orders used to build the lagged design matrix X_t ====
P = 8; %lag delta_R: number of lags of the weekly rate change
Q = 4; %lag gap:     number of lags of the term-structure gap
K = 1; %lag R:       number of lags of the rate level

% ===== forecast target Y_{t+h} ====
h = 1;          % forecast horizon (in weeks)
ratio = 1/5;    % fraction of the sample held out (at the end) as the test set

epsilon = 2;    % epsilon-tube width for epsilon-SVR (libsvm '-p' option)

% data processing
% Columns of `val` (judging by the variable names): 1 = 1-week rate,
% 2 = 1-month rate, 3 = 3-month rate -- TODO confirm against the source data.
% rls* are first differences (weekly changes); gap* are term-structure spreads.
ls1W = val(:,1); rls1W = ls1W(2:end) - ls1W(1:end-1);
ls1M = val(:,2); rls1M = ls1M(2:end) - ls1M(1:end-1);
ls3M = val(:,3); rls3M = ls3M(2:end) - ls3M(1:end-1);
gap1M1W = ls1M - ls1W; % 1M-minus-1W spread (only used in the commented-out Xt variants)
gap3M1M = ls3M - ls1M; % 3M-minus-1M spread (used as a predictor below)

% Yt_h1 = ls1M(1+h:end) - ls1M(1:end-h);
% Yt_h = Yt_h1(2:end);

% first we consider the following predictors: EMA, delta_laisuat_1M, gap[1M]-gap[1W]
EMA4_1M =funcEMA(ls1M,4);    % 4-week exponential moving average of the 1M rate
EMA12_1M = funcEMA(ls1M,12); % 12-week EMA (only used in the commented-out Xt variants)
% Xt = [gap1M1W(2:end-h) rls1M(1:end-h) EMA4_1M(2:end-h) EMA12_1M(2:end-h)];
% Xt = [gap3M1M(2:end-h) rls1M(1:end-h) EMA4_1M(2:end-h)];

% ======create a list of lagged variables for regression========
% r_{t+h} = F ( r_t, r_{t-1} ... r_{t-K} )
% lagmatrix with lag -h yields the LEAD Y_{t+h} in column 1;
% lags 0..P of delta_R fill the remaining columns.
lagDeltaR = lagmatrix(rls1M,[-h 0:1:P]);
lagR = lagmatrix(ls1M,0:K);
lagGap = lagmatrix(gap3M1M,0:1:Q);
% rls1M is one row shorter than ls1M/gap3M1M (first differencing), so the
% first row of the level-based lag matrices is dropped to align time indices.
tempX = [lagDeltaR, lagGap(2:end,:), lagR(2:end,:)];
% Trim the NaN edges introduced by lagmatrix: the first max(P,Q,K) rows
% (missing lags) and the last h rows (missing lead).
YX = tempX(max([P,Q,K])+1:end-h,:);
datenew = dates(max([P,Q,K])+1:end-h); % matching date stamps (`dates` presumably from the .mat file -- TODO confirm)
Yt_h = YX(:,1);   % target: lead-h value of the weekly 1M-rate change
Xt = YX(:,2:end); % predictors: lagged changes, gaps, and levels


% -------- chronological train/test split (no shuffling) --------
[N, D] = size(Xt);              % N observations, D predictors

% The last `ratio` fraction of the sample is held out for testing.
Ntest = ceil(N*ratio);
Ntrain = N - Ntest;

% Training window: the earliest Ntrain observations.
Xtrain_t = Xt(1:Ntrain, :);
Ytrain_t = Yt_h(1:Ntrain);

% Test window: the final Ntest observations.
Xtest = Xt(end-Ntest+1:end, :);
Ytest = Yt_h(end-Ntest+1:end);


% model = svmtrain2(Yt_h(2:end), Xt, '-s 3 -c 1 -g 0.07');
% [predict_label, accuracy, dec_values] = svmpredict(heart_scale_label, heart_scale_inst, model); % test the training data

%% scaling data
% Min-max scale predictors and target to [0, 1] using statistics computed on
% the TRAINING set only, then apply the same transform to the test set (this
% avoids look-ahead bias). FIX: a constant column has zero range, which made
% the original produce NaN/Inf; such ranges are replaced by 1 so constant
% columns scale to 0 instead.
minimums = min(Xtrain_t, [], 1);
ranges = max(Xtrain_t, [], 1) - minimums;
ranges(ranges == 0) = 1;   % guard: constant predictor column

minY = min(Ytrain_t, [], 1);
rangesY = max(Ytrain_t, [], 1) - minY;
rangesY(rangesY == 0) = 1; % guard: degenerate constant target

autoscale = true;
if(autoscale)
    % helper: column-wise (x - mn) ./ rg, replicated over all rows of X
    scaleRows = @(X, mn, rg) (X - repmat(mn, size(X, 1), 1)) ./ repmat(rg, size(X, 1), 1);

    XtrainScale_t = scaleRows(Xtrain_t, minimums, ranges);
    YtrainScale_t = scaleRows(Ytrain_t, minY, rangesY);

    % note: the test set is scaled with the TRAINING min/range, not its own
    Xscale_test = scaleRows(Xtest, minimums, ranges);
    Yscale_test = scaleRows(Ytest, minY, rangesY);
else
    XtrainScale_t = Xtrain_t;
    YtrainScale_t = Ytrain_t;
    Xscale_test = Xtest;
    Yscale_test = Ytest;
end

%%
% ###################################################################
% 5-fold cross validation (grid search) to get the best (C, gamma)
% ###################################################################
rng(1);   % fix the RNG so the CV fold assignment is reproducible
% BUG FIX: the original built the option string as [' -s 3 -p ' epsilon],
% which concatenates the NUMERIC epsilon as a character code (char(2)),
% not the text '2'. sprintf formats the value correctly.
[bestLog2c,bestLog2g,bestcv] = autogrid(YtrainScale_t,XtrainScale_t, sprintf(' -s 3 -p %f', epsilon))

%%
% performance of test set
% bestLog2g = 8; bestLog2c = -3;
% BUG FIX: the original option string read '... -g %f p %f' -- the missing
% dash made 'p' an invalid token, so the intended epsilon never reached
% libsvm. '-s 3' = epsilon-SVR, '-c' = cost, '-g' = RBF gamma, '-p' = tube.
model = svmtrain2(YtrainScale_t, XtrainScale_t, sprintf('-s 3 -c %f -g %f -p %f', 2^bestLog2c, 2^bestLog2g, epsilon));

% MSE for training samples (in-sample fit, in scaled units)
[y_hat1, Acc1, projection1] = svmpredict(YtrainScale_t, XtrainScale_t, model);
MSE_Train = mean((y_hat1-YtrainScale_t).^2);
NRMS_Train = sqrt(MSE_Train) / std(YtrainScale_t);
Rsquare_Train = 1 - sum((y_hat1-YtrainScale_t).^2)./ sum((YtrainScale_t - mean(YtrainScale_t)).^2);

% Rolling-origin (walk-forward) evaluation: for each test point, refit on
% all data observed so far, then predict h steps ahead.
y_hat = zeros(Ntest+1-h,1);
for i = Ntrain : N-h
    % expanding training window: original train set + test points seen so far
    Ytrain_temp = [YtrainScale_t;Yscale_test(1:i-Ntrain)];
    Xtrain_temp = [XtrainScale_t;Xscale_test(1:i-Ntrain,:)];
    % CONSISTENCY FIX: pass '-p epsilon' here too, so the walk-forward models
    % use the same epsilon-tube as the grid search / final model above
    % (the original omitted it and silently fell back to libsvm's default).
    modeltemp = svmtrain2(Ytrain_temp, Xtrain_temp, sprintf('-q -s 3 -c %f -g %f -p %f', 2^bestLog2c, 2^bestLog2g, epsilon));
    [y_hat(i-Ntrain+1), Acc, projection] = svmpredict(Yscale_test(i+h-Ntrain), Xscale_test(i+h-Ntrain,:), modeltemp);
end

% MSE for test samples

% Out-of-sample error metrics on the scaled test targets.
testResid = y_hat - Yscale_test;
MSE_Test = mean(testResid.^2);
NRMS_Test = sqrt(MSE_Test) / std(Yscale_test);
SStot = sum((Yscale_test - mean(Yscale_test)).^2);  % total sum of squares
Rsquare_Test = 1 - (sum(testResid.^2) ./ SStot);


% Display: row 1 = test metrics, row 2 = training metrics.
[MSE_Test, NRMS_Test, Rsquare_Test; MSE_Train, NRMS_Train,Rsquare_Train]
% %%
% X = 0:0.01:1;
% X = X';
% y = ones(length(X), 1);
% y_est = svmpredict(y, X, model);
% 
% figure(4)
% hold all;
% plot(XtrainScale_t, YtrainScale_t, 'ko')
% plot(Xscale_test, Yscale_test, 'kx')
% plot(X, y_est, 'r-');
% 
% legend('Training','Test','Model')
% % y1 = max([YtrainScale_t; Yscale_test]);
% % y2 = min([YtrainScale_t; Yscale_test]);
% % axis([0 1 y2 y1]);
% axis tight
% hold off
