% author: khangvn
% Support-vector model for the weekly interbank interest rate.
% Only technical indicators are used as predictors here -- no
% macroeconomic series (CPI, FDI, ...). Built on LIBSVM.

% % add the LIBSVM MATLAB bindings to the path (required on first run)
% addpath('D:\exim\lib\libsvm-3.18\matlab') 
% addpath('../tools')
clc;
close all;
% [val, text] = xlsread('data_lsLNH.xlsx',1); % load weekly data
load '../data/lsLNH.mat' % weekly data; must provide `val` (rate columns) and `dates` -- TODO confirm contents

% ==========INIT PARAMS==========
% ===== lag orders for the lagged regressor matrix X_t =====
P = 8; % number of lags of delta_R (first difference of the 1M rate)
Q = 4; % number of lags of the 3M-1M term spread (gap)
K = 1; % number of lags of the rate level R

% ===== target construction =====
h = 1;          % forecast horizon (in weeks)
ratio = 1/5;    % fraction of the sample held out as the test set

% ---------- data processing ----------
% val columns: 1 = 1-week, 2 = 1-month, 3 = 3-month rate (inferred from
% the variable names -- TODO confirm against the data file)
ls1W = val(:,1); rls1W = ls1W(2:end) - ls1W(1:end-1); % 1W level and first difference
ls1M = val(:,2); rls1M = ls1M(2:end) - ls1M(1:end-1); % 1M level and first difference
ls3M = val(:,3); rls3M = ls3M(2:end) - ls3M(1:end-1); % 3M level and first difference
gap1M1W = ls1M - ls1W; % 1M-1W term spread (currently unused below)
gap3M1M = ls3M - ls1M; % 3M-1M term spread (used as a predictor)

% Yt_h1 = ls1M(1+h:end) - ls1M(1:end-h);
% Yt_h = Yt_h1(2:end);

% first we consider the following predictors: EMA, delta_laisuat_1M, gap[1M]-gap[1W]
% (EMA features are computed but only used in the commented-out Xt variants)
EMA4_1M =funcEMA(ls1M,4);
EMA12_1M = funcEMA(ls1M,12);
% Xt = [gap1M1W(2:end-h) rls1M(1:end-h) EMA4_1M(2:end-h) EMA12_1M(2:end-h)];
% Xt = [gap3M1M(2:end-h) rls1M(1:end-h) EMA4_1M(2:end-h)];

% ======create a list of lagged variables for regression========
% r_{t+h} = F ( r_t, r_{t-1} ... r_{t-K} )
% lagDeltaR column 1 is the h-step-ahead LEAD of delta_R (the target);
% its remaining columns are lags 0..P of delta_R.
lagDeltaR = lagmatrix(rls1M,[-h 0:1:P]);
lagR = lagmatrix(ls1M,0:K);        % lags 0..K of the 1M level
lagGap = lagmatrix(gap3M1M,0:1:Q); % lags 0..Q of the 3M-1M spread
% column layout: [target, time index (trend feature), delta_R lags, gap lags, R lags].
% lagGap/lagR come from LEVEL series (one row longer than the differenced
% rls1M), so rows 2:end align them with lagDeltaR.
tempX = [lagDeltaR(:,1), (1:size(lagDeltaR,1))', lagDeltaR(:,2:end), lagGap(2:end,:), lagR(2:end,:)];
YX = tempX(max([P,Q,K])+1:end-h,:); % drop NaN rows: head from the lags, tail from the h-step lead
datenew = dates(max([P,Q,K])+1:end-h); % dates aligned with YX -- NOTE(review): assumes length(dates)==length(rls1M); confirm
Yt_h = ((YX(:,1)>0)+0).*2 - 1; % binarize target: +1 if the rate rises, otherwise -1 (zero change maps to -1)
Xt = YX(:,2:end); % predictor matrix (includes the time-index column as a trend feature)

% -------- chronological train/test split (no shuffling) --------
% The last Ntest observations form the hold-out set; everything
% before them is used for training.
[N, D] = size(Xt);
Ntest = ceil(N*ratio);
Ntrain = N - Ntest;

idxTrain = 1:Ntrain;
idxTest = Ntrain+1:N;

Xtrain_t = Xt(idxTrain,:);
Ytrain_t = Yt_h(idxTrain);

Xtest = Xt(idxTest,:);
Ytest = Yt_h(idxTest);


%% scaling data
% Min-max scale each predictor column to [0,1] using statistics computed
% on the TRAINING set only; the same minimums/ranges are then applied to
% the test set (avoids look-ahead bias). Labels are +/-1 and stay unscaled.
minimums = min(Xtrain_t, [], 1);
ranges = max(Xtrain_t, [], 1) - minimums;
% BUG FIX: a constant column has zero range, which previously produced
% NaN/Inf via division by zero; dividing by 1 instead maps it to all zeros.
ranges(ranges == 0) = 1;

autoscale = true;
if(autoscale)
    XtrainScale_t = (Xtrain_t - repmat(minimums, size(Xtrain_t, 1), 1)) ./ repmat(ranges, size(Xtrain_t, 1), 1);
    % the test set is scaled with the TRAINING-set minimums and ranges
    Xscale_test = (Xtest - repmat(minimums, size(Xtest, 1), 1)) ./ repmat(ranges, size(Xtest, 1), 1);
    YtrainScale_t = Ytrain_t; % class labels are not scaled
    Yscale_test = Ytest;
else
    XtrainScale_t = Xtrain_t;
    YtrainScale_t = Ytrain_t;
    Xscale_test = Xtest;
    Yscale_test = Ytest;
end

%%
% ###################################################################
% 5-fold cross-validation grid search over (C, gamma) on the training
% set. autogridCV is a project helper (not shown in this file) that
% returns the best log2(C), log2(gamma) and the best CV score.
% ###################################################################
rng(1); % fix the random seed so the CV fold assignment is reproducible

[bestLog2c,bestLog2g,bestcv] = autogridCV(YtrainScale_t,XtrainScale_t);

%%
% ################################################################
% Test phase: retrain on the full training set with the CV-selected
% hyper-parameters, then classify the hold-out test set.
% ################################################################
% -q: quiet, -c: cost, -g: RBF gamma, -b 1: fit a probability model
param = ['-q -c ', num2str(2^bestLog2c), ' -g ', num2str(2^bestLog2g), ' -b 1'];
% BUG FIX: `param` was built but never used -- the training call rebuilt
% an inconsistent option string inline. Train with `param` itself.
bestModel = svmtrain2(YtrainScale_t, XtrainScale_t, param);
[predict_label, evaluation_result, decision_values] = do_binary_predict(Yscale_test, Xscale_test, bestModel,'-b 1');

% ================================
% ===== Showing the results ======
% ================================

% Color lookup per class: +1 -> row 1, -1 -> row 2
colorList = prism(100);

% true (ground truth) class
trueClassIndex = zeros(N,1);
trueClassIndex(Yt_h==1) = 1;
trueClassIndex(Yt_h==-1) = 2;
colorTrueClass = colorList(trueClassIndex,:);
% predicted class (test set only)
resultClassIndex = zeros(length(predict_label),1);
resultClassIndex(predict_label==1) = 1;
resultClassIndex(predict_label==-1) = 2;
colorResultClass = colorList(resultClassIndex,:);

% Reduce the D-dimensional predictors to 2D for visualization
distanceMatrix = pdist(Xt,'euclidean');
newCoor = mdscale(distanceMatrix,2);

% Plot the whole data set, colored by true class
x = newCoor(:,1);
y = newCoor(:,2);
patchSize = 30;
colorTrueClassPlot = colorTrueClass;
figure; scatter(x,y,patchSize,colorTrueClassPlot,'filled');
title('whole data set');

testIndex = zeros(N,1); testIndex(Ntrain+1:N) = 1;
trainIndex = zeros(N,1); trainIndex(1:Ntrain) = 1;
% Plot the test data; marker size reflects prediction confidence.
x = newCoor(testIndex==1,1);
y = newCoor(testIndex==1,2);
% BUG FIX: the original used `prob_values`, which is never defined (it only
% appeared in a commented-out svmpredict call) and crashed here. Use the
% values returned by do_binary_predict instead.
% NOTE(review): abs() and the floor of 1 keep scatter sizes strictly
% positive in case these are signed decision values -- confirm semantics
% of do_binary_predict's third output.
patchSize = max(80*max(abs(decision_values),[],2), 1);
colorTrueClassPlot = colorTrueClass(testIndex==1,:);
figure; hold on;
scatter(x,y,2*patchSize,colorTrueClassPlot,'o','filled');
scatter(x,y,patchSize,colorResultClass,'o','filled');
% Plot the training set (hollow markers, ground-truth colors)
x = newCoor(trainIndex==1,1);
y = newCoor(trainIndex==1,2);
patchSize = 30;
colorTrueClassPlot = colorTrueClass(trainIndex==1,:);
scatter(x,y,patchSize,colorTrueClassPlot,'o');
title('classification results');
%%
% Test-set per-class accuracies (labels are strictly +/-1, see Yt_h):
%   p11 = sensitivity: fraction of true "up" weeks predicted up
%   p00 = specificity: fraction of true "down" weeks predicted down
isUp = (Yscale_test == 1);
p11 = sum((predict_label == 1) & isUp) / sum(isUp);
p00 = sum((predict_label == -1) & ~isUp) / sum(~isUp);
p = [(p11+p00)/2, p11, p00] % [balanced accuracy, sensitivity, specificity]; displayed deliberately