%% Environment setup
warning off;            % suppress warning messages
close all;              % close any open figure windows
% clear;                % clear workspace variables
% clc;

% Add the project subfolders to the MATLAB search path
% (kept as separate calls to preserve path precedence order)
addpath('OAVMD\');
addpath('OAVMD\funs');
addpath('ModelForResid\');
addpath('intervalpreuseOA\');
addpath('ModelForTest\');

% poolobj = gcp('nocreate');
% if ~isempty(poolobj)
%     delete(poolobj);
% end

%% Load data
% The CSV was produced by an STL decomposition in savestlresult.py.
data = readtable('data_decomposednew.csv');
num_row = height(data);         % total number of samples in the data

% Extract the decomposed components as column vectors
timestamp = data.timestamp;
seasonal  = data.seasonal;
trend     = data.trend;
resid     = data.residual;

%% 1. Trend component
% BUG FIX: the original code contained an incomplete statement
% `num_train = min()` (a syntax error that aborts the script) and left
% `num_hourpoint = 14;` commented out even though num_hourpoint is used
% below and in every later section. The dead, unused `max_train_length`
% was also removed.
num_hourpoint = 14;         % number of time points per day
num_train_set = 731;        % number of training days (index 731 corresponds to 2022-07-01)
jieshuq = 13;               % order parameter passed to modelfortrend -- TODO confirm meaning
ceshiq  = 400;              % test-length parameter passed to modelfortrend -- TODO confirm meaning

% Trend forecast on the test set plus the in-sample training error
[Trend_test_ori, Trend_test_pre, Trend_train_err] = ...
    modelfortrend(trend, num_hourpoint, num_train_set, jieshuq, ceshiq);
% Discard the first num_hourpoint rows (one day) of the training error
Trend_train_err(1:num_hourpoint,:) = [];

%% 2. Seasonal component
% Number of samples in the training portion (days * points per day)
num_train1test = num_train_set * num_hourpoint;

% The seasonal "forecast" over the test range is simply the decomposed
% seasonal values themselves (no model is fitted for this component).
Seasonal_test_ori = seasonal(num_train1test + 1 : end);
Seasonal_test_pre = Seasonal_test_ori;

%% 3. Residual component

% 3.1 WOA-optimized VMD (the optimization itself is commented out;
%     the fixed values found by a previous run are used below).
% K: number of decomposition modes \ alpha: penalty factor
% signal = resid;
% SearchAgents_no = 10;         % population size (number of search agents)
% Max_iter = 10;                % maximum number of iterations
% dim = 2;                      % two parameters (K and alpha) are optimized
% lb = [5,100];                 % lower bounds of the two parameters
% ub = [10,10000];              % upper bounds of the two parameters
% [K_vmd,alpha] = OAVMD(signal,SearchAgents_no, Max_iter, dim, lb, ub);
% fprintf('The value of K for the VMD decomposition after optimization: %d    alpha: %d\n', K_vmd, alpha);

K_vmd = 6;                      % number of VMD modes (from the optimization run)
alpha = 1.579887e+02;           % VMD penalty factor (from the optimization run)

fprintf('The value of K for the VMD decomposition after optimization: %d    alpha: %d\n', K_vmd, alpha);

% Decompose the residual into K_vmd intrinsic mode functions plus the
% VMD remainder signal.
[imf,resid_vmd,info] = vmd(resid,'NumIMF',K_vmd,'PenaltyFactor',alpha);

% BUG FIX: the original hard-coded the column range as 1:6 instead of
% 1:K_vmd, which silently breaks if K_vmd is changed. Concatenating the
% IMFs with the remainder produces the same K_vmd+1 column layout.
data_resid_aftvmd = [imf, resid_vmd];

% 3.2 Predict each VMD mode with a BiTCN-BiGRU-Attention model
% and sum the per-mode forecasts into the total residual forecast.
[num_test,~] = size(Trend_test_ori);
[~,num_train] = size(Trend_train_err);

% Accumulators for the summed test forecast and training error
% Resid_test_ori = zeros(num_test,1);
Resid_test_pre = zeros(num_test,1);
Resid_train_err = zeros(num_train,1);

num_resid_buchang = 98;       % step/compensation length passed to modelforresid -- TODO confirm meaning
% NOTE(review): num_train is reassigned here AFTER Resid_train_err was
% already allocated with the previous value; confirm both lengths agree.
num_train = num_train_set * num_hourpoint - 2;        % training-set length (731 days = up to 2022-07-01)
options = trainingOptions('adam', ...                 % Adam optimizer
    'MaxEpochs', 1, ...                            % max training epochs (NOTE(review): 1 looks like a debug value -- confirm)
    'GradientThreshold', 1, ...                       % gradient clipping threshold
    'InitialLearnRate', 0.01, ...         % initial learning rate
    'LearnRateSchedule', 'piecewise', ...             % piecewise learning-rate schedule
    'LearnRateDropPeriod', 60, ...
    'LearnRateDropFactor',0.1, ...                    % learning-rate drop factor
    'L2Regularization', 0.0001, ...         % L2 regularization coefficient
    'ExecutionEnvironment', 'gpu',...                 % train on the GPU
    'Verbose', 0, ...
    'Plots', 'none');                    % no training-progress plot

% Predict each IMF (columns 1..K_vmd) plus the VMD remainder
% (column K_vmd+1) separately, accumulating the results.
for i=1:K_vmd+1
    [resid_test_ori, resid_test_pre, resid_train_err] = modelforresid(data_resid_aftvmd(:,i), num_resid_buchang, num_train_set, options, num_hourpoint);   % test-set prediction for this mode
    % Resid_test_ori = Resid_test_ori + resid_test_ori;
    Resid_test_pre = Resid_test_pre + resid_test_pre;
    resid_train_err = resid_train_err';
    Resid_train_err = Resid_train_err + resid_train_err;
end

%% 4. Combine all point-forecast results; prepare interval prediction (train/test)

% 4.1 Total training error and test-range reconstruction
% Training error = trend training error + residual training error
train_error = (Trend_train_err + Resid_train_err)';
% Ground truth over the test range: sum of the three STL components
test_ori = (seasonal(num_train1test+1:end) + trend(num_train1test+1:end) + resid(num_train1test+1:end))';
% Point forecast: trend + seasonal + residual forecasts
test_pre = (Trend_test_pre' + Seasonal_test_pre + Resid_test_pre)';
test_pre(test_pre<0) = 0;       % clamp negative forecasts to zero

% Force both series into column vectors of the same orientation
test_ori = test_ori(:);
test_pre = test_pre(:);

% Rows of the original data that correspond to the test range
test_indices = num_train1test + 1 : num_train1test + numel(test_ori);
test_timestamp = timestamp(test_indices);
test_seasonal = seasonal(test_indices);
test_trend = trend(test_indices);
test_residual = resid(test_indices);

% Sanity check: all columns of the output table must have equal length
assert(length(test_timestamp) == length(test_seasonal) && ...
       length(test_seasonal) == length(test_trend) && ...
       length(test_trend) == length(test_residual) && ...
       length(test_residual) == length(test_ori) && ...
       length(test_ori) == length(test_pre), ...
       '变量长度不一致！');

% Build the table with timestamp and original decomposed columns
datatestdata = table(test_timestamp, test_seasonal, test_trend, test_residual, test_ori, test_pre, ...
    'VariableNames', {'timestamp', 'seasonal', 'trend', 'residual', 'test_ori', 'test_pre'});
% BUG FIX: MATLAB single-quoted strings do not process escape sequences,
% so 'result\\testdata.csv' contained a literal double backslash and was
% Windows-only. Use fullfile for a portable path, and make sure the
% output directory exists so writetable does not error.
if ~exist('result', 'dir')
    mkdir('result');
end
writetable(datatestdata, fullfile('result', 'testdata.csv'));

% Extract the original data rows aligned with the training error
train_error = train_error(:); % ensure column vector
train_error_length = numel(train_error);
train_timestamp = timestamp(1:train_error_length);
train_seasonal = seasonal(1:train_error_length);
train_trend = trend(1:train_error_length);
train_residual = resid(1:train_error_length);

% Sanity check: all columns of the output table must have equal length
assert(length(train_timestamp) == length(train_seasonal) && ...
       length(train_seasonal) == length(train_trend) && ...
       length(train_trend) == length(train_residual) && ...
       length(train_residual) == length(train_error), ...
       '训练误差变量长度不一致！');

% Build the table with timestamp and original decomposed columns
datatrainerr = table(train_timestamp, train_seasonal, train_trend, train_residual, train_error, ...
    'VariableNames', {'timestamp', 'seasonal', 'trend', 'residual', 'train_error'});
% BUG FIX: 'result\\trainerr.csv' contained a literal double backslash
% (MATLAB single quotes do not escape). Use fullfile for a portable path
% and ensure the output directory exists before writing.
if ~exist('result', 'dir')
    mkdir('result');
end
writetable(datatrainerr, fullfile('result', 'trainerr.csv'));

%% 4.2 KDE interval prediction at 85% / 90% / 95% confidence
% Re-read the saved results so this section can also be run standalone.
% BUG FIX: paths used literal double backslashes (single-quoted MATLAB
% strings do not escape); fullfile builds the path portably.
testdata = readtable(fullfile('result', 'testdata.csv'));
test_ori = testdata.test_ori;
test_pre = testdata.test_pre;
trainerr = readtable(fullfile('result', 'trainerr.csv'));
train_error = trainerr.train_error;

% Point-forecast error metrics for the test set
calc_error(test_pre,test_ori)

% Adaptive combined interval prediction
E = 4.57;     % parameter passed to Acombineintervalpre -- TODO confirm meaning
B = 1000;     % number of Bootstrap samples
Acombineintervalpre(test_ori, test_pre, train_error, E, B)

fprintf('\n')