classdef MyModel< handle
    properties (Constant)
        %% Regression model definitions
        %%  mypool = parpool(3)   ;  %%delete(gcp('nocreate')).
%         bParallel = true  %% parallel option for RF (TreeBagger)
        bParallel = false %% RF 的并行选项
%         bRunTime = true   %% RF 的运行时间显示
        bRunTime = false  %% RF 的运行时间显示
        
    end
    methods(Static = true)
        function ret = DoOneModel(x_data, y_data, modelType, bShow, arg)
            % Dispatch one regression run to the model chosen by modelType:
            %   1 = per-response PLSR, 2 = SVR, 3 = random forest, 4 = LSBoost.
            % arg carries the cvpartition (arg.cc) plus model-specific options;
            % bShow toggles plotting inside the called routines.
            kfold = arg.cc.NumTestSets;
            switch modelType
                case 1
                    ret = MyModel.DoEachRegression(x_data, y_data, arg);
                case 2
                    % DoEachSVR takes (X, y, bShow, type, kfold, cc, arg); the
                    % previous call omitted 'type', shifting kfold into 'type'
                    % and the cvpartition into 'kfold' (which is compared > 1
                    % there and would error). Type 2 (gaussian kernel) matches
                    % the CVPartition path used in DoEachSVR.
                    % NOTE(review): SVR was not used in the paper -- verify the
                    % kernel choice before relying on this branch.
                    ret = MyModel.DoEachSVR(x_data, y_data, bShow, 2, kfold, arg.cc, arg);
                case 3
                    ret = MyModel.DoEachRF(x_data, y_data, bShow, kfold, arg.cc, arg);
                case 4
                    ret = MyModel.DoEachLSB(x_data, y_data, bShow, kfold, arg.cc, arg);
                otherwise
                    % error() instead of disp(): the old code left 'ret'
                    % unassigned, producing a confusing failure at the caller.
                    error('DoOneModel: unknown modelType %d', modelType);
            end
        end
        
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        function ret = DoEachRFwithBest(X, y, bShow)
            % Per-response random-forest regression with backward feature
            % elimination: repeatedly drop predictors whose OOB permuted
            % importance is <= 0 until every remaining predictor is useful,
            % then predict with the surviving columns.
            % X : n-by-p predictors, y : n-by-q responses.
            % Returns the error struct from Helper.CalcAllErrors.
            lenY = size(y, 2);
            yfits = zeros(size(y));
            for i = 1 : lenY
                disp(i)
                yy = y(:, i);

                XX = X;                 % working predictor matrix
                kk = 1:size(X, 2);      % original column indices of XX
                while true
                    tree = TreeBagger(150, XX, yy, 'Method', 'regression', ...
                        'OOBPredictorImportance','On', 'MinLeafSize',3);
                    rmse = rms(oobError(tree))  % no semicolon: progress display

                    idxvar = find(tree.OOBPermutedPredictorDeltaError>0);
                    % Guard: if NO predictor has positive importance, the old
                    % code shrank XX to empty and TreeBagger crashed on the
                    % next pass. Keep the current model instead.
                    if isempty(idxvar)
                        disp(kk);
                        yfits(:, i) = predict(tree, XX);
                        break;
                    end
                    kk = kk(idxvar);
                    if length(idxvar) == size(XX, 2)
                        % converged: every remaining predictor is useful
                        disp(kk);
                        yfits(:, i) = predict(tree, XX);
                        break;
                    end
                    XX =  XX(:, idxvar);
                end
            end
            ret = Helper.CalcAllErrors(y, yfits, bShow);
        end
        
        function ret = DoEachRFwithBest2(X, y, bShow)
            % Per-response random-forest regression with backward feature
            % elimination, keeping the iteration whose forest had the minimum
            % OOB RMSE (forests, band sets and RMSEs are recorded each pass).
            % Returns the error struct from Helper.CalcAllErrors.
            lenY = size(y, 2);
            yfits = zeros(size(y));
            for i = 1 : lenY
                disp(i)
                yy = y(:, i);

                XX = X;                 % working predictor matrix
                kk = 1:size(X, 2);      % original column indices of XX
                rmse = [];
                trees = {};
                jj = 1;
                kkSave = {};
                while true
                    tree = TreeBagger(200, XX, yy, 'Method', 'regression', ...
                        'OOBPredictorImportance','On', 'MinLeafSize',3);
                    rmse1 = rms(oobError(tree));
                    rmse = [rmse; rmse1]; %#ok<AGROW>

                    trees{jj} = tree;  %#ok<AGROW>
                    kkSave{jj} = kk;   %#ok<AGROW>
                    jj = jj + 1;

                    idxvar = find(tree.OOBPermutedPredictorDeltaError > 0);
                    % Stop when elimination removes nothing (converged) or
                    % would remove everything (no useful predictor left).
                    % The old code tested a stale idxvar initialized to all
                    % columns, so it always stopped after a single pass and
                    % never actually searched for the best iteration.
                    if length(idxvar) == size(XX, 2) || isempty(idxvar)
                        [~, ins] = min(rmse);   % best iteration by OOB RMSE
                        kk = kkSave{ins};
                        tree = trees{ins};
                        XX = X(:, kk);
                        disp(kk);
                        yfits(:, i) = predict(tree, XX);
                        break;
                    end

                    kk = kk(idxvar);
                    XX = XX(:, idxvar);
                end
            end
            ret = Helper.CalcAllErrors(y, yfits, bShow);
        end
        
        
        %% LSBoost 2023-08-09 least-squares boosting
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        function ret = DoEachLSB(X, y, bShow, kfold, cc, arg)
            % LSBoost regression, one ensemble per response column of y.
            % kfold == 0 : fit on all rows and predict the same rows
            %              (resubstitution).
            % kfold >  0 : cross-validate using the cvpartition cc; when
            %              arg.feaType exists, only response arg.metal is
            %              modelled and per-fold band selection is applied.
            % Returns the error struct from Helper.CalcAllErrors.
            lenY = size(y, 2);
            yfits = zeros(size(y));
% % %             tic
            feaFlag = exist('arg', 'var'); %%2024-02-08 feature selection added
            if feaFlag == true
                feaFlag = isfield(arg, 'feaType');%%2024-02-10 feature-selection flag
            end
            rng(3333)   % fixed seed so boosting runs are reproducible
            t = templateTree('MinLeafSize', 5);
%             t = templateTree('MinLeafSize', 3); %%changed 2024-07-29
            for i = 1 : lenY
                if kfold == 0
                    yy = y(:, i);
                    % NOTE(review): this branch uses fitrensemble defaults, not
                    % the LSBoost method/template/learn-rate used in the CV
                    % branch below -- confirm that is intended.
                    tree = fitrensemble(X, yy);
                    
                    yfits(:, i) = predict(tree, X);
                else
                    if  feaFlag == true %%2024-02-08 feature selection added
                        metal = arg.metal;
                        if metal ~= i %%only the chosen metal element is modelled
                            continue;
                        end
                    end                    

                    for kk = 1 : kfold
                        trIdx = cc.training(kk);
                        teIdx = cc.test(kk);
                        ytrain = y(trIdx, i);
                        xtest = X(teIdx, :);
                        xtrain = X(trIdx, :);
                        
                        if feaFlag == true %%2024-02-08 band selection; the call below passes 1 for that mode
                            kopt = FeaSelect.GetOptimalFeature(xtrain, ytrain, arg.wave, arg.feaType, 1, arg.bShow);
                            xtrain = xtrain(:, kopt);
                            xtest = xtest(:, kopt);
                        end
                        
                        tree = fitrensemble(xtrain, ytrain, ...
                                            'Method', 'LSBoost', ...
                                            'Learners', t,  'LearnRate', 0.5);
                        %         'LearnRate', 0.5, ...         , ...,'Options',paroptions
                        %                                         'NumLearningCycles', 150);
                        %                        tree = fitrensemble(xtrain, ytrain, 'Method', 'LSBoost', ...
                        %                                     'NumLearningCycles', 100, 'OptimizeHyperparameters','auto');
                        %                        tree = fitrensemble(xtrain, ytrain, 'Method', 'LSBoost', ...
                        %                                    'OptimizeHyperparameters', ...
                        %                                    {'NumLearningCycles', 'LearnRate', 'MinLeafSize'});
                        %%OptimizeHyperparameters','auto',                        
                        
                        yfits(teIdx, i) = predict(tree, xtest);
                    end
                end                
            end
            ret = Helper.CalcAllErrors(y, yfits, bShow);
% %             toc
        end
        
        
        
        %% TreeBagger
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        function ret = DoEachRF(X, y, bShow, kfold, cc, arg)
            % Random-forest (TreeBagger) regression, one forest per response
            % column of y.
            % kfold == 0 : fit on all rows and predict the same rows.
            % kfold >  0 : cross-validate with the cvpartition cc; when
            %              arg.feaType exists, only response arg.metal is
            %              modelled, with per-fold band selection.
            % Parallel training and tic/toc timing are gated by the class
            % constants bParallel / bRunTime.
            if MyModel.bParallel == true
                paroptions = statset('UseParallel',true);
            else
                paroptions = statset();
            end
            if MyModel.bRunTime == true
                tic
            end
            
            lenY = size(y, 2);
            yfits = zeros(size(y));
            
%             rng('default')
            rng(8765)%% this seed gave the best RF+MSC result
            feaFlag = exist('arg', 'var'); %%2024-02-08 feature selection added
            if feaFlag == true
                feaFlag = isfield(arg, 'feaType');%%2024-02-10 feature-selection flag
            end
            for i = 1 : lenY
                if kfold == 0
                    yy = y(:, i);
% %                     tree = TreeBagger(100, X, yy, 'Method', 'regression', ...
% %                         'OOBPredictorImportance','On', 'MinLeafSize',3);
                    tree = TreeBagger(100, X, yy, 'Method', 'regression', ...
                                      'MinLeafSize',3,'Options',paroptions);                   
% %                     tree = TreeBagger(100, X, yy, 'Method', 'regression', 'Options',paroptions);
                    yfits(:, i) = predict(tree, X);
                else
                    if  feaFlag == true %%2024-02-08 feature selection added
                        metal = arg.metal;
                        if metal ~= i %%only the chosen metal element is modelled
                            continue;
                        end
                    end
                    
                    for kk = 1 : kfold
                        trIdx  = cc.training(kk);
                        teIdx  = cc.test(kk);
                        ytrain = y(trIdx, i);
                        xtrain = X(trIdx, :);
                        xtest  = X(teIdx, :);
                        
                        if feaFlag == true %%2024-02-08 band selection; the call below passes 1 for that mode
                            kopt = FeaSelect.GetOptimalFeature(xtrain, ytrain, arg.wave, arg.feaType, 1, arg.bShow);
                            xtrain = xtrain(:, kopt);
                            xtest = xtest(:, kopt);
                        end
                        %                        tree = TreeBagger(100, xtrain, ytrain, 'Method', 'regression', ...
                        %                                         'OOBPredictorImportance','On', 'MinLeafSize',3);
                        %
% %                         tree = TreeBagger(100, xtrain, ytrain, 'Method', 'regression', ...
% %                             'MinLeafSize',3);     
%                         tree = TreeBagger(100, xtrain, ytrain, 'Method', 'regression', 'MinLeafSize',3, 'Options', paroptions);   
                        tree = TreeBagger(120, xtrain, ytrain, ...
                                         'Method', 'regression',...
                                         'MinLeafSize', 3, ...
                                         'Options', paroptions);   
                        
                        yfits(teIdx, i) = predict(tree, xtest);
                    end
                end                
            end
            ret = Helper.CalcAllErrors(y, yfits, bShow);
            if MyModel.bRunTime == true
               toc 
            end           
        end
        
        
        %% Single regression tree (fitrtree), resubstitution fit per response
        function ret = DoEachTree(X, y, bShow)
            % Fit one decision tree per column of y and score the in-sample
            % predictions via Helper.CalcAllErrors.
            nResponses = size(y, 2);
            predictions = zeros(size(y));
            for col = 1 : nResponses
                mdl = fitrtree(X, y(:, col));
                predictions(:, col) = predict(mdl, X);
            end
            ret = Helper.CalcAllErrors(y, predictions, bShow);
        end
        
        %% Run SVR
        function [ret, yfits, mse] = DoEachSVR(X, y, bShow, type, kfold, cc, arg)
            % Support-vector regression, one model per response column of y.
            % type selects the kernel:
            %   1 fitrsvm defaults, 2 gaussian, 3 rbf + auto kernel scale,
            %   4 polynomial order 2, 5 polynomial order 3.
            % kfold > 1  : cross-validated loss and per-fold predictions
            %              (types 2-5 use the cvpartition cc; type 1 'Kfold').
            % kfold == 1 : fit and predict on all rows.
            % NOTE(review): with kfold == 0 a model is trained but neither
            % predict branch executes, so yfits stays all zeros -- confirm
            % callers never pass 0.
            lenY = size(y, 2);
            
            Mdl = cell(lenY, 1);            
            yfits = zeros(size(y));
            mse = zeros(lenY, 1);
            
            feaFlag = exist('arg', 'var'); %%2024-02-08 feature selection added
          
            for i = 1 : lenY    
                if  feaFlag == true %2024-02-08 band selection. Not quite right: it should use the training split only, but SVR was not used in the paper, so it was left as-is
                    metal = arg.metal;
                    if metal ~= i %%only the chosen metal element is modelled
                        continue;
                    end
                    % NOTE(review): arg.Wave (capital W) differs from the
                    % arg.wave field used elsewhere in this class -- verify.
                    kopt = FeaSelect.GetOptimalWave(X, y, arg.Wave, arg.feaType, metal);
                    X = X(:, kopt);
                end               
                
                yy = y(:, i);      
                
                if type == 1     
                    if kfold > 1
                        models = fitrsvm(X, yy, 'Kfold', kfold);
                        mse(i) = models.kfoldLoss;
                    else
                        Mdl{i} = fitrsvm(X, yy);
                    end
                elseif type == 2                    
                    if kfold > 1
                        %                        mm = fitrsvm(X, yy,'Standardize',true,'KernelFunction','gaussian', 'Kfold', kfold);
                        models = fitrsvm(X, yy,'Standardize',true,'KernelFunction','gaussian', 'CVPartition' , cc);
                        mse(i) = models.kfoldLoss;
                    else
                        Mdl{i} = fitrsvm(X, yy,'Standardize',true,'KernelFunction','gaussian');
                    end
                    
                elseif type == 3                    
                    if kfold > 1
                        models = fitrsvm(X, yy, 'KernelFunction', ...
                            'rbf','KernelScale','auto', 'Standardize',true, 'CVPartition' , cc);
                        mse(i) = models.kfoldLoss;
                    else
                        Mdl{i} = fitrsvm(X, yy, 'KernelFunction', ...
                                'rbf','KernelScale','auto', 'Standardize',true);
                    end
                elseif type == 4                    
                    if kfold > 1
                        models = fitrsvm(X, yy, 'Standardize',true,'KernelFunction',...
                            'polynomial', 'PolynomialOrder', 2, 'CVPartition' , cc);
                        mse(i) = models.kfoldLoss;
                    else
                        Mdl{i} = fitrsvm(X, yy, 'Standardize',true,'KernelFunction',...
                            'polynomial', 'PolynomialOrder', 2);
                    end
                elseif type == 5

                    %% with PolynomialOrder >= 3 the reported R2 exceeded 1
                    if kfold > 1
                        models = fitrsvm(X, yy, 'Standardize',true,'KernelFunction',...
                            'polynomial', 'PolynomialOrder', 3, 'CVPartition' , cc);
                        mse(i) = models.kfoldLoss;
                    else
                        Mdl{i} = fitrsvm(X, yy, 'Standardize',true,'KernelFunction',...
                            'polynomial', 'PolynomialOrder', 3 );
                    end
                end
                if kfold == 1
                    yfits(:, i) = predict(Mdl{i}, X);
                else
                    for kk = 1 : kfold
                        mm2 = models.Trained{kk};%%use each fold's trained model to predict its own test rows
                        teIdx = cc.test(kk);
                        XX2 = X(teIdx, :);
                        yfits(teIdx, i) = predict(mm2, XX2);
                    end
                end
                %                 disp([num2str(i), ':__number of support vectors:', num2str(sum(Mdl{i}.IsSupportVector))])
            end
            
            ret = Helper.CalcAllErrors(y, yfits, bShow);
        end
        
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        %%% Whole-matrix PLSR (all responses at once), e.g. X 24x204, y 24x6
        function [ret, beta, PCTVAR, MSE] = DoPLSR(X, y, arg)
            % Partial least squares regression over the full response matrix.
            % arg.cv == 0 : resubstitution fit on all rows.
            % arg.cv >  0 : arg.cv-fold CV driven by the cvpartition arg.cc,
            %               then a full-data refit so beta/PCTVAR/MSE describe
            %               the complete model.
            % Returns the error struct from Helper.CalcAllErrors plus the
            % plsregress coefficients, variance explained, and MSE curve.
            nComp = arg.ncomp;
            nFolds = arg.cv;
            part = arg.cc;

            if nFolds == 0
                [~, ~, ~, ~, beta, PCTVAR, MSE] = plsregress(X, y, nComp);
                yhat = [ones(size(X, 1), 1), X] * beta;
            else
                yhat = zeros(size(y));
                for fold = 1 : nFolds
                    trainMask = part.training(fold);
                    testMask = part.test(fold);
                    [~, ~, ~, ~, beta] = plsregress(X(trainMask, :), ...
                                                    y(trainMask, :), nComp);
                    Xtest = X(testMask, :);
                    yhat(testMask, :) = [ones(size(Xtest, 1), 1), Xtest] * beta;
                end
                % Final full-data fit supplies the returned outputs, exactly
                % as before.
                [~, ~, ~, ~, beta, PCTVAR, MSE] = plsregress(X, y, nComp, 'CV', part);
            end
            ret = Helper.CalcAllErrors(y, yhat, arg.bShow);
        end
        %% PLS regression, fitted separately for every response column
        function [ret, mse] = DoEachRegression(X, y, arg)
            % Per-response PLSR.
            % arg.ncomp : number of PLS components (not number of bands)
            % arg.cv    : 0 = resubstitution fit, otherwise the fold count
            % arg.cc    : cvpartition driving the CV loop
            % arg.bEach : plot each response's variance-explained curve
            % Optional arg.feaType enables per-fold band selection for the
            % single response arg.metal (other responses are skipped).
            % Returns the error struct and the per-response CV MSE curves.
            ncomp = arg.ncomp;
            bShow = arg.bShow;
            cv = arg.cv;
            cc = arg.cc;
            bEach = arg.bEach;
            
            feaFlag = isfield(arg, 'feaType');%%2024-02-10 feature-selection flag
            
            lenY = size(y, 2);
            bands = size(X, 2);
            beta = zeros(bands+1, lenY);
            yfits = zeros(size(y));
            mse = zeros(lenY, ncomp+1);
            for i = 1 : lenY
                yy = y(:, i);
                if cv == 0
                    [~,~,~,~, bb, PCTVAR] = plsregress(X, yy, ncomp);
                    % Fix: the old code computed yfit = XX*beta with 'beta'
                    % still all zeros and discarded the result, so yfits
                    % stayed zero on the resubstitution path.
                    yfits(:, i) = [ones(size(X,1),1), X] * bb;
                else
                    if  feaFlag == true %%2024-02-08 feature selection added
                        metal = arg.metal;
                        if metal ~= i %%only the chosen metal element is modelled
                            continue;
                        end
                    end
                    
                    for kk = 1 : cv
                        trIdx = cc.training(kk);
                        teIdx = cc.test(kk);
                        ytrain = y(trIdx, i);
                        xtrain = X(trIdx, :);
                        xtest = X(teIdx, :);
                        
                        kklen = ncomp;
                        if feaFlag == true %%2024-02-08 per-fold band selection; the call passes 1 for that mode
                            kopt = FeaSelect.GetOptimalFeature(xtrain, ytrain, ...
                                                               arg.wave, arg.feaType, ...
                                                               1, arg.bShow);
                            xtrain = xtrain(:, kopt);
                            xtest = xtest(:, kopt);
                            
                            % cannot use more components than selected bands
                            if ncomp > length(kopt)
                                kklen = length(kopt);
                            end
                        end
                        
                        [~,~,~,~, beta2] = plsregress(xtrain, ytrain, kklen);
                        yfits(teIdx, i) = [ones(size(xtest,1),1), xtest] * beta2;
                    end
                    
                    % Full-data CV fit supplies bb/PCTVAR/MSE for reporting.
                    [~,~,~,~, bb, PCTVAR, MSE] = plsregress(X, yy, ncomp, 'CV', cc);%%2023-08-09
                    mse(i, :) = MSE(2, :);
                end
                beta(:, i) = bb;
                
                if bEach == true%% show each response's PLS variance curve
                    Helper.PlotPLSRvar(PCTVAR, i==1);
                end
            end
            if bEach == true
                Helper.AddLegends()
            end
            
            ret = Helper.CalcAllErrors(y, yfits, bShow);
        end
        
        %% Multiple linear regression per response on the first ncomp selected bands
        function ret = DoEachMLSRegression(X, y, ncomp, kopt, bShow)
            % Ordinary least squares on the ncomp columns of X listed first in
            % kopt, fitting one coefficient vector per response column of y,
            % then scoring the in-sample fit via Helper.CalcAllErrors.
            selected = X(:, kopt(1:ncomp));
            design = [ones(size(selected, 1), 1), selected];  % intercept + bands
            nResp = size(y, 2);
            coeffs = zeros(size(design, 2), nResp);
            for col = 1 : nResp
                coeffs(:, col) = regress(y(:, col), design);
            end
            ret = Helper.CalcAllErrors(y, design * coeffs, bShow);
        end
        

        
        %% Multiple linear regression per response, each with its own band set
        function ret = DoEachMLSRegressionGroup(X, y, fsss, bShow, ncomp)
            % fsss{i} lists the predictor columns used for response i. When
            % ncomp is supplied and is neither -1 nor larger than the list, it
            % caps how many of those columns are used.
            nResp = size(y, 2);
            yhat = zeros(size(y));
            for col = 1 : nResp
                bandSet = fsss{col};
                % cap the band list only when an applicable ncomp was given
                if nargin > 4 && ncomp ~= -1 && ncomp < length(bandSet)
                    bandSet = bandSet(1:ncomp);
                end
                design = [ones(size(X, 1), 1), X(:, bandSet)];
                coeffs = regress(y(:, col), design);
                yhat(:, col) = design * coeffs;
            end
            ret = Helper.CalcAllErrors(y, yhat, bShow);
        end
        
        %%% Lasso regression, one model per response column
        function ret = DoEachLasso(X, y, bShow)
            % Fits a cross-validated lasso path per response and predicts with
            % the coefficients at the 1-SE lambda (sparsest model within one
            % standard error of the CV-minimum loss), including the intercept.
            lenY = size(y, 2);
            yfit = zeros(size(y));
            for i = 1 : lenY
                yy = y(:, i);
                % 'CV' is required so FitInfo carries Index1SE.
                [B, FitInfo] = lasso(X, yy, 'CV', 10);
                idxLambda1SE = FitInfo.Index1SE;
                coef = B(:, idxLambda1SE);
                coef0 = FitInfo.Intercept(idxLambda1SE);
                % Fix: the old code did yfit(:, i) = X*B with B a
                % p-by-nLambda matrix (size-mismatch error) and dropped the
                % intercept entirely.
                yfit(:, i) = X * coef + coef0;
            end
            ret = Helper.CalcAllErrors(y, yfit, bShow);
        end
        
        
% % % % % % % % % % % % % % % % %         
        function CalcOneErrorWithSelects(X, y, fsss)
            % Incomplete stub: apparently intended to evaluate, per response,
            % an error metric using only the selected columns fsss{i}. No
            % metric is computed or returned yet.
            lenY = size(y, 2);
            yfit = zeros(size(y)); %#ok<NASGU> % placeholder for future predictions
            for i = 1 : lenY
                yy = y(:, i); %#ok<NASGU>
                ff = fsss{i};
                % Fix: the old code read the undefined variable 'xx'
                % ('xx = xx(:, ff)'), which errored; select from X instead.
                xx = X(:, ff); %#ok<NASGU>
                % TODO: fit a model on xx/yy and accumulate yfit / errors.
            end
        end
    end
    
end