classdef MyModel< handle
    properties (Constant)
        %% Configuration constants for the regression models.
        %% To manage a pool manually:  mypool = parpool(3);  delete(gcp('nocreate')).
        bParallel = true  %% enable the parallel option for the RF (TreeBagger) methods
%         bParallel = false %% disable the parallel option for RF
%         bRunTime = true   %% show RF run time (tic/toc)
        bRunTime = false  %% do not show RF run time (tic/toc)
        
    end
    methods(Static = true)
        %% Dispatch one regression model by numeric type.
        %% modelType: 1=per-response PLSR, 2=SVR, 3=RF, 4=LSBoost, 5=ANN.
        %% arg is the argument struct forwarded to the chosen method; ret1/ret2
        %% are the train/test error structs it returns.
        %% (Original note said "this one is unusable" -- verify before relying on it.)
        function [ret1, ret2] = DoOneModel(x_train, y_train, x_test, y_test, modelType, arg)
            switch modelType
                case 1
                    [ret1, ret2] = MyModel.DoEachRegression(x_train, y_train, x_test, y_test, arg);
                case 2
                    %% Not used in the paper; no CV inside -- check before reuse.
                    [ret1, ret2] = MyModel.DoEachSVR(x_train, y_train, x_test, y_test, arg);
                case 3
                    [ret1, ret2] = MyModel.DoEachRF(x_train, y_train, x_test, y_test, arg);
                case 4
                    [ret1, ret2] = MyModel.DoEachLSB(x_train, y_train, x_test, y_test, arg);
                case 5
                    [ret1, ret2] = MyModel.DoEachANN(x_train, y_train, x_test, y_test, arg);
                otherwise
                    % BUG FIX: previously only disp-ed a message, leaving
                    % ret1/ret2 undefined and producing a confusing error at
                    % the caller; fail fast instead.
                    error('MyModel:DoOneModel:badType', ...
                          'Unknown modelType %d (expected 1-5).', modelType);
            end
        end
        
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        % Random-forest feature elimination, one response column at a time:
        % repeatedly fits a 150-tree TreeBagger and drops predictors whose OOB
        % permuted-importance is <= 0, until a round drops nothing; the final
        % forest's resubstitution predictions are then scored.
        function ret = DoEachRFwithBest(X, y, bShow)
            lenY = size(y, 2);
            %             Mdl = cell(lenY, 1);
            yfits = zeros(size(y));
            for i = 1 : lenY
                disp(i)  % progress: current response column
                yy = y(:, i);
                
                XX = X;              % working predictor set, shrunk each round
                kk = 1:size(X, 2);   % original column indices of the survivors
                while true
                    tree = TreeBagger(150, XX, yy, 'Method', 'regression', ...
                        'OOBPredictorImportance','On', 'MinLeafSize',3);
                    %                     rmse = rms(oobError(tree))
                    rmse = rms(oobError(tree))  % no semicolon: echoes OOB RMSE as progress output
                    
                    % Keep only predictors with positive OOB importance.
                    idxvar = find(tree.OOBPermutedPredictorDeltaError>0);
                    kk = kk(idxvar);
                    % Converged when nothing was dropped this round.
                    % NOTE(review): if idxvar ever came back empty, XX would
                    % shrink to zero columns and TreeBagger would error on the
                    % next round -- confirm inputs make this impossible.
                    if length(idxvar) == size(XX, 2)
                        disp(kk);  % surviving original column indices
                        yfits(:, i) = predict(tree, XX);
                        break;
                    end
                    XX =  XX(:, idxvar);
                end
            end
            ret = Helper.CalcAllErrors(y, yfits, bShow);
        end
        
        % Variant of DoEachRFwithBest that records every elimination round
        % (forest, OOB RMSE, surviving indices) and, once the loop stops,
        % scores the round with the lowest OOB RMSE rather than the last one.
        % NOTE(review): idxvar is initialized to the full index set, so the
        % convergence test is true on the very first pass and the loop exits
        % after a single full-predictor forest -- elimination never runs.
        % Likely the check was meant to use the idxvar computed at the bottom
        % of the loop; confirm intent before changing.
        function ret = DoEachRFwithBest2(X, y, bShow)
            lenY = size(y, 2);
            %             Mdl = cell(lenY, 1);
            yfits = zeros(size(y));
            for i = 1 : lenY
                disp(i)  % progress: current response column
                yy = y(:, i);
                
                XX = X;              % working predictor set
                kk = 1:size(X, 2);   % original column indices of the survivors
                rmse = [];           % OOB RMSE of each round
                trees = {};          % forest fitted in each round
                jj = 1;
                kkSave = {};         % surviving indices per round
                idxvar =  kk;
                while true
                    tree = TreeBagger(200, XX, yy, 'Method', 'regression', ...
                        'OOBPredictorImportance','On', 'MinLeafSize',3);
                    %                     rmse = rms(oobError(tree))
                    rmse1 = rms(oobError(tree));
                    rmse = [rmse; rmse1];
                    
                    trees{jj} = tree;
                    kkSave{jj} = kk;
                    jj = jj+1;
                    
                    % "Converged": pick the round with minimum OOB RMSE and
                    % predict with that forest on its predictor subset.
                    if length(idxvar) == size(XX, 2)
                        [mmm, ins] = min(rmse)  % no semicolon: echoes the winning round
                        kk = kkSave{ins};
                        tree = trees{ins};
                        XX = X(:, kk);
                        disp(kk);  % surviving original column indices
                        yfits(:, i) = predict(tree, XX);
                        break;
                    end
                    
                    % Keep only predictors with positive OOB importance.
                    idxvar = find(tree.OOBPermutedPredictorDeltaError>0);
                    kk = kk(idxvar);
                    XX =  XX(:, idxvar);
                end
            end
            ret = Helper.CalcAllErrors(y, yfits, bShow);
        end
        
        
        %% LSBoost 2023-08-09 least-squares boosting
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        % Least-squares boosting: one fitrensemble model per response column.
        % x_train/x_test may be plain matrices or per-response cell arrays.
        % Returns train/test error structs from Helper.CalcAllErrors.
        function [ret1, ret2] = DoEachLSB(x_train, y_train, x_test, y_test, args)
            lenY = size(y_train, 2);
            y_train_hat = zeros(size(y_train));
            y_test_hat = zeros(size(y_test));
            
            bShow = args.bShow;

            t = templateTree('MinLeafSize', 5);
%             t = templateTree('MinLeafSize', 3); %% changed 2024-07-29
            for i = 1 : lenY
                if iscell(x_train) == true
                    xx_train = x_train{i};
                else
                    xx_train = x_train;
                end
                
                yy = y_train(:, i);
                % BUG FIX: the tree template 't' was created but never passed
                % to fitrensemble, so its MinLeafSize was silently ignored.
                % Wire it in and make the LSBoost method explicit (it is the
                % default for regression, stated here for clarity).
                tree = fitrensemble(xx_train, yy, 'Method', 'LSBoost', 'Learners', t);
                
                y_train_hat(:, i) = predict(tree, xx_train);
                
                if iscell(x_test) == true
                    y_test_hat(:, i) = predict(tree, x_test{i});
                else
                    y_test_hat(:, i) = predict(tree, x_test);
                end
            end
            ret1 = Helper.CalcAllErrors(y_train, y_train_hat, bShow);
            ret2 = Helper.CalcAllErrors(y_test, y_test_hat, bShow);
        end
        
        %%2025-07-30 neural network; x_train ~ 178*210 (sample count * band count)
        % Trains one gradient-descent-with-momentum fitnet per response column,
        % with hand-tuned per-response learning rates (see notes below).
        function [ret1, ret2] = DoEachANN(x_train, y_train, x_test, y_test, args)
            lenY = size(y_train, 2);            
            y_train_hat = zeros(size(y_train));
            y_test_hat = zeros(size(y_test));
            
            bShow = args.bShow;% show error summaries
            bUseNormal = args.bUseNormal;            
            
            bParallel = false;%% parallel training had no effect on the server; was: feature('numCores') > 10
            
            for i = 1 : lenY               
                rng(args.seed)  % reseed per column so each net trains reproducibly
                
                if iscell(x_train) == true
                    xx_train = x_train{i}';%210*178 (transposed: features-by-samples, as fitnet expects)
                else
                    xx_train = x_train';%210*178
                end
                if iscell(x_test) == true
                    xx_test  = x_test{i}';
                else
                    xx_test  = x_test';
                end
                lenX = size(xx_train, 1);
                
                if bUseNormal == true %%2025-08-02 global normalization, so one learning rate suits all inputs
% %                     [xx_train_T, PS] = mapminmax(xx_train_T);
% %                     xx_test_T = mapminmax('apply', xx_test_T, PS);
                    [xx_train, PS] = MyModel.MyMinmax(xx_train);
                    xx_test = MyModel.MyMinmax(xx_test, PS);
                end                
                
                yy = y_train(:, i);
                
                % Earlier topologies tried during tuning:
%                 net = fitnet([210 64 32 8 1], 'traingdm');
%                 net = fitnet([210 64 32 8 1], 'traingd');
%                 net = fitnet([210 64 12 1], 'traingdm');   
%                 net = fitnet([96, 16, 4], 'traingdm');
%                 net = fitnet([96 16], 'traingdm');
%                 net = fitnet([32 8], 'traingdm');
                % NOTE(review): both branches currently build the same topology.
                if lenX >100
                    net = fitnet([108 24], 'traingdm');
                else
%                     net = fitnet([18 16], 'traingdm');
                    net = fitnet([108 24], 'traingdm');
                end
                
% % %                 net.trainParam.show =500;
                net.trainParam.showWindow = args.showANN; %%true; %%false
%                 net.trainParam.lr = 0.000051;% 000000751  0.0000015  0.0001 (AK overflow)
                net.trainParam.lr = 0.00001;% 0.000005; 0.00001 (FD); 0.00002 (originally suitable)
                % 20 bands: 0.0001 -> NaN; 0.00001 good
                % 210 bands: 0.00001 usually jumps; 0.000005 usually stable (G)
                % 0.000001
                
                % 0.000051: MSC reaches 0.734 (AN); 0.00001 fairly suitable but the gradient fluctuates

                net.trainParam.mc = 0.5;
                net.trainParam.epochs = 3000;%3000; %2500;%3000
                net.trainParam.goal = 1e-3; 
                net.trainparam.max_fail = 1000;  % NOTE(review): lowercase 'trainparam' -- presumably resolves case-insensitively on network objects; confirm
                net.divideParam.trainRatio = 1;  % train on all samples; no internal val/test split
                net.divideParam.valRatio =0;
                net.divideParam.testRatio =0;
                
                % Per-response overrides of learning rate / epochs.
                if i == 3
                    net.trainParam.lr = 0.0000025;%%0.000001; %%0.00002;
                    % 20 bands: 0.0001 NaN; 0.00001 flat; 0.000002 OK
                    % 210 bands: 0.0000025 (G); 0.000001 (FD)
                    % 0.0000005
                    net.trainParam.epochs = 3000;%300;%3000
                elseif i == 2
                    net.trainParam.lr = 0.000005;%%0.000001;%%0.000003;
                    % 20 bands: 0.0001 NaN; 0.00001 OK
                    %0.00001;% 0.00001; %0.00001
                    % 210 bands: 0.00001 jitters slightly; 0.000005 better
                    % 210 bands: 0.0000005 (FD)
                    net.trainParam.epochs = 3000; %3000%3500;%3000
                end
                
                % GPU/parallel variants tried during tuning:
%                 [net,tr] = train(net, xx_train', yy', 'useParallel','yes', 'useGPU', 'yes');
%                 net = train(net, xx_train', yy', 'useGPU', 'yes');
%                 net = train(net, xx_train', yy', 'useParallel','yes');
                if bParallel == true
                    net = train(net, xx_train, yy', 'useParallel', 'yes');
                else
                    net = train(net, xx_train, yy');
                end
                y_train_hat(:, i) = net(xx_train);
                y_test_hat(:, i) = net(xx_test);
            end
            ret1 = Helper.CalcAllErrors(y_train, y_train_hat, bShow);
            ret2 = Helper.CalcAllErrors(y_test, y_test_hat, bShow);
        end
        
        %%% 2024-11-23 supports y_test as a single vector or a matrix.
        % Drops per-column rows from y_test when arg requests it.
        % Returns:
        %   bEachRemove - whether per-column row removal was requested
        %   y_test      - y_test with arg.nRemove rows dropped per column
        %   ins         - arg.re_ins, the retained row indices (one column per response)
        function [bEachRemove, y_test, ins] = ExtractRetains(y_test, arg)
            bEachRemove = false;
            ins = [];
            
            % BUG FIX: 'isField' is not a MATLAB builtin (function names are
            % case-sensitive); use isfield.
            if isfield(arg, 'bEachRemove') == false
                return
            end
            
            bEachRemove = arg.bEachRemove;
            if bEachRemove == true
                ins = arg.re_ins;
                nRemove = arg.nRemove;
                lenRow = size(y_test, 1);
                % BUG FIX: lenY was used below without ever being defined;
                % it is the number of response columns.
                lenY = size(y_test, 2);
                yyy = zeros(lenRow - nRemove, lenY);
                for i = 1 : lenY
                    inn = ins(:, i);
                    yyy(:, i) = y_test(inn, i);
                end
                y_test = yyy;
            end
        end
        
        %% TreeBagger random forest regression.
        % Fits one 400-tree bagged ensemble per response column and predicts
        % on both the training and the test predictors. x_train/x_test may be
        % plain matrices or cell arrays holding one matrix per response.
        % Honors the class-level bParallel / bRunTime switches.
        function [ret1, ret2, y_train_hat, y_test_hat] = DoEachRF(x_train, y_train, x_test, y_test, arg)
            %%x_train: ...*206,   y_train:  ...*3
            if MyModel.bParallel
                paroptions = statset('UseParallel', true);
            else
                paroptions = statset();
            end
            if MyModel.bRunTime
                tic
            end

            bShow = arg.bShow;
            nTargets = size(y_train, 2);

            y_train_hat = zeros(size(y_train));
            y_test_hat = zeros(size(y_test));

            for k = 1 : nTargets
                if iscell(x_train)
                    xx = x_train{k};
                else
                    xx = x_train;
                end

                forest = TreeBagger(400, xx, y_train(:, k), 'Method', 'regression', ...
                                    'MinLeafSize', 5, 'Options', paroptions);

                y_train_hat(:, k) = predict(forest, xx);

                if iscell(x_test)
                    y_test_hat(:, k) = predict(forest, x_test{k});
                else
                    y_test_hat(:, k) = predict(forest, x_test);
                end
            end

            ret1 = Helper.CalcAllErrors(y_train, y_train_hat, bShow);
            ret2 = Helper.CalcAllErrors(y_test, y_test_hat, bShow);

            if MyModel.bRunTime
                toc
            end
        end
        
        
        %% fitrtree decision tree: one regression tree per response column,
        %% fitted and evaluated on the same data X (resubstitution error).
        function ret = DoEachTree(X, y, bShow)
            nCols = size(y, 2);
            yfits = zeros(size(y));
            for k = 1 : nCols
                %                  tree = fitrtree(X, yy, 'CrossVal', 'on');
                mdl = fitrtree(X, y(:, k));
                yfits(:, k) = predict(mdl, X);
            end
            ret = Helper.CalcAllErrors(y, yfits, bShow);
        end
        
        %% Run SVR: one fitrsvm model per response column.
        % arg.type selects the kernel: 1=defaults, 2=gaussian, 3=rbf with
        % auto scale, 4=quadratic polynomial, 5=cubic polynomial.
        function [ret1, ret2, y_train_hat, y_test_hat] = DoEachSVR(x_train, y_train, x_test, y_test, arg)
            bShow = arg.bShow;
            type = arg.type;
            
            lenY = size(y_train, 2);
            
            y_train_hat = zeros(size(y_train));
            y_test_hat = zeros(size(y_test));
            Mdl = cell(lenY, 1);  % preallocate instead of growing inside the loop
            for i = 1 : lenY
                if iscell(x_train) == true%%2025-08-11
                    xx = x_train{i};
                else
                    xx = x_train;
                end
                
                yy = y_train(:, i);
                
                switch type
                    case 1
                        Mdl{i} = fitrsvm(xx, yy);
                    case 2
                        Mdl{i} = fitrsvm(xx, yy,'Standardize',true,'KernelFunction','gaussian');
                    case 3
                        Mdl{i} = fitrsvm(xx, yy, 'KernelFunction', ...
                                    'rbf','KernelScale','auto', 'Standardize',true);
                    case 4
                        Mdl{i} = fitrsvm(xx, yy, 'Standardize',true,'KernelFunction',...
                                'polynomial', 'PolynomialOrder', 2);
                    case 5
                        Mdl{i} = fitrsvm(xx, yy, 'Standardize',true,'KernelFunction',...
                                'polynomial', 'PolynomialOrder', 3 );
                    otherwise
                        % BUG FIX: an unknown type previously fell through and
                        % crashed later with an undefined Mdl{i}; fail fast.
                        error('MyModel:DoEachSVR:badType', ...
                              'Unknown SVR type %d (expected 1-5).', type);
                end
                
                y_train_hat(:, i) = predict(Mdl{i}, xx);
                
                if iscell(x_test) == true%%2024-11-25
                    y_test_hat(:, i) = predict(Mdl{i}, x_test{i});
                else
                    y_test_hat(:, i) = predict(Mdl{i}, x_test);
                end
                %                 disp([num2str(i), ': number of support vectors: ', num2str(sum(Mdl{i}.IsSupportVector))])
            end
            
            ret1 = Helper.CalcAllErrors(y_train, y_train_hat, bShow);
            ret2 = Helper.CalcAllErrors(y_test, y_test_hat, bShow);
        end
        
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
        %%% Whole-response PLSR: a single plsregress fit covering all y
        %%% columns at once (e.g. X 24*204, y 24*6), no per-metal split.
        function [ret1, ret2, beta, PCTVAR, MSE] = DoPLSR(x_train, y_train, x_test, y_test, arg)
            ncomp = arg.ncomp;
            bShow = arg.bShow;

            [~,~,~,~, beta, PCTVAR, MSE] = plsregress(x_train, y_train, ncomp);

            % Prepend an intercept column before applying the coefficients.
            nTrain = size(x_train, 1);
            nTest  = size(x_test, 1);
            y_train_hat = [ones(nTrain, 1), x_train] * beta;
            y_test_hat  = [ones(nTest, 1),  x_test]  * beta;

            ret1 = Helper.CalcAllErrors(y_train, y_train_hat, bShow);
            ret2 = Helper.CalcAllErrors(y_test, y_test_hat, bShow);
        end
        %% Per-response PLSR: one plsregress model per y column.
        % arg.ncomp is the number of PLS components (not the band count);
        % arg.bEach plots the per-response variance curves.
        % Returns train/test error structs and mse, the per-response MSE
        % curve over 0..ncomp components.
        function [ret1, ret2, mse] = DoEachRegression(x_train, y_train, x_test, y_test, arg)
            ncomp = arg.ncomp;%% number of PLS components (not the band count)
            bShow = arg.bShow;
            bEach = arg.bEach;
            
            lenY  = size(y_train, 2);
            bands = size(x_train, 2);
            beta  = zeros(bands+1, lenY);
            
            y_train_hat = zeros(size(y_train));
            y_test_hat  = zeros(size(y_test));
            
            mse = zeros(lenY, ncomp+1);
            
            % Intercept-augmented design matrices (rebuilt per response when
            % the inputs are cell arrays).
            if iscell(x_train) == false
                XX_train = [ones(size(x_train,1),1), x_train];
            end
            
            if iscell(x_test) == false%%%2024-11-25
                XX_test = [ones(size(x_test,1),1), x_test];
            end
            
            for i = 1 : lenY
                yy = y_train(:, i);
                
                if iscell(x_train) == true
                    xx = x_train{i};
                    XX_train = [ones(size(xx,1),1), xx];
                else
                    xx = x_train;
                end
                
                % BUG FIX: the declared output 'mse' was preallocated but
                % never filled (always returned zeros). Capture plsregress'
                % MSE and keep the response (second) row, whose length
                % ncomp+1 matches the preallocation.
                [~,~,~,~, bb, PCTVAR, mseOne] = plsregress(xx, yy, ncomp);
                mse(i, :) = mseOne(2, :);
                
                y_train_hat(:, i) = XX_train *bb;
                
                if iscell(x_test) == true%%%2024-11-25
                    XX_test = [ones(size(x_test{i},1),1), x_test{i}];
                end
                
                y_test_hat(:, i) = XX_test *bb;%%OK！！！
                
                beta(:, i) = bb;
                
                if bEach == true%% plot each response's PLS variance curve
                    Helper.PlotPLSRvar(PCTVAR, i==1);
                end
            end
            if bEach == true
                Helper.AddLegends()
            end
            
            ret1 = Helper.CalcAllErrors(y_train, y_train_hat, bShow);
            ret2 = Helper.CalcAllErrors(y_test, y_test_hat, bShow);
        end
        
        %% Per-response multiple linear regression on the first ncomp bands
        %% listed in kopt (band indices in priority order).
        function ret = DoEachMLSRegression(X, y, ncomp, kopt, bShow)
            nResp = size(y, 2);

            selected = X(:, kopt(1:ncomp));
            nBands = size(selected, 2);
            beta = zeros(nBands+1, nResp);

            % Shared intercept-augmented design matrix.
            design = [ones(size(selected, 1),1), selected];

            for k = 1 : nResp
                beta(:, k) = regress(y(:, k), design);
            end

            yfit = design * beta;

            ret = Helper.CalcAllErrors(y, yfit, bShow);
        end
        

        
        %% Per-response MLS regression with a per-response band list fsss{k};
        %% the optional ncomp truncates each list (omitted or -1 = use all).
        function ret = DoEachMLSRegressionGroup(X, y, fsss, bShow, ncomp)
            nResp = size(y, 2);
            yfit = zeros(size(y));
            for k = 1 : nResp
                fs = fsss{k};
                % Truncate only when a usable ncomp was actually supplied.
                if nargin > 4 && ncomp ~= -1 && ncomp < length(fs)
                    fs = fs(1:ncomp);
                end

                design = [ones(size(X, 1),1), X(:, fs)];
                beta = regress(y(:, k), design);
                yfit(:, k) = design * beta;
            end

            ret = Helper.CalcAllErrors(y, yfit, bShow);
        end
        
        %%% Lasso regression: one cross-validated lasso fit per response
        %%% column, predicting with the 1-SE-rule lambda.
        function ret = DoEachLasso(X, y, bShow)
            lenY = size(y, 2);
            yfit = zeros(size(y));
            for i = 1 : lenY
                yy = y(:, i);
                % BUG FIX: lasso returns one coefficient column per lambda,
                % so the old X*B produced a matrix (dimension mismatch when
                % assigned to yfit(:, i)) and dropped the intercept entirely.
                % Restore the originally drafted approach: cross-validate,
                % pick the 1-SE lambda, and include its intercept.
                [B, FitInfo] = lasso(X, yy, 'CV', 10);
                idxLambda1SE = FitInfo.Index1SE;
                coef = B(:, idxLambda1SE);
                coef0 = FitInfo.Intercept(idxLambda1SE);

                yfit(:, i) = X*coef + coef0;
            end
            ret = Helper.CalcAllErrors(y, yfit, bShow);
        end
        
        %% Simple two-stage linear model, 2024-11-4: for each response k,
        %% fits y_train(:,k) ~ 1 + x_train(:,k) by ordinary least squares.
        %% beta is 2-by-nResp ([intercept; slope] per response).
        function [ret1, ret2, beta] = DoLinearModel(x_train, y_train, x_test, y_test, args)
            bShow = args.bShow;
            nResp = size(y_train, 2);
            beta  = zeros(2, nResp);

            y_train_hat = zeros(size(y_train));
            y_test_hat  = zeros(size(y_test));

            for k = 1 : nResp
                design = [ones(size(x_train(:, k))), x_train(:, k)];
                bb = regress(y_train(:, k), design);

                y_train_hat(:, k) = design * bb;

                testDesign = [ones(size(x_test(:, k))), x_test(:, k)];
                y_test_hat(:, k) = testDesign * bb;

                beta(:, k) = bb;
            end

            ret1 = Helper.CalcAllErrors(y_train, y_train_hat, bShow);
            ret2 = Helper.CalcAllErrors(y_test, y_test_hat, bShow);
        end
    
        %% Centered ridge-style per-response linear model: the slope is
        %% shrunk by adding a penalty of 10*n to the denominator, and the
        %% intercept comes from the train-set means. beta is 1-by-nResp.
        function [ret1, ret2, beta] = DoLinearModel2(x_train, y_train, x_test, y_test, args)
            bShow = args.bShow;
            nResp = size(y_train, 2);
            beta  = zeros(1, nResp);

            y_train_hat = zeros(size(y_train));
            y_test_hat  = zeros(size(y_test));

            penalty = 10 * size(y_train, 1);
            for k = 1 : nResp
                yk = y_train(:, k);
                yMean = mean(yk);

                xk = x_train(:, k);
                xMean = mean(xk);

                % Shrunken slope: <xc, yc> / (<xc, xc> + penalty).
                xc = xk - xMean;
                yc = yk - yMean;
                slope = xc' * yc / (xc' * xc + penalty);

                y_train_hat(:, k) = (xk - xMean) * slope + yMean;

                y_test_hat(:, k) = (x_test(:, k) - xMean) * slope + yMean;

                beta(:, k) = slope;
            end

            ret1 = Helper.CalcAllErrors(y_train, y_train_hat, bShow);
            ret2 = Helper.CalcAllErrors(y_test, y_test_hat, bShow);
        end
  
        
% % % % % % % % % % % % % % % % %         
        % NOTE(review): this function appears unfinished/broken:
        %   - it declares no output arguments, so nothing is returned;
        %   - 'xx' is read before it is ever assigned (likely meant X(:, ff)),
        %     so the loop body errors on first use;
        %   - 'yfit' and 'yy' are prepared but never used.
        % Kept as-is pending clarification of the intended behavior.
        function CalcOneErrorWithSelects(X, y, fsss)
            lenY = size(y, 2);
            yfit = zeros(size(y));
            
            for i = 1 : lenY
                yy = y(:, i);
                ff = fsss{i};
                xx = xx(:, ff);
            end 
        end
        
        %% Global min-max scaling to [-1, 1] (e.g. XX is 210*178, bands*samples).
        % Two call forms:
        %   [YY, PS] = MyMinmax(XX)     - compute PS = [min, max] from XX itself
        %   YY       = MyMinmax(XX, PS) - reuse a previously computed PS
        function  [YY, PS] = MyMinmax(XX, PS)
            if nargin >= 2   % idiomatic arity check instead of exist(..., 'var')
                nn = PS(1);
                mm = PS(2);
            else
                mm = max(XX, [], 'all');
                nn = min(XX, [], 'all');
            
                PS = [nn, mm];
            end
            % BUG FIX: a constant input (mm == nn) previously divided by a
            % zero-width range, producing NaN; map it to the midpoint 0.
            if mm == nn
                YY = zeros(size(XX));
                return
            end
            YY = 2 * (XX - nn) / (mm - nn) - 1;
        end
                    
        
    end
    
end