import graphviz
import xgboost as xgb
from bayes_opt import BayesianOptimization
from catboost import CatBoostRegressor
# import sys
from prettytable import PrettyTable
from sklearn import tree
from sklearn.ensemble import AdaBoostRegressor
from sklearn.ensemble import BaggingRegressor
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.ensemble import RandomForestRegressor as RFR
from sklearn.gaussian_process import GaussianProcessRegressor as GPR
from sklearn.gaussian_process.kernels import DotProduct, WhiteKernel, Matern, PairwiseKernel, RationalQuadratic
from sklearn.gaussian_process.kernels import RBF
from sklearn.linear_model import Lasso
from sklearn.linear_model import LinearRegression as LinearR
from sklearn.linear_model import Ridge
from sklearn.metrics import make_scorer
from sklearn.model_selection import LeaveOneOut
from sklearn.model_selection import cross_validate as CV
from sklearn.model_selection import train_test_split as TTS
from sklearn.neighbors import KNeighborsRegressor
from sklearn.neural_network import MLPRegressor
from sklearn.svm import SVR
# from sko.AFSA import AFSA
from streamlit_extras.colored_header import colored_header

from business.algorithm.utils import *


def run():
    """Streamlit page for supervised regression.

    Workflow: upload a CSV, preview it, split columns into features and
    targets, pick one target, then select and train a regressor (model
    config comes from templates under ``./models/regressors``), with
    optional Bayesian hyperparameter search in the per-model branches below.
    """
    colored_header(label="机器学习：回归", description=" ", color_name="violet-90")
    file = st.file_uploader("Upload `.csv`file", type=['csv'], label_visibility="collapsed")
    if file is None:
        # No upload yet: show a small table describing the expected input file.
        table = PrettyTable(['file name', 'class', 'description'])
        table.add_row(['file_1', 'dataset', 'data file'])
        st.write(table)
    if file is not None:
        df = pd.read_csv(file)
        # Check for missing values (helper from business.algorithm.utils).
        check_string_NaN(df)

        colored_header(label="数据信息", description=" ", color_name="violet-70")
        # Let the user preview the first `nrow` rows of the uploaded data.
        nrow = st.slider("rows", 1, len(df), 5)
        df_nrow = df.head(nrow)
        st.write(df_nrow)

        colored_header(label="特征&目标", description=" ", color_name="violet-70")

        target_num = st.number_input('目标数量', min_value=1, max_value=10, value=1)

        col_feature, col_target = st.columns(2)
        # Features: every column except the last `target_num` columns.
        features = df.iloc[:, :-target_num]
        # Targets: the last `target_num` columns.
        targets = df.iloc[:, -target_num:]
        with col_feature:
            st.write(features.head())
        with col_target:
            st.write(targets.head())
        # =================== model ====================================
        reg = REGRESSOR(features, targets)

        colored_header(label="target", description=" ", color_name="violet-70")

        # Offer target columns in reverse order; `list(reg.targets)` is
        # presumably the column names — TODO confirm REGRESSOR keeps them.
        target_selected_option = st.selectbox('target', list(reg.targets)[::-1])

        # Narrow the multi-column target frame to the single selected
        # column (a Series) for the rest of the training flow.
        reg.targets = targets[target_selected_option]

        colored_header(label="Regressor", description=" ", color_name="violet-30")

        model_path = './models/regressors'

        # model_platform renders the model-selection UI from templates in
        # `model_path`; `inputs` is the chosen hyperparameter dict and
        # `col2` a layout column for extra widgets.
        template_alg = model_platform(model_path)

        inputs, col2 = template_alg.show()

        # ---------------- Decision tree regressor branch ----------------
        if inputs['model'] == 'DecisionTreeRegressor':

            with col2:
                with st.expander('Operator'):
                    # Evaluation strategy: hold-out split, k-fold CV, or
                    # leave-one-out.
                    operator = st.selectbox('', ('train test split', 'cross val score', 'leave one out'),
                                            label_visibility="collapsed")
                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])

                    elif operator == 'cross val score':
                        # NOTE(review): min_value is 1 here, but sklearn
                        # cross-validation requires cv >= 2 — confirm.
                        cv = st.number_input('cv', 1, 20, 5)

                    elif operator == 'leave one out':
                        loo = LeaveOneOut()

            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)
            if button_train:
                if operator == 'train test split':
                    if inputs['auto hyperparameters'] == False:
                        # Manual hyperparameters: build the tree directly
                        # from the UI inputs and fit on the hold-out split.
                        reg.model = tree.DecisionTreeRegressor(random_state=inputs['random state'],
                                                               splitter=inputs['splitter'],
                                                               max_depth=inputs['max depth'],
                                                               min_samples_leaf=inputs['min samples leaf'],
                                                               min_samples_split=inputs['min samples split'])

                        reg.DecisionTreeRegressor()

                        # NOTE(review): pd.DataFrame(reg.Ypred) has a fresh
                        # RangeIndex while reg.Ytest keeps the shuffled
                        # original index, so this concat aligns mismatched
                        # indices; result_data also appears unused — verify.
                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']
                        plot_and_export_results(reg, "DTR")

                        if inputs['tree graph']:
                            # Render the fitted tree with graphviz.
                            # NOTE(review): class_names on a regressor is
                            # presumably ignored by export_graphviz — confirm.
                            class_names = list(set(reg.targets.astype(str).tolist()))
                            dot_data = tree.export_graphviz(reg.model, out_file=None, feature_names=list(reg.features),
                                                            class_names=class_names, filled=True, rounded=True)
                            graph = graphviz.Source(dot_data)
                            graph.render('Tree graph', view=True)

                    elif inputs['auto hyperparameters']:
                        # Objective for Bayesian optimization: refit on the
                        # hold-out split and return reg.score (assumed
                        # higher-is-better — TODO confirm).
                        def DTR_TT(max_depth, min_samples_leaf, min_samples_split):
                            reg.model = tree.DecisionTreeRegressor(max_depth=int(max_depth),
                                                                   min_samples_leaf=int(min_samples_leaf),
                                                                   min_samples_split=int(min_samples_split))
                            reg.DecisionTreeRegressor()
                            return reg.score

                        # Search bounds: the UI values act as upper bounds.
                        DTRbounds = {'max_depth': (1, inputs['max depth']),
                                     'min_samples_leaf': (1, inputs['min samples leaf']),
                                     'min_samples_split': (2, inputs['min samples split'])}

                        with st.expander('hyperparameter opt'):

                            optimizer = BayesianOptimization(f=DTR_TT, pbounds=DTRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])

                        # bayes_opt samples floats; cast tree params back to int.
                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        params_best['max_depth'] = int(params_best['max_depth'])
                        params_best['min_samples_leaf'] = int(params_best['min_samples_leaf'])
                        params_best['min_samples_split'] = int(params_best['min_samples_split'])
                        st.write("\n", "\n", "best params: ", params_best)

                        # Refit with the best parameters found.
                        reg.model = tree.DecisionTreeRegressor(random_state=inputs['random state'],
                                                               splitter=inputs['splitter'],
                                                               max_depth=params_best['max_depth'],
                                                               min_samples_leaf=params_best['min_samples_leaf'],
                                                               min_samples_split=params_best['min_samples_split'])

                        reg.DecisionTreeRegressor()

                        # NOTE(review): same index-alignment / unused-variable
                        # concern as the manual branch above — verify.
                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']
                        plot_and_export_results(reg, "DTR")

                        if inputs['tree graph']:
                            class_names = list(set(reg.targets.astype(str).tolist()))
                            dot_data = tree.export_graphviz(reg.model, out_file=None, feature_names=list(reg.features),
                                                            class_names=class_names, filled=True, rounded=True)
                            graph = graphviz.Source(dot_data)
                            graph.render('Tree graph', view=True)

                elif operator == 'cross val score':
                    if inputs['auto hyperparameters'] == False:
                        reg.model = tree.DecisionTreeRegressor(random_state=inputs['random state'],
                                                               splitter=inputs['splitter'],
                                                               max_depth=inputs['max depth'],
                                                               min_samples_leaf=inputs['min samples leaf'],
                                                               min_samples_split=inputs['min samples split'])

                        export_cross_val_results(reg, cv, "DTR_cv", inputs['random state'])

                    elif inputs['auto hyperparameters']:
                        # CV objective: mean cross-validated score via cv_cal.
                        def DTR_TT(max_depth, min_samples_leaf, min_samples_split):
                            reg.model = tree.DecisionTreeRegressor(max_depth=int(max_depth),
                                                                   min_samples_leaf=int(min_samples_leaf),
                                                                   min_samples_split=int(min_samples_split))
                            cv_score = cv_cal(reg, cv, inputs['random state'])
                            return cv_score

                        DTRbounds = {'max_depth': (1, inputs['max depth']),
                                     'min_samples_leaf': (1, inputs['min samples leaf']),
                                     'min_samples_split': (2, inputs['min samples split'])}

                        with st.expander('hyperparameter opt'):

                            optimizer = BayesianOptimization(f=DTR_TT, pbounds=DTRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])

                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        params_best['max_depth'] = int(params_best['max_depth'])
                        params_best['min_samples_leaf'] = int(params_best['min_samples_leaf'])
                        params_best['min_samples_split'] = int(params_best['min_samples_split'])
                        st.write("\n", "\n", "best params: ", params_best)
                        reg.model = tree.DecisionTreeRegressor(random_state=inputs['random state'],
                                                               splitter=inputs['splitter'],
                                                               max_depth=params_best['max_depth'],
                                                               min_samples_leaf=params_best['min_samples_leaf'],
                                                               min_samples_split=params_best['min_samples_split'])

                        export_cross_val_results(reg, cv, "DTR_cv", inputs['random state'])


                elif operator == 'leave one out':
                    if inputs['auto hyperparameters'] == False:
                        reg.model = tree.DecisionTreeRegressor(random_state=inputs['random state'],
                                                               splitter=inputs['splitter'],
                                                               max_depth=inputs['max depth'],
                                                               min_samples_leaf=inputs['min samples leaf'],
                                                               min_samples_split=inputs['min samples split'])

                        export_loo_results(reg, loo, "DTR_loo")

                    elif inputs['auto hyperparameters']:
                        # LOO objective: leave-one-out score via loo_cal.
                        def DTR_TT(max_depth, min_samples_leaf, min_samples_split):
                            reg.model = tree.DecisionTreeRegressor(max_depth=int(max_depth),
                                                                   min_samples_leaf=int(min_samples_leaf),
                                                                   min_samples_split=int(min_samples_split))
                            loo_score = loo_cal(reg, loo)
                            return loo_score

                        DTRbounds = {'max_depth': (1, inputs['max depth']),
                                     'min_samples_leaf': (1, inputs['min samples leaf']),
                                     'min_samples_split': (2, inputs['min samples split'])}

                        with st.expander('hyperparameter opt'):

                            optimizer = BayesianOptimization(f=DTR_TT, pbounds=DTRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])

                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        params_best['max_depth'] = int(params_best['max_depth'])
                        params_best['min_samples_leaf'] = int(params_best['min_samples_leaf'])
                        params_best['min_samples_split'] = int(params_best['min_samples_split'])
                        st.write("\n", "\n", "best params: ", params_best)
                        reg.model = tree.DecisionTreeRegressor(random_state=inputs['random state'],
                                                               splitter=inputs['splitter'],
                                                               max_depth=params_best['max_depth'],
                                                               min_samples_leaf=params_best['min_samples_leaf'],
                                                               min_samples_split=params_best['min_samples_split'])
                        export_loo_results(reg, loo, "DTR_loo")

        # ---------------- Random forest regressor branch ----------------
        if inputs['model'] == 'RandomForestRegressor':
            with col2:
                with st.expander('Operator'):
                    # Evaluation strategy: hold-out split, k-fold CV, or
                    # leave-one-out.
                    operator = st.selectbox('data operator', ('train test split', 'cross val score', 'leave one out'))
                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])
                    elif operator == 'cross val score':
                        # NOTE(review): min_value is 1, but sklearn requires
                        # cv >= 2 — confirm.
                        cv = st.number_input('cv', 1, 20, 5)

                    elif operator == 'leave one out':
                        loo = LeaveOneOut()

                    # elif operator == 'oob score':
                    #     inputs['oob score']  = st.selectbox('oob score',[True], disabled=True)
                    #     inputs['warm start'] = True

            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)

            if button_train:
                if operator == 'train test split':
                    if inputs['auto hyperparameters'] == False:
                        # Manual hyperparameters from the UI.
                        reg.model = RFR(n_estimators=inputs['nestimators'], random_state=inputs['random state'],
                                        max_depth=inputs['max depth'], min_samples_leaf=inputs['min samples leaf'],
                                        min_samples_split=inputs['min samples split'], oob_score=inputs['oob score'],
                                        warm_start=inputs['warm start'],
                                        n_jobs=inputs['njobs'])
                        reg.RandomForestRegressor()

                        # NOTE(review): pd.DataFrame(reg.Ypred) has a fresh
                        # RangeIndex while reg.Ytest keeps the shuffled
                        # original index, so this concat aligns mismatched
                        # indices; result_data also appears unused — verify.
                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']
                        plot_and_export_results(reg, "RFR")

                    elif inputs['auto hyperparameters']:
                        # Objective for Bayesian optimization: refit on the
                        # hold-out split and return reg.score (assumed
                        # higher-is-better — TODO confirm).
                        def RFR_TT(n_estimators, max_depth, min_samples_leaf, min_samples_split):

                            reg.model = RFR(n_estimators=int(n_estimators), max_depth=int(max_depth),
                                            min_samples_leaf=int(min_samples_leaf),
                                            min_samples_split=int(min_samples_split), n_jobs=-1)
                            reg.RandomForestRegressor()
                            return reg.score

                        # Search bounds: the UI values act as upper bounds.
                        RFRbounds = {'n_estimators': (1, inputs['nestimators']), 'max_depth': (1, inputs['max depth']),
                                     'min_samples_leaf': (1, inputs['min samples leaf']),
                                     'min_samples_split': (2, inputs['min samples split'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=RFR_TT, pbounds=RFRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        # bayes_opt samples floats; cast forest params back to int.
                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        params_best['n_estimators'] = int(params_best['n_estimators'])
                        params_best['max_depth'] = int(params_best['max_depth'])
                        params_best['min_samples_leaf'] = int(params_best['min_samples_leaf'])
                        params_best['min_samples_split'] = int(params_best['min_samples_split'])
                        st.write("\n", "\n", "best params: ", params_best)

                        # Refit with the best parameters found.
                        reg.model = RFR(n_estimators=params_best['n_estimators'], random_state=inputs['random state'],
                                        max_depth=params_best['max_depth'],
                                        min_samples_leaf=params_best['min_samples_leaf'],
                                        min_samples_split=params_best['min_samples_split'],
                                        oob_score=inputs['oob score'], warm_start=inputs['warm start'],
                                        n_jobs=inputs['njobs'])

                        reg.RandomForestRegressor()

                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']
                        plot_and_export_results(reg, "RFR")

                elif operator == 'cross val score':
                    if inputs['auto hyperparameters'] == False:
                        reg.model = RFR(n_estimators=inputs['nestimators'], random_state=inputs['random state'],
                                        max_depth=inputs['max depth'], min_samples_leaf=inputs['min samples leaf'],
                                        min_samples_split=inputs['min samples split'], oob_score=inputs['oob score'],
                                        warm_start=inputs['warm start'],
                                        n_jobs=inputs['njobs'])
                        export_cross_val_results(reg, cv, "RFR_cv", inputs['random state'])
                    elif inputs['auto hyperparameters']:
                        # CV objective: mean cross-validated score via cv_cal.
                        def RFR_TT(n_estimators, max_depth, min_samples_leaf, min_samples_split):
                            reg.model = RFR(n_estimators=int(n_estimators), max_depth=int(max_depth),
                                            min_samples_leaf=int(min_samples_leaf),
                                            min_samples_split=int(min_samples_split), n_jobs=-1)
                            cv_score = cv_cal(reg, cv, inputs['random state'])
                            return cv_score

                        RFRbounds = {'n_estimators': (1, inputs['nestimators']), 'max_depth': (1, inputs['max depth']),
                                     'min_samples_leaf': (1, inputs['min samples leaf']),
                                     'min_samples_split': (2, inputs['min samples split'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=RFR_TT, pbounds=RFRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        params_best['n_estimators'] = int(params_best['n_estimators'])
                        params_best['max_depth'] = int(params_best['max_depth'])
                        params_best['min_samples_leaf'] = int(params_best['min_samples_leaf'])
                        params_best['min_samples_split'] = int(params_best['min_samples_split'])
                        st.write("\n", "\n", "best params: ", params_best)

                        reg.model = RFR(n_estimators=params_best['n_estimators'], random_state=inputs['random state'],
                                        max_depth=params_best['max_depth'],
                                        min_samples_leaf=params_best['min_samples_leaf'],
                                        min_samples_split=params_best['min_samples_split'],
                                        oob_score=inputs['oob score'], warm_start=inputs['warm start'],
                                        n_jobs=inputs['njobs'])

                        export_cross_val_results(reg, cv, "RFR_cv", inputs['random state'])

                # elif operator == 'oob score':

                #     reg.model = RFR(criterion = inputs['criterion'],n_estimators=inputs['nestimators'] ,random_state=inputs['random state'],max_depth=inputs['max depth'],min_samples_leaf=inputs['min samples leaf'],
                #                                 min_samples_split=inputs['min samples split'],oob_score=inputs['oob score'], warm_start=inputs['warm start'],
                #                                 n_jobs=inputs['njobs'])

                #     reg_res  = reg.model.fit(reg.features, reg.targets)
                #     oob_score = reg_res.oob_score_
                #     st.write(f'oob score : {oob_score}')

                elif operator == 'leave one out':
                    if inputs['auto hyperparameters'] == False:
                        # NOTE(review): only this branch passes
                        # criterion=inputs['criterion']; the other RFR
                        # branches omit it — confirm whether intentional.
                        reg.model = RFR(criterion=inputs['criterion'], n_estimators=inputs['nestimators'],
                                        random_state=inputs['random state'], max_depth=inputs['max depth'],
                                        min_samples_leaf=inputs['min samples leaf'],
                                        min_samples_split=inputs['min samples split'], oob_score=inputs['oob score'],
                                        warm_start=inputs['warm start'],
                                        n_jobs=inputs['njobs'])
                        export_loo_results(reg, loo, "RFR_loo")
                    elif inputs['auto hyperparameters']:

                        # LOO objective: leave-one-out score via loo_cal.
                        def RFR_TT(n_estimators, max_depth, min_samples_leaf, min_samples_split):
                            reg.model = RFR(n_estimators=int(n_estimators), max_depth=int(max_depth),
                                            min_samples_leaf=int(min_samples_leaf),
                                            min_samples_split=int(min_samples_split), n_jobs=-1)
                            loo_score = loo_cal(reg, loo)
                            return loo_score

                        RFRbounds = {'n_estimators': (1, inputs['nestimators']), 'max_depth': (1, inputs['max depth']),
                                     'min_samples_leaf': (1, inputs['min samples leaf']),
                                     'min_samples_split': (2, inputs['min samples split'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=RFR_TT, pbounds=RFRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        params_best['n_estimators'] = int(params_best['n_estimators'])
                        params_best['max_depth'] = int(params_best['max_depth'])
                        params_best['min_samples_leaf'] = int(params_best['min_samples_leaf'])
                        params_best['min_samples_split'] = int(params_best['min_samples_split'])
                        st.write("\n", "\n", "best params: ", params_best)

                        reg.model = RFR(n_estimators=params_best['n_estimators'], random_state=inputs['random state'],
                                        max_depth=params_best['max_depth'],
                                        min_samples_leaf=params_best['min_samples_leaf'],
                                        min_samples_split=params_best['min_samples_split'],
                                        oob_score=inputs['oob score'], warm_start=inputs['warm start'],
                                        n_jobs=inputs['njobs'])

                        export_loo_results(reg, loo, "RFR_loo")

        # ---------------- Support vector regressor branch ----------------
        if inputs['model'] == 'SupportVector':

            with col2:
                with st.expander('Operator'):

                    # SVR is scale-sensitive, so a scaler is applied first.
                    preprocess = st.selectbox('data preprocess', ['StandardScaler', 'MinMaxScaler'])

                    operator = st.selectbox('operator', ('train test split', 'cross val score', 'leave one out'),
                                            label_visibility='collapsed')
                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        # NOTE(review): the scaler is fit on the FULL feature
                        # matrix before the train/test split, which leaks
                        # test-set statistics into training — confirm whether
                        # this is acceptable for this app.
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)

                        # fit_transform returns a numpy array; wrap back into
                        # a DataFrame (column names are lost).
                        reg.features = pd.DataFrame(reg.features)

                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])

                    elif operator == 'cross val score':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        # NOTE(review): min_value is 1, but sklearn requires
                        # cv >= 2 — confirm.
                        cv = st.number_input('cv', 1, 20, 5)

                    elif operator == 'leave one out':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        loo = LeaveOneOut()
            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)
            if button_train:
                if operator == 'train test split':
                    if inputs['auto hyperparameters'] == False:
                        # Manual hyperparameters: user-selected kernel and C.
                        reg.model = SVR(kernel=inputs['kernel'], C=inputs['C'])

                        reg.SupportVector()

                        # NOTE(review): pd.DataFrame(reg.Ypred) has a fresh
                        # RangeIndex while reg.Ytest keeps the shuffled
                        # original index, so this concat aligns mismatched
                        # indices; result_data also appears unused — verify.
                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']

                        plot_and_export_results(reg, "SVR")

                    elif inputs['auto hyperparameters']:
                        # Objective for Bayesian optimization over C only.
                        # NOTE(review): the kernel is hard-coded to 'rbf'
                        # here, ignoring inputs['kernel'] — confirm intended.
                        def SVR_TT(C):
                            reg.model = SVR(kernel='rbf', C=C)
                            reg.SupportVector()
                            return reg.score

                        # Search bound: the UI value of C is the upper bound.
                        SVRbounds = {'C': (0.001, inputs['C'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=SVR_TT, pbounds=SVRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        params_best['kernel'] = 'rbf'
                        st.write("\n", "\n", "best params: ", params_best)

                        # Refit with the best C found.
                        reg.model = SVR(kernel='rbf', C=params_best['C'])

                        reg.SupportVector()

                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']
                        plot_and_export_results(reg, "SVR")
                elif operator == 'cross val score':
                    if inputs['auto hyperparameters'] == False:
                        reg.model = SVR(kernel=inputs['kernel'], C=inputs['C'])

                        export_cross_val_results(reg, cv, "SVR_cv", inputs['random state'])
                    elif inputs['auto hyperparameters']:
                        # CV objective: mean cross-validated score via cv_cal.
                        def SVR_TT(C):
                            reg.model = SVR(kernel='rbf', C=C)
                            cv_score = cv_cal(reg, cv, inputs['random state'])
                            return cv_score

                        SVRbounds = {'C': (0.001, inputs['C'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=SVR_TT, pbounds=SVRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        params_best['kernel'] = 'rbf'
                        st.write("\n", "\n", "best params: ", params_best)

                        reg.model = SVR(kernel='rbf', C=params_best['C'])

                        export_cross_val_results(reg, cv, "SVR_cv", inputs['random state'])

                elif operator == 'leave one out':
                    if inputs['auto hyperparameters'] == False:
                        # kernel = PairwiseKernel()
                        reg.model = SVR(kernel=inputs['kernel'], C=inputs['C'])
                        # reg.model = SVR(kernel=kernel, C=inputs['C'])

                        export_loo_results(reg, loo, "SVR_loo")
                    elif inputs['auto hyperparameters']:
                        # LOO objective: leave-one-out score via loo_cal.
                        def SVR_TT(C):
                            reg.model = SVR(kernel='rbf', C=C)
                            loo_score = loo_cal(reg, loo)
                            return loo_score

                        SVRbounds = {'C': (0.001, inputs['C'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=SVR_TT, pbounds=SVRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        params_best['kernel'] = 'rbf'
                        st.write("\n", "\n", "best params: ", params_best)

                        reg.model = SVR(kernel='rbf', C=params_best['C'])

                        export_loo_results(reg, loo, "SVR_loo")

        if inputs['model'] == 'GPRegressor':
            # --- Gaussian-process regression branch ---
            with col2:
                with st.expander('Operator'):
                    # Feature-scaling choice and evaluation strategy for this model.
                    preprocess = st.selectbox('data preprocess', ['StandardScaler', 'MinMaxScaler'])

                    operator = st.selectbox('operator', ('train test split', 'cross val score', 'leave one out'),
                                            label_visibility='collapsed')
                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)

                        reg.features = pd.DataFrame(reg.features)

                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])

                    elif operator == 'cross val score':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        # NOTE(review): sklearn cross-validation requires cv >= 2; the widget
                        # minimum of 1 permits a value that will fail downstream — confirm.
                        cv = st.number_input('cv', 1, 20, 5)

                    elif operator == 'leave one out':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        loo = LeaveOneOut()
            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)
            if button_train:
                # Build the kernel once from its selected name. The original repeated the
                # same 11-branch elif chain verbatim in all three operator branches; a
                # factory table keeps the name->kernel mapping in one place. Each entry is
                # a zero-argument callable so a fresh kernel object is constructed per run,
                # exactly as the elif chain did.
                kernel_factories = {
                    'DotProduct': DotProduct,
                    'WhiteKernel': WhiteKernel,
                    'Matern': Matern,
                    'PairwiseKernel': PairwiseKernel,
                    'RationalQuadratic': RationalQuadratic,
                    'RBF': RBF,
                    'DotProduct+WhiteKernel': lambda: DotProduct() + WhiteKernel(),
                    'DotProduct+RationalQuadratic': lambda: DotProduct() + RationalQuadratic(),
                    'PairwiseKernel+RationalQuadratic': lambda: PairwiseKernel() + RationalQuadratic(),
                    'DotProduct+PairwiseKernel': lambda: DotProduct() + PairwiseKernel(),
                }
                if inputs['kernel'] is None:
                    kernel = None  # let GaussianProcessRegressor fall back to its default kernel
                else:
                    kernel = kernel_factories[inputs['kernel']]()

                reg.model = GPR(kernel=kernel, random_state=inputs['random state'])

                if operator == 'train test split':
                    reg.GPRegressor()
                    # (Removed a result_data DataFrame that was assembled but never used;
                    # plot_and_export_results reads predictions from reg directly.)
                    plot_and_export_results(reg, "GPR")

                elif operator == 'cross val score':
                    export_cross_val_results(reg, cv, "GPR_cv", inputs['random state'])

                elif operator == 'leave one out':
                    export_loo_results(reg, loo, "GPR_loo")

        if inputs['model'] == 'KNeighborsRegressor':
            # --- k-nearest-neighbours regression branch ---
            with col2:
                with st.expander('Operator'):
                    # Feature-scaling choice and evaluation strategy for this model.
                    preprocess = st.selectbox('data preprocess', ['StandardScaler', 'MinMaxScaler'])

                    operator = st.selectbox('operator', ('train test split', 'cross val score', 'leave one out'),
                                            label_visibility='collapsed')
                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)

                        reg.features = pd.DataFrame(reg.features)

                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])

                    elif operator == 'cross val score':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        # NOTE(review): sklearn cross-validation requires cv >= 2; the widget
                        # minimum of 1 permits a value that will fail downstream — confirm.
                        cv = st.number_input('cv', 1, 20, 5)

                    elif operator == 'leave one out':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        loo = LeaveOneOut()

            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)
            if button_train:
                if operator == 'train test split':
                    if inputs['auto hyperparameters'] == False:
                        # Fixed n_neighbors taken straight from the sidebar inputs.
                        reg.model = KNeighborsRegressor(n_neighbors=inputs['n neighbors'])

                        reg.KNeighborsRegressor()

                        # NOTE(review): result_data is assembled but never used afterwards;
                        # plot_and_export_results appears to read predictions from reg directly.
                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']

                        plot_and_export_results(reg, "KNR")
                    elif inputs['auto hyperparameters']:
                        # Objective for Bayesian optimization: train/test score as a function
                        # of the (continuous) n_neighbors sample, truncated to an int.
                        def KNNR_TT(n_neighbors):
                            reg.model = KNeighborsRegressor(n_neighbors=int(n_neighbors))
                            reg.KNeighborsRegressor()
                            return reg.score

                        # Search n_neighbors in [1, user-supplied upper bound].
                        KNNRbounds = {'n_neighbors': (1, inputs['n neighbors'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=KNNR_TT, pbounds=KNNRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        # NOTE(review): score_best is assigned but never used below.
                        score_best = optimizer.max["target"]
                        params_best['n_neighbors'] = int(params_best['n_neighbors'])
                        st.write("\n", "\n", "best params: ", params_best)

                        # Refit with the tuned (integer) neighbour count.
                        reg.model = KNeighborsRegressor(n_neighbors=params_best['n_neighbors'])

                        reg.KNeighborsRegressor()

                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']
                        # NOTE(review): export tag "KNNR" here vs. "KNR" in the sibling
                        # branches — inconsistent naming, confirm which is intended.
                        plot_and_export_results(reg, "KNNR")
                elif operator == 'cross val score':
                    if inputs['auto hyperparameters'] == False:
                        reg.model = KNeighborsRegressor(n_neighbors=inputs['n neighbors'])

                        export_cross_val_results(reg, cv, "KNR_cv", inputs['random state'])
                    elif inputs['auto hyperparameters']:
                        # Objective: mean cross-validation score vs. n_neighbors.
                        def KNNR_TT(n_neighbors):
                            reg.model = KNeighborsRegressor(n_neighbors=int(n_neighbors))
                            cv_score = cv_cal(reg, cv, inputs['random state'])
                            return cv_score

                        KNNRbounds = {'n_neighbors': (1, inputs['n neighbors'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=KNNR_TT, pbounds=KNNRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        # NOTE(review): score_best is assigned but never used below.
                        score_best = optimizer.max["target"]
                        params_best['n_neighbors'] = int(params_best['n_neighbors'])
                        st.write("\n", "\n", "best params: ", params_best)

                        reg.model = KNeighborsRegressor(n_neighbors=params_best['n_neighbors'])
                        export_cross_val_results(reg, cv, "KNR_cv", inputs['random state'])

                elif operator == 'leave one out':
                    if inputs['auto hyperparameters'] == False:
                        reg.model = KNeighborsRegressor(n_neighbors=inputs['n neighbors'])

                        export_loo_results(reg, loo, "KNR_loo")
                    elif inputs['auto hyperparameters']:
                        # Objective: leave-one-out score vs. n_neighbors.
                        def KNNR_TT(n_neighbors):
                            reg.model = KNeighborsRegressor(n_neighbors=int(n_neighbors))
                            loo_score = loo_cal(reg, loo)
                            return loo_score

                        KNNRbounds = {'n_neighbors': (1, inputs['n neighbors'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=KNNR_TT, pbounds=KNNRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        # NOTE(review): score_best is assigned but never used below.
                        score_best = optimizer.max["target"]
                        params_best['n_neighbors'] = int(params_best['n_neighbors'])
                        st.write("\n", "\n", "best params: ", params_best)

                        reg.model = KNeighborsRegressor(n_neighbors=params_best['n_neighbors'])
                        export_loo_results(reg, loo, "KNR_loo")

        if inputs['model'] == 'LinearRegressor':
            # --- ordinary least-squares regression branch ---
            with col2:
                with st.expander('Operator'):
                    # Feature-scaling choice and evaluation strategy for this model.
                    preprocess = st.selectbox('data preprocess', ['StandardScaler', 'MinMaxScaler'])

                    operator = st.selectbox('operator', ('train test split', 'cross val score', 'leave one out'),
                                            label_visibility='collapsed')
                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)

                        reg.features = pd.DataFrame(reg.features)

                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])

                    elif operator == 'cross val score':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        # NOTE(review): sklearn cross-validation requires cv >= 2; the widget
                        # minimum of 1 permits a value that will fail downstream — confirm.
                        cv = st.number_input('cv', 1, 20, 5)

                    elif operator == 'leave one out':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        loo = LeaveOneOut()

            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)
            if button_train:
                # OLS has no hyperparameters, so there is no Bayesian-optimization path
                # here. The model construction was identical in all three branches and is
                # hoisted out; a result_data DataFrame that was assembled but never used
                # has been removed (plot_and_export_results reads predictions from reg).
                reg.model = LinearR()

                if operator == 'train test split':
                    reg.LinearRegressor()
                    plot_and_export_results(reg, "LinearR")

                elif operator == 'cross val score':
                    export_cross_val_results(reg, cv, "LinearR_cv", inputs['random state'])

                elif operator == 'leave one out':
                    export_loo_results(reg, loo, "LinearR_loo")

        if inputs['model'] == 'LassoRegressor':
            # --- Lasso (L1-regularized linear) regression branch ---
            with col2:
                with st.expander('Operator'):
                    # Feature-scaling choice and evaluation strategy for this model.
                    preprocess = st.selectbox('data preprocess', ['StandardScaler', 'MinMaxScaler'])

                    # NOTE(review): empty selectbox label here vs. 'operator' in sibling
                    # sections — inconsistent, confirm intent.
                    operator = st.selectbox('', ('train test split', 'cross val score', 'leave one out'),
                                            label_visibility='collapsed')
                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)

                        reg.features = pd.DataFrame(reg.features)

                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])

                    elif operator == 'cross val score':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        # NOTE(review): sklearn cross-validation requires cv >= 2; the widget
                        # minimum of 1 permits a value that will fail downstream — confirm.
                        cv = st.number_input('cv', 1, 20, 5)

                    elif operator == 'leave one out':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        loo = LeaveOneOut()

            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)
            if button_train:
                if operator == 'train test split':
                    if inputs['auto hyperparameters'] == False:
                        # Fixed alpha taken straight from the sidebar inputs.
                        reg.model = Lasso(alpha=inputs['alpha'], random_state=inputs['random state'])

                        reg.LassoRegressor()

                        # NOTE(review): result_data is assembled but never used afterwards;
                        # plot_and_export_results appears to read predictions from reg directly.
                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']

                        plot_and_export_results(reg, "LassoR")

                    elif inputs['auto hyperparameters']:
                        # Objective for Bayesian optimization: train/test score vs. alpha.
                        # NOTE(review): random_state is omitted here but set on the final
                        # model below — confirm whether that matters for this Lasso config.
                        def LassoR_TT(alpha):
                            reg.model = Lasso(alpha=alpha)
                            reg.LassoRegressor()
                            return reg.score

                        # Search alpha in (0.001, user-supplied upper bound).
                        LassoRbounds = {'alpha': (0.001, inputs['alpha'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=LassoR_TT, pbounds=LassoRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        # NOTE(review): score_best is assigned but never used below.
                        score_best = optimizer.max["target"]
                        st.write("\n", "\n", "best params: ", params_best)

                        # Refit with the tuned alpha.
                        reg.model = Lasso(alpha=params_best['alpha'], random_state=inputs['random state'])

                        reg.LassoRegressor()

                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']
                        plot_and_export_results(reg, "LassoR")
                elif operator == 'cross val score':
                    if inputs['auto hyperparameters'] == False:
                        reg.model = Lasso(alpha=inputs['alpha'], random_state=inputs['random state'])

                        export_cross_val_results(reg, cv, "LassoR_cv", inputs['random state'])

                    elif inputs['auto hyperparameters']:
                        # Objective: mean cross-validation score vs. alpha.
                        def LassoR_TT(alpha):
                            reg.model = Lasso(alpha=alpha)
                            cv_score = cv_cal(reg, cv, inputs['random state'])
                            return cv_score

                        LassoRbounds = {'alpha': (0.001, inputs['alpha'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=LassoR_TT, pbounds=LassoRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        # NOTE(review): score_best is assigned but never used below.
                        score_best = optimizer.max["target"]
                        st.write("\n", "\n", "best params: ", params_best)

                        reg.model = Lasso(alpha=params_best['alpha'], random_state=inputs['random state'])
                        export_cross_val_results(reg, cv, "LassoR_cv", inputs['random state'])

                elif operator == 'leave one out':
                    if inputs['auto hyperparameters'] == False:
                        reg.model = Lasso(alpha=inputs['alpha'], random_state=inputs['random state'])

                        export_loo_results(reg, loo, "LassoR_loo")
                    elif inputs['auto hyperparameters']:
                        # Objective: leave-one-out score vs. alpha.
                        def LassoR_TT(alpha):
                            reg.model = Lasso(alpha=alpha)
                            loo_score = loo_cal(reg, loo)
                            return loo_score

                        LassoRbounds = {'alpha': (0.001, inputs['alpha'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=LassoR_TT, pbounds=LassoRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        # NOTE(review): score_best is assigned but never used below.
                        score_best = optimizer.max["target"]
                        st.write("\n", "\n", "best params: ", params_best)

                        reg.model = Lasso(alpha=params_best['alpha'], random_state=inputs['random state'])
                        export_loo_results(reg, loo, "LassoR_loo")

        if inputs['model'] == 'RidgeRegressor':
            # --- Ridge (L2-regularized linear) regression branch ---
            with col2:
                with st.expander('Operator'):
                    # Feature-scaling choice and evaluation strategy for this model.
                    preprocess = st.selectbox('data preprocess', ['StandardScaler', 'MinMaxScaler'])

                    # NOTE(review): label 'data operator' and no label_visibility='collapsed'
                    # here, unlike the sibling sections — inconsistent, confirm intent.
                    operator = st.selectbox('data operator', ('train test split', 'cross val score', 'leave one out'))
                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)

                        reg.features = pd.DataFrame(reg.features)

                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])

                    elif operator == 'cross val score':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        # NOTE(review): sklearn cross-validation requires cv >= 2; the widget
                        # minimum of 1 permits a value that will fail downstream — confirm.
                        cv = st.number_input('cv', 1, 20, 5)

                    elif operator == 'leave one out':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        loo = LeaveOneOut()

            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)
            if button_train:
                if operator == 'train test split':
                    if inputs['auto hyperparameters'] == False:
                        # Fixed alpha taken straight from the sidebar inputs.
                        reg.model = Ridge(alpha=inputs['alpha'], random_state=inputs['random state'])

                        reg.RidgeRegressor()

                        # NOTE(review): result_data is assembled but never used afterwards;
                        # plot_and_export_results appears to read predictions from reg directly.
                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']

                        plot_and_export_results(reg, "RidgeR")

                    elif inputs['auto hyperparameters']:
                        # Objective for Bayesian optimization: train/test score vs. alpha.
                        # NOTE(review): random_state is omitted here but set on the final
                        # model below — confirm whether that matters for this Ridge config.
                        def RidgeR_TT(alpha):
                            reg.model = Ridge(alpha=alpha)
                            reg.RidgeRegressor()
                            return reg.score

                        # Search alpha in (0.001, user-supplied upper bound).
                        RidgeRbounds = {'alpha': (0.001, inputs['alpha'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=RidgeR_TT, pbounds=RidgeRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        # NOTE(review): score_best is assigned but never used below.
                        score_best = optimizer.max["target"]
                        st.write("\n", "\n", "best params: ", params_best)

                        # Refit with the tuned alpha.
                        reg.model = Ridge(alpha=params_best['alpha'], random_state=inputs['random state'])

                        reg.RidgeRegressor()

                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']
                        plot_and_export_results(reg, "RidgeR")
                elif operator == 'cross val score':
                    if inputs['auto hyperparameters'] == False:
                        reg.model = Ridge(alpha=inputs['alpha'], random_state=inputs['random state'])

                        export_cross_val_results(reg, cv, "RidgeR_cv", inputs['random state'])
                    elif inputs['auto hyperparameters']:
                        # Objective: mean cross-validation score vs. alpha.
                        def RidgeR_TT(alpha):
                            reg.model = Ridge(alpha=alpha)
                            cv_score = cv_cal(reg, cv, inputs['random state'])
                            return cv_score

                        RidgeRbounds = {'alpha': (0.001, inputs['alpha'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=RidgeR_TT, pbounds=RidgeRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        # NOTE(review): score_best is assigned but never used below.
                        score_best = optimizer.max["target"]
                        st.write("\n", "\n", "best params: ", params_best)

                        reg.model = Ridge(alpha=params_best['alpha'], random_state=inputs['random state'])
                        export_cross_val_results(reg, cv, "RidgeR_cv", inputs['random state'])

                elif operator == 'leave one out':
                    if inputs['auto hyperparameters'] == False:
                        reg.model = Ridge(alpha=inputs['alpha'], random_state=inputs['random state'])

                        export_loo_results(reg, loo, "RidgeR_loo")
                    elif inputs['auto hyperparameters']:
                        # Objective: leave-one-out score vs. alpha.
                        def RidgeR_TT(alpha):
                            reg.model = Ridge(alpha=alpha)
                            loo_score = loo_cal(reg, loo)
                            return loo_score

                        RidgeRbounds = {'alpha': (0.001, inputs['alpha'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=RidgeR_TT, pbounds=RidgeRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        # NOTE(review): score_best is assigned but never used below.
                        score_best = optimizer.max["target"]
                        st.write("\n", "\n", "best params: ", params_best)

                        reg.model = Ridge(alpha=params_best['alpha'], random_state=inputs['random state'])
                        export_loo_results(reg, loo, "RidgeR_loo")

        if inputs['model'] == 'GradientBoostingRegressor':
            # --- choose how the data is split / evaluated ---
            with col2:
                with st.expander('Operator'):
                    operator = st.selectbox('data operator', ('train test split', 'cross val score', 'leave one out'))
                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])
                    elif operator == 'cross val score':
                        # Minimum is 2: scikit-learn cross validation requires at least two
                        # folds (the previous minimum of 1 crashed at training time).
                        cv = st.number_input('cv', 2, 20, 5)
                    elif operator == 'leave one out':
                        loo = LeaveOneOut()

            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)
            if button_train:

                if operator == 'train test split':
                    if inputs['auto hyperparameters'] == False:
                        # Manual hyperparameters: single fit on the train split,
                        # evaluated on the test split.
                        reg.model = GradientBoostingRegressor(learning_rate=inputs['learning rate'],
                                                              n_estimators=inputs['nestimators'],
                                                              max_features=inputs['max features'],
                                                              random_state=inputs['random state'])

                        reg.GradientBoostingRegressor()

                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']
                        plot_and_export_results(reg, "GradientBoostingR")

                    elif inputs['auto hyperparameters']:
                        # Bayesian-optimization objective: test-set score for a candidate
                        # (learning_rate, n_estimators).  random_state is fixed so the
                        # objective is deterministic (it was previously left unset).
                        def GBR_TT(learning_rate, n_estimators):
                            reg.model = GradientBoostingRegressor(learning_rate=learning_rate,
                                                                  n_estimators=int(n_estimators),
                                                                  max_features=inputs['max features'],
                                                                  random_state=inputs['random state'])
                            reg.GradientBoostingRegressor()
                            return reg.score

                        GBRbounds = {'learning_rate': (0.001, inputs['learning rate']),
                                     'n_estimators': (1, inputs['nestimators'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=GBR_TT, pbounds=GBRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        params_best['n_estimators'] = int(params_best['n_estimators'])
                        params_best['max_features'] = inputs['max features']
                        st.write("\n", "\n", "best params: ", params_best)
                        # Retrain once with the best parameters and report on the held-out split.
                        reg.model = GradientBoostingRegressor(learning_rate=params_best['learning_rate'],
                                                              n_estimators=params_best['n_estimators'],
                                                              max_features=params_best['max_features'],
                                                              random_state=inputs['random state'])
                        reg.GradientBoostingRegressor()

                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']
                        plot_and_export_results(reg, "GBR")
                elif operator == 'cross val score':
                    if inputs['auto hyperparameters'] == False:
                        # Manual hyperparameters evaluated by k-fold cross validation.
                        reg.model = GradientBoostingRegressor(learning_rate=inputs['learning rate'],
                                                              n_estimators=inputs['nestimators'],
                                                              max_features=inputs['max features'],
                                                              random_state=inputs['random state'])

                        export_cross_val_results(reg, cv, "GradientBoostingR_cv", inputs['random state'])
                    elif inputs['auto hyperparameters']:
                        # Objective: mean CV score for a candidate hyperparameter pair.
                        def GBR_TT(learning_rate, n_estimators):
                            reg.model = GradientBoostingRegressor(learning_rate=learning_rate,
                                                                  n_estimators=int(n_estimators),
                                                                  max_features=inputs['max features'],
                                                                  random_state=inputs['random state'])
                            cv_score = cv_cal(reg, cv, inputs['random state'])
                            return cv_score

                        GBRbounds = {'learning_rate': (0.001, inputs['learning rate']),
                                     'n_estimators': (1, inputs['nestimators'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=GBR_TT, pbounds=GBRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        params_best['n_estimators'] = int(params_best['n_estimators'])
                        params_best['max_features'] = inputs['max features']
                        st.write("\n", "\n", "best params: ", params_best)
                        # Re-run cross validation with the best parameters and export.
                        reg.model = GradientBoostingRegressor(learning_rate=params_best['learning_rate'],
                                                              n_estimators=params_best['n_estimators'],
                                                              max_features=params_best['max_features'],
                                                              random_state=inputs['random state'])
                        export_cross_val_results(reg, cv, "GradientBoostingR_cv", inputs['random state'])

                elif operator == 'leave one out':
                    if inputs['auto hyperparameters'] == False:
                        # Manual hyperparameters evaluated by leave-one-out.
                        reg.model = GradientBoostingRegressor(learning_rate=inputs['learning rate'],
                                                              n_estimators=inputs['nestimators'],
                                                              max_features=inputs['max features'],
                                                              random_state=inputs['random state'])
                        export_loo_results(reg, loo, "GradientBoostingR_loo")
                    elif inputs['auto hyperparameters']:
                        # Objective: leave-one-out score for a candidate hyperparameter pair.
                        def GBR_TT(learning_rate, n_estimators):
                            reg.model = GradientBoostingRegressor(learning_rate=learning_rate,
                                                                  n_estimators=int(n_estimators),
                                                                  max_features=inputs['max features'],
                                                                  random_state=inputs['random state'])
                            loo_score = loo_cal(reg, loo)
                            return loo_score

                        GBRbounds = {'learning_rate': (0.001, inputs['learning rate']),
                                     'n_estimators': (1, inputs['nestimators'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=GBR_TT, pbounds=GBRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        params_best['n_estimators'] = int(params_best['n_estimators'])
                        params_best['max_features'] = inputs['max features']
                        st.write("\n", "\n", "best params: ", params_best)
                        # Re-run leave-one-out with the best parameters and export.
                        reg.model = GradientBoostingRegressor(learning_rate=params_best['learning_rate'],
                                                              n_estimators=params_best['n_estimators'],
                                                              max_features=params_best['max_features'],
                                                              random_state=inputs['random state'])
                        export_loo_results(reg, loo, "GradientBoostingR_loo")

        if inputs['model'] == 'XGBRegressor':
            with col2:
                with st.expander('Operator'):
                    # Feature scaling is chosen and applied before the evaluation strategy.
                    preprocess = st.selectbox('data preprocess', ['StandardScaler', 'MinMaxScaler'])
                    operator = st.selectbox('operator', ('train test split', 'cross val score', 'leave one out'),
                                            label_visibility='collapsed')
                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        elif preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)

                        reg.features = pd.DataFrame(reg.features)

                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])

                    elif operator == 'cross val score':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        elif preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        # fit_transform returns a bare ndarray; wrap back into a DataFrame
                        # for consistency with the other two branches (this branch used to
                        # skip the wrap — downstream helpers appear to expect a DataFrame).
                        reg.features = pd.DataFrame(reg.features)
                        # Minimum of 2 folds: scikit-learn cross validation rejects cv=1.
                        cv = st.number_input('cv', 2, 20, 5)

                    elif operator == 'leave one out':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        elif preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        reg.features = pd.DataFrame(reg.features)
                        loo = LeaveOneOut()

            # Training section header; the blocks below only run after the button click.
            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)
            if button_train:
                if operator == 'train test split':
                    # The gbtree and gblinear branches were byte-identical copies (only the
                    # printed label differed); they are merged and the booster name is taken
                    # from the user's selection.  Behavior is unchanged for both values.
                    if inputs['base estimator'] in ("gbtree", "gblinear"):
                        if inputs['auto hyperparameters'] == False:
                            # Manual hyperparameters: single train/test evaluation.
                            reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                         n_estimators=inputs['nestimators'],
                                                         max_depth=inputs['max depth'], subsample=inputs['subsample'],
                                                         colsample_bytree=inputs['subfeature'],
                                                         learning_rate=inputs['learning rate'],
                                                         random_state=inputs['random state'])
                            reg.XGBRegressor()

                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']

                            plot_and_export_results(reg, "XGBR")

                        elif inputs['auto hyperparameters']:
                            # Bayesian-optimization objective: test-set score for a candidate
                            # hyperparameter vector.  random_state is fixed so the objective is
                            # deterministic and consistent with the final rebuild.
                            def XGBR_TT(n_estimators, max_depth, subsample, colsample_bytree, learning_rate):
                                reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                             n_estimators=int(n_estimators),
                                                             max_depth=int(max_depth), subsample=subsample,
                                                             colsample_bytree=colsample_bytree,
                                                             learning_rate=learning_rate,
                                                             random_state=inputs['random state'])
                                reg.XGBRegressor()
                                return reg.score

                            # BUG FIX: the colsample_bytree upper bound now uses 'subfeature'
                            # (it was copy-pasted from 'subsample').
                            XGBRbounds = {'n_estimators': (1, inputs['nestimators']),
                                          'max_depth': (1, inputs['max depth']),
                                          'subsample': (0.5, inputs['subsample']),
                                          'colsample_bytree': (0.5, inputs['subfeature']),
                                          'learning_rate': (0.001, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=XGBR_TT, pbounds=XGBRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_depth'] = int(params_best['max_depth'])
                            params_best['base estimator'] = inputs['base estimator']
                            st.write("\n", "\n", "best params: ", params_best)

                            # Retrain once with the best parameters and report on the held-out split.
                            reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                         n_estimators=params_best['n_estimators'],
                                                         max_depth=params_best['max_depth'],
                                                         subsample=params_best['subsample'],
                                                         colsample_bytree=params_best['colsample_bytree'],
                                                         learning_rate=params_best['learning_rate'],
                                                         random_state=inputs['random state'])

                            reg.XGBRegressor()

                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']
                            plot_and_export_results(reg, "XGBR")

                elif operator == 'cross val score':
                    if inputs['base estimator'] == "gbtree":
                        if inputs['auto hyperparameters'] == False:
                            reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                         n_estimators=inputs['nestimators'],
                                                         max_depth=inputs['max depth'], subsample=inputs['subsample'],
                                                         colsample_bytree=inputs['subfeature'],
                                                         learning_rate=inputs['learning rate'],
                                                         random_state=inputs['random state'])

                            cvs = CV(reg.model, reg.features, reg.targets, cv=cv, scoring=make_scorer(r2_score),
                                     return_train_score=False, return_estimator=True)

                            export_cross_val_results(reg, cv, "XGBR_cv", inputs['random state'])
                        elif inputs['auto hyperparameters']:
                            def XGBR_TT(n_estimators, max_depth, subsample, colsample_bytree, learning_rate):

                                reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                             n_estimators=int(n_estimators),
                                                             max_depth=int(max_depth), subsample=subsample,
                                                             colsample_bytree=colsample_bytree,
                                                             learning_rate=learning_rate)
                                cv_score = cv_cal(reg, cv, inputs['random state'])
                                return cv_score

                            XGBRbounds = {'n_estimators': (1, inputs['nestimators']),
                                          'max_depth': (1, inputs['max depth']),
                                          'subsample': (0.5, inputs['subsample']),
                                          'colsample_bytree': (0.5, inputs['subsample']),
                                          'learning_rate': (0.001, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=XGBR_TT, pbounds=XGBRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_depth'] = int(params_best['max_depth'])
                            params_best['base estimator'] = 'gbtree'
                            st.write("\n", "\n", "best params: ", params_best)

                            reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                         n_estimators=params_best['n_estimators'],
                                                         max_depth=params_best['max_depth'],
                                                         subsample=params_best['subsample'],
                                                         colsample_bytree=params_best['colsample_bytree'],
                                                         learning_rate=params_best['learning_rate'])

                            export_cross_val_results(reg, cv, "XGBR_cv", inputs['random state'])

                    elif inputs['base estimator'] == "gblinear":
                        if inputs['auto hyperparameters'] == False:
                            reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                         n_estimators=inputs['nestimators'],
                                                         max_depth=inputs['max depth'], subsample=inputs['subsample'],
                                                         colsample_bytree=inputs['subfeature'],
                                                         learning_rate=inputs['learning rate'],
                                                         random_state=inputs['random state'])
                            # cvs = CV(reg.model, reg.features, reg.targets, cv = cv, scoring=make_scorer(r2_score), return_train_score=False, return_estimator=True)

                            export_cross_val_results(reg, cv, "XGBR_cv", inputs['random state'])
                        elif inputs['auto hyperparameters']:
                            def XGBR_TT(n_estimators, max_depth, subsample, colsample_bytree, learning_rate):

                                reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                             n_estimators=int(n_estimators),
                                                             max_depth=int(max_depth), subsample=subsample,
                                                             colsample_bytree=colsample_bytree,
                                                             learning_rate=learning_rate)
                                cv_score = cv_cal(reg, cv, inputs['random state'])
                                return cv_score

                            XGBRbounds = {'n_estimators': (1, inputs['nestimators']),
                                          'max_depth': (1, inputs['max depth']),
                                          'subsample': (0.5, inputs['subsample']),
                                          'colsample_bytree': (0.5, inputs['subsample']),
                                          'learning_rate': (0.001, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=XGBR_TT, pbounds=XGBRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_depth'] = int(params_best['max_depth'])
                            params_best['base estimator'] = 'gbtree'
                            st.write("\n", "\n", "best params: ", params_best)

                            reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                         n_estimators=params_best['n_estimators'],
                                                         max_depth=params_best['max_depth'],
                                                         subsample=params_best['subsample'],
                                                         colsample_bytree=params_best['colsample_bytree'],
                                                         learning_rate=params_best['learning_rate'])

                            export_cross_val_results(reg, cv, "XGBR_cv", inputs['random state'])

                elif operator == 'leave one out':
                    if inputs['base estimator'] == "gbtree":
                        if inputs['auto hyperparameters'] == False:
                            reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                         n_estimators=inputs['nestimators'],
                                                         max_depth=inputs['max depth'], subsample=inputs['subsample'],
                                                         colsample_bytree=inputs['subfeature'],
                                                         learning_rate=inputs['learning rate'],
                                                         random_state=inputs['random state'])
                            export_loo_results(reg, loo, "XGBR_loo")
                        elif inputs['auto hyperparameters']:
                            def XGBR_TT(n_estimators, max_depth, subsample, colsample_bytree, learning_rate):

                                reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                             n_estimators=int(n_estimators),
                                                             max_depth=int(max_depth), subsample=subsample,
                                                             colsample_bytree=colsample_bytree,
                                                             learning_rate=learning_rate)
                                loo_score = loo_cal(reg, loo)
                                return loo_score

                            # Search space for Bayesian optimisation; upper bounds come
                            # from the user-configured widget values.
                            XGBRbounds = {'n_estimators': (1, inputs['nestimators']),
                                          'max_depth': (1, inputs['max depth']),
                                          'subsample': (0.5, inputs['subsample']),
                                          # BUGFIX: this bound previously reused inputs['subsample'];
                                          # colsample_bytree has its own 'subfeature' setting
                                          # (cf. the manual-hyperparameter branch).
                                          'colsample_bytree': (0.5, inputs['subfeature']),
                                          'learning_rate': (0.001, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=XGBR_TT, pbounds=XGBRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            # BO searches a continuous space: round integer-valued
                            # hyperparameters back to ints before refitting.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_depth'] = int(params_best['max_depth'])
                            params_best['base estimator'] = 'gbtree'
                            st.write("\n", "\n", "best params: ", params_best)

                            # Refit with the best hyperparameters; pass random_state for
                            # reproducibility, consistent with the manual branch.
                            reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                         n_estimators=params_best['n_estimators'],
                                                         max_depth=params_best['max_depth'],
                                                         subsample=params_best['subsample'],
                                                         colsample_bytree=params_best['colsample_bytree'],
                                                         learning_rate=params_best['learning_rate'],
                                                         random_state=inputs['random state'])

                            export_loo_results(reg, loo, "XGBR_loo")

                    elif inputs['base estimator'] == "gblinear":
                        if inputs['auto hyperparameters'] == False:
                            reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                         n_estimators=inputs['nestimators'],
                                                         max_depth=inputs['max depth'], subsample=inputs['subsample'],
                                                         colsample_bytree=inputs['subfeature'],
                                                         learning_rate=inputs['learning rate'],
                                                         random_state=inputs['random state'])

                            export_loo_results(reg, loo, "XGBR_loo")

                        elif inputs['auto hyperparameters']:
                            def XGBR_TT(n_estimators, max_depth, subsample, colsample_bytree, learning_rate):

                                reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                             n_estimators=int(n_estimators),
                                                             max_depth=int(max_depth), subsample=subsample,
                                                             colsample_bytree=colsample_bytree,
                                                             learning_rate=learning_rate)
                                loo_score = loo_cal(reg, loo)
                                return loo_score

                            XGBRbounds = {'n_estimators': (1, inputs['nestimators']),
                                          'max_depth': (1, inputs['max depth']),
                                          'subsample': (0.5, inputs['subsample']),
                                          'colsample_bytree': (0.5, inputs['subsample']),
                                          'learning_rate': (0.001, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=XGBR_TT, pbounds=XGBRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_depth'] = int(params_best['max_depth'])
                            params_best['base estimator'] = 'gbtree'
                            st.write("\n", "\n", "best params: ", params_best)

                            reg.model = xgb.XGBRegressor(booster=inputs['base estimator'],
                                                         n_estimators=params_best['n_estimators'],
                                                         max_depth=params_best['max_depth'],
                                                         subsample=params_best['subsample'],
                                                         colsample_bytree=params_best['colsample_bytree'],
                                                         learning_rate=params_best['learning_rate'])

                            export_loo_results(reg, loo, "XGBR_loo")

        if inputs['model'] == 'CatBoostRegressor':
            with col2:
                with st.expander('Operator'):
                    # Choose how the model is evaluated: hold-out split,
                    # k-fold cross-validation, or leave-one-out.
                    operator = st.selectbox('data operator', ('train test split', 'cross val score', 'leave one out'))

                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])
                    elif operator == 'cross val score':
                        # NOTE(review): the widget allows cv=1, which scikit-learn's
                        # cross-validation rejects -- consider a minimum of 2.
                        cv = st.number_input('cv', 1, 20, 5)
                    elif operator == 'leave one out':
                        loo = LeaveOneOut()
            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)
            if button_train:

                if operator == 'train test split':
                    if inputs['auto hyperparameters'] == False:
                        # Manual hyperparameters taken straight from the sidebar widgets.
                        reg.model = CatBoostRegressor(iterations=inputs['niteration'],
                                                      learning_rate=inputs['learning rate'], depth=inputs['max depth'],
                                                      random_seed=inputs['random state'])

                        reg.CatBRegressor()

                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']

                        plot_and_export_results(reg, "CatBoostR")
                    elif inputs['auto hyperparameters']:
                        def CatBR_TT(iterations, depth, learning_rate):
                            """BO objective: fit a candidate model on the split, return its hold-out score."""
                            # BUGFIX: seed the candidate models so the optimiser's scores
                            # are reproducible and comparable with the seeded refit below.
                            reg.model = CatBoostRegressor(iterations=int(iterations), learning_rate=learning_rate,
                                                          depth=int(depth), random_seed=inputs['random state'])
                            reg.CatBRegressor()
                            return reg.score

                        # Search space bounds come from the user-configured maxima.
                        CatBRbounds = {'iterations': (1, inputs['niteration']), 'depth': (1, inputs['max depth']),
                                       'learning_rate': (0.001, inputs['learning rate'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=CatBR_TT, pbounds=CatBRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        # Round continuous BO suggestions back to integers.
                        params_best['iterations'] = int(params_best['iterations'])
                        params_best['depth'] = int(params_best['depth'])
                        st.write("\n", "\n", "best params: ", params_best)

                        # Refit with the best hyperparameters.
                        reg.model = CatBoostRegressor(iterations=params_best['iterations'],
                                                      learning_rate=params_best['learning_rate'],
                                                      depth=params_best['depth'], random_seed=inputs['random state'])

                        reg.CatBRegressor()

                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']
                        plot_and_export_results(reg, "CatBR")
                elif operator == 'cross val score':
                    if inputs['auto hyperparameters'] == False:
                        reg.model = CatBoostRegressor(iterations=inputs['niteration'],
                                                      learning_rate=inputs['learning rate'], depth=inputs['max depth'],
                                                      random_seed=inputs['random state'])

                        export_cross_val_results(reg, cv, "CatBoostR_cv", inputs['random state'])
                    elif inputs['auto hyperparameters']:
                        def CatBR_TT(iterations, depth, learning_rate):
                            reg.model = CatBoostRegressor(iterations=int(iterations), learning_rate=learning_rate,
                                                          depth=int(depth))
                            cv_score = cv_cal(reg, cv, inputs['random state'])
                            return cv_score

                        CatBRbounds = {'iterations': (1, inputs['niteration']), 'depth': (1, inputs['max depth']),
                                       'learning_rate': (0.001, inputs['learning rate'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=CatBR_TT, pbounds=CatBRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        params_best['iterations'] = int(params_best['iterations'])
                        params_best['depth'] = int(params_best['depth'])
                        st.write("\n", "\n", "best params: ", params_best)

                        reg.model = CatBoostRegressor(iterations=params_best['iterations'],
                                                      learning_rate=params_best['learning_rate'],
                                                      depth=params_best['depth'], random_seed=inputs['random state'])
                        export_cross_val_results(reg, cv, "CatBoostR_cv", inputs['random state'])

                elif operator == 'leave one out':
                    if inputs['auto hyperparameters'] == False:
                        reg.model = CatBoostRegressor(iterations=inputs['niteration'],
                                                      learning_rate=inputs['learning rate'], depth=inputs['max depth'],
                                                      random_seed=inputs['random state'])
                        export_loo_results(reg, loo, "CatBoostR_loo")
                    elif inputs['auto hyperparameters']:
                        def CatBR_TT(iterations, depth, learning_rate):
                            reg.model = CatBoostRegressor(iterations=int(iterations), learning_rate=learning_rate,
                                                          depth=int(depth))
                            loo_score = loo_cal(reg, loo)
                            return loo_score

                        CatBRbounds = {'iterations': (1, inputs['niteration']), 'depth': (1, inputs['max depth']),
                                       'learning_rate': (0.001, inputs['learning rate'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=CatBR_TT, pbounds=CatBRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        params_best['iterations'] = int(params_best['iterations'])
                        params_best['depth'] = int(params_best['depth'])
                        st.write("\n", "\n", "best params: ", params_best)

                        reg.model = CatBoostRegressor(iterations=params_best['iterations'],
                                                      learning_rate=params_best['learning_rate'],
                                                      depth=params_best['depth'], random_seed=inputs['random state'])
                        export_loo_results(reg, loo, "CatBoostR_loo")

        if inputs['model'] == 'MLPRegressor':
            with col2:
                with st.expander('Operator'):

                    # MLPs are scale-sensitive, so a feature scaler is offered here.
                    preprocess = st.selectbox('data preprocess', ['StandardScaler', 'MinMaxScaler'])

                    operator = st.selectbox('operator', ('train test split', 'cross val score', 'leave one out'),
                                            label_visibility='collapsed')
                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)

                        # fit_transform returns an ndarray; restore a DataFrame for the
                        # downstream train/test split.
                        reg.features = pd.DataFrame(reg.features)

                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])

                    elif operator == 'cross val score':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        # NOTE(review): unlike the split branch above, the scaled ndarray
                        # is not wrapped back into a DataFrame here -- confirm the
                        # downstream cv helpers accept a plain array.
                        cv = st.number_input('cv', 1, 20, 5)

                    elif operator == 'leave one out':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        loo = LeaveOneOut()
            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)
            if button_train:
                if operator == 'train test split':
                    if inputs['auto hyperparameters'] == False:
                        # Manual hyperparameters taken straight from the sidebar widgets.
                        reg.model = MLPRegressor(hidden_layer_sizes=inputs['hidden layer size'],
                                                 activation=inputs['activation'], solver=inputs['solver'],
                                                 batch_size=inputs['batch size'], learning_rate=inputs['learning rate'],
                                                 max_iter=inputs['max iter'],
                                                 random_state=inputs['random state'])
                        reg.MLPRegressor()

                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']
                        plot_and_export_results(reg, "MLP")
                    elif inputs['auto hyperparameters']:
                        def MLPR_TT(layer_size, neuron_size):
                            # BO objective: tune network depth (layer_size) and width
                            # (neuron_size); returns the hold-out score.
                            layer_size = int(layer_size)
                            neuron_size = int(neuron_size)
                            # e.g. layer_size=3, neuron_size=16 -> (16, 16, 16)
                            hidden_layer_size = tuple([neuron_size] * layer_size)
                            reg.model = MLPRegressor(hidden_layer_sizes=hidden_layer_size,
                                                     activation=inputs['activation'], solver=inputs['solver'],
                                                     batch_size=inputs['batch size'],
                                                     learning_rate=inputs['learning rate'], max_iter=inputs['max iter'],
                                                     random_state=inputs['random state'])
                            reg.MLPRegressor()
                            return reg.score

                        MLPRbounds = {'layer_size': (1, inputs['layer size']),
                                      'neuron_size': (1, inputs['neuron size'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=MLPR_TT, pbounds=MLPRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        # Round continuous BO suggestions back to integers.
                        params_best['layer_size'] = int(params_best['layer_size'])
                        params_best['neuron_size'] = int(params_best['neuron_size'])
                        st.write("\n", "\n", "best params: ", params_best)
                        # n * [m] replicates the neuron count across n hidden layers.
                        hidden_layer_size = tuple(params_best['layer_size'] * [params_best['neuron_size']])
                        reg.model = MLPRegressor(hidden_layer_sizes=hidden_layer_size, activation=inputs['activation'],
                                                 solver=inputs['solver'],
                                                 batch_size=inputs['batch size'], learning_rate=inputs['learning rate'],
                                                 max_iter=inputs['max iter'],
                                                 random_state=inputs['random state'])

                        reg.MLPRegressor()

                        result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                        result_data.columns = ['actual', 'prediction']
                        plot_and_export_results(reg, "MLPR")

                elif operator == 'cross val score':
                    if inputs['auto hyperparameters'] == False:
                        # Manual hyperparameters, scored by cross-validation.
                        reg.model = MLPRegressor(hidden_layer_sizes=inputs['hidden layer size'],
                                                 activation=inputs['activation'], solver=inputs['solver'],
                                                 batch_size=inputs['batch size'], learning_rate=inputs['learning rate'],
                                                 max_iter=inputs['max iter'],
                                                 random_state=inputs['random state'])

                        export_cross_val_results(reg, cv, "MLP_cv", inputs['random state'])
                    elif inputs['auto hyperparameters']:
                        def MLPR_TT(layer_size, neuron_size):
                            # BO objective: cross-validated score for the given
                            # network depth/width.
                            layer_size = int(layer_size)
                            neuron_size = int(neuron_size)
                            hidden_layer_size = tuple([neuron_size] * layer_size)
                            reg.model = MLPRegressor(hidden_layer_sizes=hidden_layer_size,
                                                     activation=inputs['activation'], solver=inputs['solver'],
                                                     batch_size=inputs['batch size'],
                                                     learning_rate=inputs['learning rate'], max_iter=inputs['max iter'],
                                                     random_state=inputs['random state'])
                            cv_score = cv_cal(reg, cv, inputs['random state'])
                            return cv_score

                        MLPRbounds = {'layer_size': (1, inputs['layer size']),
                                      'neuron_size': (1, inputs['neuron size'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=MLPR_TT, pbounds=MLPRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        # Round continuous BO suggestions back to integers.
                        params_best['layer_size'] = int(params_best['layer_size'])
                        params_best['neuron_size'] = int(params_best['neuron_size'])
                        st.write("\n", "\n", "best params: ", params_best)
                        # n * [m] replicates the neuron count across n hidden layers.
                        hidden_layer_size = tuple(params_best['layer_size'] * [params_best['neuron_size']])
                        reg.model = MLPRegressor(hidden_layer_sizes=hidden_layer_size, activation=inputs['activation'],
                                                 solver=inputs['solver'],
                                                 batch_size=inputs['batch size'], learning_rate=inputs['learning rate'],
                                                 max_iter=inputs['max iter'],
                                                 random_state=inputs['random state'])

                        export_cross_val_results(reg, cv, "MLP_cv", inputs['random state'])
                elif operator == 'leave one out':
                    if inputs['auto hyperparameters'] == False:
                        # Manual hyperparameters, scored by leave-one-out.
                        reg.model = MLPRegressor(hidden_layer_sizes=inputs['hidden layer size'],
                                                 activation=inputs['activation'], solver=inputs['solver'],
                                                 batch_size=inputs['batch size'], learning_rate=inputs['learning rate'],
                                                 max_iter=inputs['max iter'],
                                                 random_state=inputs['random state'])

                        export_loo_results(reg, loo, "MLP_loo")
                    elif inputs['auto hyperparameters']:
                        def MLPR_TT(layer_size, neuron_size):
                            # BO objective: leave-one-out score for the given
                            # network depth/width.
                            layer_size = int(layer_size)
                            neuron_size = int(neuron_size)
                            hidden_layer_size = tuple([neuron_size] * layer_size)
                            reg.model = MLPRegressor(hidden_layer_sizes=hidden_layer_size,
                                                     activation=inputs['activation'], solver=inputs['solver'],
                                                     batch_size=inputs['batch size'],
                                                     learning_rate=inputs['learning rate'], max_iter=inputs['max iter'],
                                                     random_state=inputs['random state'])
                            loo_score = loo_cal(reg, loo)
                            return loo_score

                        MLPRbounds = {'layer_size': (1, inputs['layer size']),
                                      'neuron_size': (1, inputs['neuron size'])}

                        with st.expander('hyperparameter opt'):
                            optimizer = BayesianOptimization(f=MLPR_TT, pbounds=MLPRbounds,
                                                             random_state=inputs['random state'],
                                                             allow_duplicate_points=True)
                            optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                        params_best = optimizer.max["params"]
                        score_best = optimizer.max["target"]
                        # Round continuous BO suggestions back to integers.
                        params_best['layer_size'] = int(params_best['layer_size'])
                        params_best['neuron_size'] = int(params_best['neuron_size'])
                        st.write("\n", "\n", "best params: ", params_best)
                        # n * [m] replicates the neuron count across n hidden layers.
                        hidden_layer_size = tuple(params_best['layer_size'] * [params_best['neuron_size']])
                        reg.model = MLPRegressor(hidden_layer_sizes=hidden_layer_size, activation=inputs['activation'],
                                                 solver=inputs['solver'],
                                                 batch_size=inputs['batch size'], learning_rate=inputs['learning rate'],
                                                 max_iter=inputs['max iter'],
                                                 random_state=inputs['random state'])

                        export_loo_results(reg, loo, "MLP_loo")
            st.write('---')

        if inputs['model'] == 'BaggingRegressor':
            with col2:
                with st.expander('Operator'):
                    # Optional feature scaling plus the evaluation strategy.
                    preprocess = st.selectbox('data preprocess', ['StandardScaler', 'MinMaxScaler'])
                    operator = st.selectbox('operator', ('train test split', 'cross val score', 'leave one out'),
                                            label_visibility='collapsed')
                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)

                        # fit_transform returns an ndarray; restore a DataFrame for the
                        # downstream train/test split.
                        reg.features = pd.DataFrame(reg.features)

                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])

                    elif operator == 'cross val score':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        # NOTE(review): unlike the split and leave-one-out branches, the
                        # scaled ndarray is not wrapped back into a DataFrame here --
                        # confirm the downstream cv helpers accept a plain array.
                        cv = st.number_input('cv', 1, 20, 5)

                    elif operator == 'leave one out':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        reg.features = pd.DataFrame(reg.features)
                        loo = LeaveOneOut()

            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)
            if button_train:

                if operator == 'train test split':
                    if inputs['base estimator'] == "DecisionTree":
                        if inputs['auto hyperparameters'] == False:
                            # estimator=None selects BaggingRegressor's default base
                            # learner (a decision tree).
                            # BUGFIX: pass random_state so results are reproducible,
                            # consistent with the other model branches.
                            reg.model = BaggingRegressor(estimator=None, n_estimators=inputs['nestimators'],
                                                         max_samples=inputs['max samples'],
                                                         max_features=inputs['max features'], n_jobs=-1,
                                                         random_state=inputs['random state'])

                            reg.BaggingRegressor()

                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']

                            plot_and_export_results(reg, "BaggingR")
                        elif inputs['auto hyperparameters']:
                            def BaggingR_TT(n_estimators, max_samples, max_features):
                                """BO objective: hold-out score of a seeded bagging ensemble."""
                                reg.model = BaggingRegressor(estimator=None, n_estimators=int(n_estimators),
                                                             max_samples=int(max_samples),
                                                             max_features=int(max_features), n_jobs=-1,
                                                             random_state=inputs['random state'])
                                reg.BaggingRegressor()
                                return reg.score

                            BaggingRbounds = {'n_estimators': (1, inputs['nestimators']),
                                              'max_samples': (1, inputs['max samples']),
                                              'max_features': (1, inputs['max features'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=BaggingR_TT, pbounds=BaggingRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            # Round continuous BO suggestions back to integers.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_samples'] = int(params_best['max_samples'])
                            params_best['max_features'] = int(params_best['max_features'])
                            params_best['base estimator'] = 'decision tree'
                            st.write("\n", "\n", "best params: ", params_best)

                            # Refit with the best hyperparameters (seeded for reproducibility).
                            reg.model = BaggingRegressor(estimator=None, n_estimators=params_best['n_estimators'],
                                                         max_samples=params_best['max_samples'],
                                                         max_features=params_best['max_features'], n_jobs=-1,
                                                         random_state=inputs['random state'])

                            reg.BaggingRegressor()

                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']
                            plot_and_export_results(reg, "BaggingR")

                    elif inputs['base estimator'] == "SupportVector":
                        if inputs['auto hyperparameters'] == False:
                            # Manual hyperparameters: bag SVR base learners and evaluate on the held-out split.
                            reg.model = BaggingRegressor(estimator=SVR(), n_estimators=inputs['nestimators'],
                                                         max_samples=inputs['max samples'],
                                                         max_features=inputs['max features'], n_jobs=-1)
                            reg.BaggingRegressor()

                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']

                            plot_and_export_results(reg, "BaggingR")
                        elif inputs['auto hyperparameters']:
                            def BaggingR_TT(n_estimators, max_samples, max_features):
                                # Objective for Bayesian optimization: train/test score of an
                                # SVR bagging ensemble (BO proposes floats, hence the int casts).
                                reg.model = BaggingRegressor(estimator=SVR(), n_estimators=int(n_estimators),
                                                             max_samples=int(max_samples),
                                                             max_features=int(max_features), n_jobs=-1)
                                reg.BaggingRegressor()
                                return reg.score

                            # Search bounds: user-supplied widget values act as upper limits.
                            BaggingRbounds = {'n_estimators': (1, inputs['nestimators']),
                                              'max_samples': (1, inputs['max samples']),
                                              'max_features': (1, inputs['max features'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=BaggingR_TT, pbounds=BaggingRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            # Cast the continuous BO suggestions back to the integer domain sklearn expects.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_samples'] = int(params_best['max_samples'])
                            params_best['max_features'] = int(params_best['max_features'])
                            params_best['base estimator'] = 'support vector machine'
                            st.write("\n", "\n", "best params: ", params_best)

                            # Refit the final model with the best-found hyperparameters.
                            reg.model = BaggingRegressor(estimator=SVR(), n_estimators=params_best['n_estimators'],
                                                         max_samples=params_best['max_samples'],
                                                         max_features=params_best['max_features'], n_jobs=-1)

                            reg.BaggingRegressor()

                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']
                            plot_and_export_results(reg, "BaggingR")
                    elif inputs['base estimator'] == "LinearRegression":
                        if inputs['auto hyperparameters'] == False:
                            # Manual hyperparameters: bag plain linear-regression base learners.
                            reg.model = BaggingRegressor(estimator=LinearR(), n_estimators=inputs['nestimators'],
                                                         max_samples=inputs['max samples'],
                                                         max_features=inputs['max features'], n_jobs=-1)
                            reg.BaggingRegressor()

                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']

                            plot_and_export_results(reg, "BaggingR")
                        elif inputs['auto hyperparameters']:
                            def BaggingR_TT(n_estimators, max_samples, max_features):
                                # Objective for Bayesian optimization: train/test score of a
                                # linear-regression bagging ensemble.
                                reg.model = BaggingRegressor(estimator=LinearR(), n_estimators=int(n_estimators),
                                                             max_samples=int(max_samples),
                                                             max_features=int(max_features), n_jobs=-1)
                                reg.BaggingRegressor()
                                return reg.score

                            BaggingRbounds = {'n_estimators': (1, inputs['nestimators']),
                                              'max_samples': (1, inputs['max samples']),
                                              'max_features': (1, inputs['max features'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=BaggingR_TT, pbounds=BaggingRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            # Cast the continuous BO suggestions back to integers for sklearn.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_samples'] = int(params_best['max_samples'])
                            params_best['max_features'] = int(params_best['max_features'])
                            params_best['base estimator'] = 'linear 回归'
                            st.write("\n", "\n", "best params: ", params_best)

                            # Refit the final model with the best-found hyperparameters.
                            reg.model = BaggingRegressor(estimator=LinearR(), n_estimators=params_best['n_estimators'],
                                                         max_samples=params_best['max_samples'],
                                                         max_features=params_best['max_features'], n_jobs=-1)

                            reg.BaggingRegressor()

                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']
                            plot_and_export_results(reg, "BaggingR")
                elif operator == 'cross val score':
                    if inputs['base estimator'] == "DecisionTree":
                        if inputs['auto hyperparameters'] == False:
                            # estimator=None -> default DecisionTreeRegressor base learner.
                            reg.model = BaggingRegressor(estimator=None, n_estimators=inputs['nestimators'],
                                                         max_samples=inputs['max samples'],
                                                         max_features=inputs['max features'], n_jobs=-1)

                            export_cross_val_results(reg, cv, "BaggingR_cv", inputs['random state'])
                        elif inputs['auto hyperparameters']:
                            def BaggingR_TT(n_estimators, max_samples, max_features):
                                # Objective for Bayesian optimization: cross-validated score of a
                                # decision-tree bagging ensemble.
                                reg.model = BaggingRegressor(estimator=None, n_estimators=int(n_estimators),
                                                             max_samples=int(max_samples),
                                                             max_features=int(max_features), n_jobs=-1)
                                cv_score = cv_cal(reg, cv, inputs['random state'])
                                return cv_score

                            BaggingRbounds = {'n_estimators': (1, inputs['nestimators']),
                                              'max_samples': (1, inputs['max samples']),
                                              'max_features': (1, inputs['max features'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=BaggingR_TT, pbounds=BaggingRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            # Cast the continuous BO suggestions back to integers for sklearn.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_samples'] = int(params_best['max_samples'])
                            params_best['max_features'] = int(params_best['max_features'])
                            params_best['base estimator'] = 'decision tree'
                            st.write("\n", "\n", "best params: ", params_best)

                            reg.model = BaggingRegressor(estimator=None, n_estimators=params_best['n_estimators'],
                                                         max_samples=params_best['max_samples'],
                                                         max_features=params_best['max_features'], n_jobs=-1)

                            export_cross_val_results(reg, cv, "BaggingR_cv", inputs['random state'])
                    elif inputs['base estimator'] == "SupportVector":
                        if inputs['auto hyperparameters'] == False:
                            reg.model = BaggingRegressor(estimator=SVR(), n_estimators=inputs['nestimators'],
                                                         max_samples=inputs['max samples'],
                                                         max_features=inputs['max features'], n_jobs=-1)

                            # cvs = CV(reg.model, reg.features, reg.targets, cv = cv, scoring=make_scorer(r2_score), return_train_score=False, return_estimator=True)

                            export_cross_val_results(reg, cv, "BaggingR_cv", inputs['random state'])

                        elif inputs['auto hyperparameters']:
                            def BaggingR_TT(n_estimators, max_samples, max_features):

                                reg.model = BaggingRegressor(estimator=SVR(), n_estimators=int(n_estimators),
                                                             max_samples=int(max_samples),
                                                             max_features=int(max_features), n_jobs=-1)
                                cv_score = cv_cal(reg, cv, inputs['random state'])
                                return cv_score

                            BaggingRbounds = {'n_estimators': (1, inputs['nestimators']),
                                              'max_samples': (1, inputs['max samples']),
                                              'max_features': (1, inputs['max features'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=BaggingR_TT, pbounds=BaggingRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_samples'] = int(params_best['max_samples'])
                            params_best['max_features'] = int(params_best['max_features'])
                            params_best['base estimator'] = 'support vector machine'
                            st.write("\n", "\n", "best params: ", params_best)

                            reg.model = BaggingRegressor(estimator=None, n_estimators=params_best['n_estimators'],
                                                         max_samples=params_best['max_samples'],
                                                         max_features=params_best['max_features'], n_jobs=-1)

                            export_cross_val_results(reg, cv, "BaggingR_cv", inputs['random state'])

                    elif inputs['base estimator'] == "LinearRegression":
                        if inputs['auto hyperparameters'] == False:
                            # Manual hyperparameters: bag linear-regression base learners, score by CV.
                            reg.model = BaggingRegressor(estimator=LinearR(), n_estimators=inputs['nestimators'],
                                                         max_samples=inputs['max samples'],
                                                         max_features=inputs['max features'], n_jobs=-1)

                            export_cross_val_results(reg, cv, "BaggingR_cv", inputs['random state'])
                        elif inputs['auto hyperparameters']:
                            def BaggingR_TT(n_estimators, max_samples, max_features):
                                # Objective for Bayesian optimization: cross-validated score of a
                                # linear-regression bagging ensemble.
                                reg.model = BaggingRegressor(estimator=LinearR(), n_estimators=int(n_estimators),
                                                             max_samples=int(max_samples),
                                                             max_features=int(max_features), n_jobs=-1)
                                cv_score = cv_cal(reg, cv, inputs['random state'])
                                return cv_score

                            BaggingRbounds = {'n_estimators': (1, inputs['nestimators']),
                                              'max_samples': (1, inputs['max samples']),
                                              'max_features': (1, inputs['max features'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=BaggingR_TT, pbounds=BaggingRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            # Cast the continuous BO suggestions back to integers for sklearn.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_samples'] = int(params_best['max_samples'])
                            params_best['max_features'] = int(params_best['max_features'])
                            params_best['base estimator'] = 'linear 回归'
                            st.write("\n", "\n", "best params: ", params_best)
                            reg.model = BaggingRegressor(estimator=LinearR(), n_estimators=params_best['n_estimators'],
                                                         max_samples=params_best['max_samples'],
                                                         max_features=params_best['max_features'], n_jobs=-1)

                            export_cross_val_results(reg, cv, "BaggingR_cv", inputs['random state'])
                elif operator == 'leave one out':
                    if inputs['base estimator'] == "DecisionTree":
                        if inputs['auto hyperparameters'] == False:
                            reg.model = BaggingRegressor(
                                estimator=tree.DecisionTreeRegressor(random_state=inputs['random state']),
                                n_estimators=inputs['nestimators'],
                                max_samples=inputs['max samples'], max_features=inputs['max features'], n_jobs=-1)

                            export_loo_results(reg, loo, "BaggingR_loo")
                        elif inputs['auto hyperparameters']:

                            # NOTE(review): the manual branch above seeds the tree with
                            # random_state, while this auto branch uses estimator=None
                            # (an unseeded default tree) — confirm whether that is intended.
                            def BaggingR_TT(n_estimators, max_samples, max_features):
                                # Objective for Bayesian optimization: leave-one-out score of a
                                # decision-tree bagging ensemble.
                                reg.model = BaggingRegressor(estimator=None, n_estimators=int(n_estimators),
                                                             max_samples=int(max_samples),
                                                             max_features=int(max_features), n_jobs=-1)
                                loo_score = loo_cal(reg, loo)
                                return loo_score

                            BaggingRbounds = {'n_estimators': (1, inputs['nestimators']),
                                              'max_samples': (1, inputs['max samples']),
                                              'max_features': (1, inputs['max features'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=BaggingR_TT, pbounds=BaggingRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            # Cast the continuous BO suggestions back to integers for sklearn.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_samples'] = int(params_best['max_samples'])
                            params_best['max_features'] = int(params_best['max_features'])
                            params_best['base estimator'] = 'decision tree'
                            st.write("\n", "\n", "best params: ", params_best)
                            reg.model = BaggingRegressor(estimator=None, n_estimators=params_best['n_estimators'],
                                                         max_samples=params_best['max_samples'],
                                                         max_features=params_best['max_features'], n_jobs=-1)
                            export_loo_results(reg, loo, "BaggingR_loo")
                    elif inputs['base estimator'] == "SupportVector":
                        if inputs['auto hyperparameters'] == False:
                            # Manual hyperparameters: bag SVR base learners, score by leave-one-out.
                            reg.model = BaggingRegressor(estimator=SVR(), n_estimators=inputs['nestimators'],
                                                         max_samples=inputs['max samples'],
                                                         max_features=inputs['max features'], n_jobs=-1)
                            export_loo_results(reg, loo, "BaggingR_loo")
                        elif inputs['auto hyperparameters']:
                            def BaggingR_TT(n_estimators, max_samples, max_features):
                                # Objective for Bayesian optimization: leave-one-out score of an
                                # SVR bagging ensemble.
                                reg.model = BaggingRegressor(estimator=SVR(), n_estimators=int(n_estimators),
                                                             max_samples=int(max_samples),
                                                             max_features=int(max_features), n_jobs=-1)
                                loo_score = loo_cal(reg, loo)
                                return loo_score

                            BaggingRbounds = {'n_estimators': (1, inputs['nestimators']),
                                              'max_samples': (1, inputs['max samples']),
                                              'max_features': (1, inputs['max features'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=BaggingR_TT, pbounds=BaggingRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            # Cast the continuous BO suggestions back to integers for sklearn.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_samples'] = int(params_best['max_samples'])
                            params_best['max_features'] = int(params_best['max_features'])
                            params_best['base estimator'] = 'support vector machine'
                            st.write("\n", "\n", "best params: ", params_best)
                            reg.model = BaggingRegressor(estimator=SVR(), n_estimators=params_best['n_estimators'],
                                                         max_samples=params_best['max_samples'],
                                                         max_features=params_best['max_features'], n_jobs=-1)
                            export_loo_results(reg, loo, "BaggingR_loo")

                    elif inputs['base estimator'] == "LinearRegression":
                        if inputs['auto hyperparameters'] == False:
                            reg.model = BaggingRegressor(estimator=LinearR(), n_estimators=inputs['nestimators'],
                                                         max_samples=inputs['max samples'],
                                                         max_features=inputs['max features'], n_jobs=-1)

                            export_loo_results(reg, loo, "BaggingR_loo")
                        elif inputs['auto hyperparameters']:
                            def BaggingR_TT(n_estimators, max_samples, max_features):
                                reg.model = BaggingRegressor(estimator=LinearR(), n_estimators=int(n_estimators),
                                                             max_samples=int(max_samples),
                                                             max_features=int(max_features), n_jobs=-1)
                                loo_score = loo_cal(reg, loo)
                                return loo_score

                            BaggingRbounds = {'n_estimators': (1, inputs['nestimators']),
                                              'max_samples': (1, inputs['max samples']),
                                              'max_features': (1, inputs['max features'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=BaggingR_TT, pbounds=BaggingRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['max_samples'] = int(params_best['max_samples'])
                            params_best['max_features'] = int(params_best['max_features'])
                            params_best['base estimator'] = 'support vector machine'
                            st.write("\n", "\n", "best params: ", params_best)
                            reg.model = BaggingRegressor(estimator=LinearR(), n_estimators=params_best['n_estimators'],
                                                         max_samples=params_best['max_samples'],
                                                         max_features=params_best['max_features'], n_jobs=-1)
                            export_loo_results(reg, loo, "BaggingR_loo")

        if inputs['model'] == 'AdaBoostRegressor':
            with col2:
                with st.expander('Operator'):
                    # Preprocessing and evaluation-strategy widgets for AdaBoost.
                    preprocess = st.selectbox('data preprocess', ['StandardScaler', 'MinMaxScaler'])
                    operator = st.selectbox('operator', ('train test split', 'cross val score', 'leave one out'),
                                            label_visibility='collapsed')
                    if operator == 'train test split':
                        inputs['test size'] = st.slider('test size', 0.1, 0.5, 0.2)
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)

                        # Scalers return ndarrays; wrap back into a DataFrame for downstream code.
                        reg.features = pd.DataFrame(reg.features)

                        reg.Xtrain, reg.Xtest, reg.Ytrain, reg.Ytest = TTS(reg.features, reg.targets,
                                                                           test_size=inputs['test size'],
                                                                           random_state=inputs['random state'])

                    elif operator == 'cross val score':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        # BUG FIX: minimum was 1, but k-fold cross-validation requires at
                        # least 2 splits (sklearn raises for cv=1); default stays 5.
                        cv = st.number_input('cv', 2, 20, 5)

                    elif operator == 'leave one out':
                        if preprocess == 'StandardScaler':
                            reg.features = StandardScaler().fit_transform(reg.features)
                        if preprocess == 'MinMaxScaler':
                            reg.features = MinMaxScaler().fit_transform(reg.features)
                        reg.features = pd.DataFrame(reg.features)
                        loo = LeaveOneOut()

            colored_header(label="Training", description=" ", color_name="violet-30")
            with st.container():
                button_train = st.button('Train', use_container_width=True)
            if button_train:

                if operator == 'train test split':

                    if inputs['base estimator'] == "DecisionTree":
                        if inputs['auto hyperparameters'] == False:
                            # Manual hyperparameters: boost decision trees and evaluate on the split.
                            reg.model = AdaBoostRegressor(estimator=tree.DecisionTreeRegressor(),
                                                          n_estimators=inputs['nestimators'],
                                                          learning_rate=inputs['learning rate'],
                                                          random_state=inputs['random state'])
                            reg.AdaBoostRegressor()
                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']
                            plot_and_export_results(reg, "AdaBoostR")

                        elif inputs['auto hyperparameters']:
                            def AdaBoostR_TT(n_estimators, learning_rate):
                                # Objective for Bayesian optimization: train/test score of a
                                # decision-tree AdaBoost ensemble.
                                # NOTE(review): no random_state here, unlike the final refit
                                # below — objective evaluations are not seeded; confirm intent.
                                reg.model = AdaBoostRegressor(estimator=tree.DecisionTreeRegressor(),
                                                              n_estimators=int(n_estimators),
                                                              learning_rate=learning_rate)
                                reg.AdaBoostRegressor()
                                return reg.score

                            # NOTE(review): the learning-rate lower bound is 1, which excludes
                            # the common range (0, 1) — confirm this is intended.
                            AdaBoostRbounds = {'n_estimators': (1, inputs['nestimators']),
                                               'learning_rate': (1, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=AdaBoostR_TT, pbounds=AdaBoostRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['base estimator'] = 'decision tree'
                            st.write("\n", "\n", "best params: ", params_best)

                            # Refit the final model with the best-found hyperparameters.
                            reg.model = AdaBoostRegressor(estimator=tree.DecisionTreeRegressor(),
                                                          n_estimators=params_best['n_estimators'],
                                                          learning_rate=params_best['learning_rate'],
                                                          random_state=inputs['random state'])
                            reg.AdaBoostRegressor()
                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']
                            plot_and_export_results(reg, "AdaBoostR")

                    elif inputs['base estimator'] == "SupportVector":
                        if inputs['auto hyperparameters'] == False:
                            # Manual hyperparameters: boost SVR base learners.
                            reg.model = AdaBoostRegressor(estimator=SVR(), n_estimators=inputs['nestimators'],
                                                          learning_rate=inputs['learning rate'],
                                                          random_state=inputs['random state'])
                            reg.AdaBoostRegressor()
                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']
                            plot_and_export_results(reg, "AdaBoostR")
                        elif inputs['auto hyperparameters']:
                            def AdaBoostR_TT(n_estimators, learning_rate):
                                # Objective for Bayesian optimization: train/test score of an
                                # SVR AdaBoost ensemble.
                                reg.model = AdaBoostRegressor(estimator=SVR(),
                                                              n_estimators=int(n_estimators),
                                                              learning_rate=learning_rate)
                                reg.AdaBoostRegressor()
                                return reg.score

                            AdaBoostRbounds = {'n_estimators': (1, inputs['nestimators']),
                                               'learning_rate': (1, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=AdaBoostR_TT, pbounds=AdaBoostRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['base estimator'] = 'support vector machine'
                            st.write("\n", "\n", "best params: ", params_best)

                            # Refit the final model with the best-found hyperparameters.
                            reg.model = AdaBoostRegressor(estimator=SVR(),
                                                          n_estimators=params_best['n_estimators'],
                                                          learning_rate=params_best['learning_rate'],
                                                          random_state=inputs['random state'])
                            reg.AdaBoostRegressor()
                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']
                            plot_and_export_results(reg, "AdaBoostR")


                    elif inputs['base estimator'] == "LinearRegression":
                        if inputs['auto hyperparameters'] == False:
                            # Manual hyper-parameters: AdaBoost with a linear-regression base learner.
                            reg.model = AdaBoostRegressor(estimator=LinearR(), n_estimators=inputs['nestimators'],
                                                          learning_rate=inputs['learning rate'],
                                                          random_state=inputs['random state'])
                            reg.AdaBoostRegressor()
                            # Hold-out actual vs. predicted table for plotting/export.
                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']
                            plot_and_export_results(reg, "AdaBoostR")
                        elif inputs['auto hyperparameters']:
                            def AdaBoostR_TT(n_estimators, learning_rate):
                                """Objective for Bayesian optimization: train/test score of AdaBoost(LinearR).

                                NOTE(review): random_state is not fixed here, unlike the final refit,
                                so the search objective may be noisy — confirm intended.
                                """
                                reg.model = AdaBoostRegressor(estimator=LinearR(),
                                                              n_estimators=int(n_estimators),
                                                              learning_rate=learning_rate)
                                reg.AdaBoostRegressor()
                                return reg.score

                            # Search bounds. NOTE(review): a lower bound of 1 on learning_rate assumes
                            # inputs['learning rate'] > 1 — confirm the UI enforces this.
                            AdaBoostRbounds = {'n_estimators': (1, inputs['nestimators']),
                                               'learning_rate': (1, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=AdaBoostR_TT, pbounds=AdaBoostRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]  # NOTE(review): assigned but never used below
                            # The optimizer samples floats; cast the estimator count back to int.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['base estimator'] = 'linear 回归'
                            st.write("\n", "\n", "best params: ", params_best)

                            # Refit with the tuned hyper-parameters and a fixed seed.
                            reg.model = AdaBoostRegressor(estimator=LinearR(),
                                                          n_estimators=params_best['n_estimators'],
                                                          learning_rate=params_best['learning_rate'],
                                                          random_state=inputs['random state'])
                            reg.AdaBoostRegressor()
                            result_data = pd.concat([reg.Ytest, pd.DataFrame(reg.Ypred)], axis=1)
                            result_data.columns = ['actual', 'prediction']
                            plot_and_export_results(reg, "AdaBoostR")

                elif operator == 'cross val score':
                    if inputs['base estimator'] == "DecisionTree":
                        if inputs['auto hyperparameters'] == False:
                            # Manual hyper-parameters: cross-validate AdaBoost with a decision-tree base learner.
                            reg.model = AdaBoostRegressor(estimator=tree.DecisionTreeRegressor(),
                                                          n_estimators=inputs['nestimators'],
                                                          learning_rate=inputs['learning rate'],
                                                          random_state=inputs['random state'])
                            # cvs = CV(reg.model, reg.features, reg.targets, cv = cv, scoring=make_scorer(r2_score), return_train_score=False, return_estimator=True)
                            export_cross_val_results(reg, cv, "AdaBoostR_cv", inputs['random state'])
                        elif inputs['auto hyperparameters']:
                            def AdaBoostR_TT(n_estimators, learning_rate):
                                """Objective for Bayesian optimization: cross-val score of AdaBoost(tree).

                                NOTE(review): random_state is not fixed here, unlike the final refit —
                                confirm whether the noisy objective is intended.
                                """
                                reg.model = AdaBoostRegressor(estimator=tree.DecisionTreeRegressor(),
                                                              n_estimators=int(n_estimators),
                                                              learning_rate=learning_rate)
                                cv_score = cv_cal(reg, cv, inputs['random state'])
                                return cv_score

                            # Search bounds. NOTE(review): a lower bound of 1 on learning_rate assumes
                            # inputs['learning rate'] > 1 — confirm the UI enforces this.
                            AdaBoostRbounds = {'n_estimators': (1, inputs['nestimators']),
                                               'learning_rate': (1, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=AdaBoostR_TT, pbounds=AdaBoostRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]  # NOTE(review): assigned but never used below
                            # The optimizer samples floats; cast the estimator count back to int.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['base estimator'] = 'decision tree'
                            st.write("\n", "\n", "best params: ", params_best)

                            # Refit with the tuned hyper-parameters and a fixed seed, then cross-validate.
                            reg.model = AdaBoostRegressor(estimator=tree.DecisionTreeRegressor(),
                                                          n_estimators=params_best['n_estimators'],
                                                          learning_rate=params_best['learning_rate'],
                                                          random_state=inputs['random state'])

                            export_cross_val_results(reg, cv, "AdaBoostR_cv", inputs['random state'])
                    elif inputs['base estimator'] == "SupportVector":
                        if inputs['auto hyperparameters'] == False:
                            # Manual hyper-parameters: cross-validate AdaBoost with an SVR base learner.
                            reg.model = AdaBoostRegressor(estimator=SVR(), n_estimators=inputs['nestimators'],
                                                          learning_rate=inputs['learning rate'],
                                                          random_state=inputs['random state'])

                            # cvs = CV(reg.model, reg.features, reg.targets, cv = cv, scoring=make_scorer(r2_score), return_train_score=False, return_estimator=True)

                            export_cross_val_results(reg, cv, "AdaBoostR_cv", inputs['random state'])
                        elif inputs['auto hyperparameters']:
                            def AdaBoostR_TT(n_estimators, learning_rate):
                                """Objective for Bayesian optimization: cross-val score of AdaBoost(SVR).

                                NOTE(review): random_state is not fixed here, unlike the final refit —
                                confirm whether the noisy objective is intended.
                                """
                                reg.model = AdaBoostRegressor(estimator=SVR(),
                                                              n_estimators=int(n_estimators),
                                                              learning_rate=learning_rate)
                                cv_score = cv_cal(reg, cv, inputs['random state'])
                                return cv_score

                            # Search bounds. NOTE(review): a lower bound of 1 on learning_rate assumes
                            # inputs['learning rate'] > 1 — confirm the UI enforces this.
                            AdaBoostRbounds = {'n_estimators': (1, inputs['nestimators']),
                                               'learning_rate': (1, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=AdaBoostR_TT, pbounds=AdaBoostRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]  # NOTE(review): assigned but never used below
                            # The optimizer samples floats; cast the estimator count back to int.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['base estimator'] = 'support vector machine'
                            st.write("\n", "\n", "best params: ", params_best)

                            # Refit with the tuned hyper-parameters and a fixed seed, then cross-validate.
                            reg.model = AdaBoostRegressor(estimator=SVR(),
                                                          n_estimators=params_best['n_estimators'],
                                                          learning_rate=params_best['learning_rate'],
                                                          random_state=inputs['random state'])

                            export_cross_val_results(reg, cv, "AdaBoostR_cv", inputs['random state'])

                    elif inputs['base estimator'] == "LinearRegression":
                        if inputs['auto hyperparameters'] == False:
                            reg.model = reg.model = AdaBoostRegressor(estimator=LinearR(),
                                                                      n_estimators=inputs['nestimators'],
                                                                      learning_rate=inputs['learning rate'],
                                                                      random_state=inputs['random state'])
                            # cvs = CV(reg.model, reg.features, reg.targets, cv = cv, scoring=make_scorer(r2_score), return_train_score=False, return_estimator=True)

                            export_cross_val_results(reg, cv, "AdaBoostR_cv", inputs['random state'])
                        elif inputs['auto hyperparameters']:
                            def AdaBoostR_TT(n_estimators, learning_rate):
                                reg.model = AdaBoostRegressor(estimator=LinearR(),
                                                              n_estimators=int(n_estimators),
                                                              learning_rate=learning_rate)
                                cv_score = cv_cal(reg, cv, inputs['random state'])
                                return cv_score

                            AdaBoostRbounds = {'n_estimators': (1, inputs['nestimators']),
                                               'learning_rate': (1, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=AdaBoostR_TT, pbounds=AdaBoostRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['base estimator'] = 'linear 回归'
                            st.write("\n", "\n", "best params: ", params_best)

                            reg.model = AdaBoostRegressor(estimator=LinearR(),
                                                          n_estimators=params_best['n_estimators'],
                                                          learning_rate=params_best['learning_rate'],
                                                          random_state=inputs['random state'])

                            export_cross_val_results(reg, cv, "AdaBoostR_cv", inputs['random state'])
                elif operator == 'leave one out':
                    if inputs['base estimator'] == "DecisionTree":
                        if inputs['auto hyperparameters'] == False:
                            # Manual hyper-parameters: leave-one-out evaluation of AdaBoost(tree).
                            reg.model = AdaBoostRegressor(estimator=tree.DecisionTreeRegressor(),
                                                          n_estimators=inputs['nestimators'],
                                                          learning_rate=inputs['learning rate'],
                                                          random_state=inputs['random state'])

                            export_loo_results(reg, loo, "AdaBoostR_loo")
                        elif inputs['auto hyperparameters']:
                            def AdaBoostR_TT(n_estimators, learning_rate):
                                """Objective for Bayesian optimization: leave-one-out score of AdaBoost(tree).

                                NOTE(review): random_state is not fixed here, unlike the final refit —
                                confirm whether the noisy objective is intended.
                                """
                                reg.model = AdaBoostRegressor(estimator=tree.DecisionTreeRegressor(),
                                                              n_estimators=int(n_estimators),
                                                              learning_rate=learning_rate)
                                loo_score = loo_cal(reg, loo)
                                return loo_score

                            # Search bounds. NOTE(review): a lower bound of 1 on learning_rate assumes
                            # inputs['learning rate'] > 1 — confirm the UI enforces this.
                            AdaBoostRbounds = {'n_estimators': (1, inputs['nestimators']),
                                               'learning_rate': (1, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=AdaBoostR_TT, pbounds=AdaBoostRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]  # NOTE(review): assigned but never used below
                            # The optimizer samples floats; cast the estimator count back to int.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['base estimator'] = 'decision tree'
                            st.write("\n", "\n", "best params: ", params_best)

                            # Refit with the tuned hyper-parameters and a fixed seed, then run LOO.
                            reg.model = AdaBoostRegressor(estimator=tree.DecisionTreeRegressor(),
                                                          n_estimators=params_best['n_estimators'],
                                                          learning_rate=params_best['learning_rate'],
                                                          random_state=inputs['random state'])

                            export_loo_results(reg, loo, "AdaBoostR_loo")

                    elif inputs['base estimator'] == "SupportVector":
                        if inputs['auto hyperparameters'] == False:
                            # Manual hyper-parameters: leave-one-out evaluation of AdaBoost(SVR).
                            reg.model = AdaBoostRegressor(estimator=SVR(), n_estimators=inputs['nestimators'],
                                                          learning_rate=inputs['learning rate'],
                                                          random_state=inputs['random state'])

                            export_loo_results(reg, loo, "AdaBoostR_loo")
                        elif inputs['auto hyperparameters']:
                            def AdaBoostR_TT(n_estimators, learning_rate):
                                """Objective for Bayesian optimization: leave-one-out score of AdaBoost(SVR).

                                NOTE(review): random_state is not fixed here, unlike the final refit —
                                confirm whether the noisy objective is intended.
                                """
                                reg.model = AdaBoostRegressor(estimator=SVR(),
                                                              n_estimators=int(n_estimators),
                                                              learning_rate=learning_rate)
                                loo_score = loo_cal(reg, loo)
                                return loo_score

                            # Search bounds. NOTE(review): a lower bound of 1 on learning_rate assumes
                            # inputs['learning rate'] > 1 — confirm the UI enforces this.
                            AdaBoostRbounds = {'n_estimators': (1, inputs['nestimators']),
                                               'learning_rate': (1, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=AdaBoostR_TT, pbounds=AdaBoostRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]  # NOTE(review): assigned but never used below
                            # The optimizer samples floats; cast the estimator count back to int.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['base estimator'] = 'support vector machine'
                            st.write("\n", "\n", "best params: ", params_best)

                            # Refit with the tuned hyper-parameters and a fixed seed, then run LOO.
                            reg.model = AdaBoostRegressor(estimator=SVR(),
                                                          n_estimators=params_best['n_estimators'],
                                                          learning_rate=params_best['learning_rate'],
                                                          random_state=inputs['random state'])

                            export_loo_results(reg, loo, "AdaBoostR_loo")

                    elif inputs['base estimator'] == "LinearRegression":
                        if inputs['auto hyperparameters'] == False:
                            # Manual hyper-parameters: leave-one-out evaluation of AdaBoost(LinearR).
                            reg.model = AdaBoostRegressor(estimator=LinearR(), n_estimators=inputs['nestimators'],
                                                          learning_rate=inputs['learning rate'],
                                                          random_state=inputs['random state'])

                            export_loo_results(reg, loo, "AdaBoostR_loo")
                        elif inputs['auto hyperparameters']:
                            def AdaBoostR_TT(n_estimators, learning_rate):
                                """Objective for Bayesian optimization: leave-one-out score of AdaBoost(LinearR).

                                NOTE(review): random_state is not fixed here, unlike the final refit —
                                confirm whether the noisy objective is intended.
                                """
                                reg.model = AdaBoostRegressor(estimator=LinearR(),
                                                              n_estimators=int(n_estimators),
                                                              learning_rate=learning_rate)
                                loo_score = loo_cal(reg, loo)
                                return loo_score

                            # Search bounds. NOTE(review): a lower bound of 1 on learning_rate assumes
                            # inputs['learning rate'] > 1 — confirm the UI enforces this.
                            AdaBoostRbounds = {'n_estimators': (1, inputs['nestimators']),
                                               'learning_rate': (1, inputs['learning rate'])}

                            with st.expander('hyperparameter opt'):
                                optimizer = BayesianOptimization(f=AdaBoostR_TT, pbounds=AdaBoostRbounds,
                                                                 random_state=inputs['random state'],
                                                                 allow_duplicate_points=True)
                                optimizer.maximize(init_points=inputs['init points'], n_iter=inputs['iteration number'])
                            params_best = optimizer.max["params"]
                            score_best = optimizer.max["target"]  # NOTE(review): assigned but never used below
                            # The optimizer samples floats; cast the estimator count back to int.
                            params_best['n_estimators'] = int(params_best['n_estimators'])
                            params_best['base estimator'] = 'linear 回归'
                            st.write("\n", "\n", "best params: ", params_best)

                            # Refit with the tuned hyper-parameters and a fixed seed, then run LOO.
                            reg.model = AdaBoostRegressor(estimator=LinearR(),
                                                          n_estimators=params_best['n_estimators'],
                                                          learning_rate=params_best['learning_rate'],
                                                          random_state=inputs['random state'])

                            export_loo_results(reg, loo, "AdaBoostR_loo")
    st.write('---')