# Regression model scoring weights: a model's score combines MAE and RMSE
# with these weights (they sum to 1.0).
MAE_WEIGHT = 0.6
RMSE_WEIGHT = 0.4
# Cross-validation fold count — presumably passed as cv= to a grid search
# over the PARAM grids below; TODO confirm against the caller.
CV = 5
# Hyperparameter search grids, keyed by estimator class name.
# Each value is a param-grid dict suitable for grid search (lists of candidates).
PARAM = {
    'KNeighborsRegressor': {
        'n_neighbors': list(range(3, 11)),
        'weights': ['uniform'],
        'algorithm': ['auto'],
    },
    'KNeighborsClassifier': {
        'n_neighbors': list(range(3, 11)),
        'weights': ['uniform'],
        'algorithm': ['auto'],
    },
    'DecisionTreeRegressor': {
        'max_depth': list(range(5, 20)),
        # 'min_samples_split': list(range(2, 21)),
        # 'min_samples_leaf': list(range(2, 6)),
    },
    'LinearRegression': {
        'fit_intercept': [True],
        'copy_X': [True],
        'positive': [False],
    },
    'LogisticRegression': {
        # inverse of regularization strength
        'C': [0.001, 0.01, 0.1, 1, 10, 100],
        'penalty': ['l2'],
        'class_weight': ['balanced'],
        'multi_class': ['auto'],
    },
    'DecisionTreeClassifier': {
        'criterion': ["gini"],
        # tree depth
        'max_depth': list(range(3, 30)),
        # 'min_samples_split': list(range(2, 70)),   # min samples required to split an internal node
        # 'min_samples_leaf': list(range(1, 30)),    # min samples required at a leaf node
        # minimum impurity decrease required for a split
        'min_impurity_decrease': [0],
    },
    'RandomForestClassifier': {
        # number of trees
        'n_estimators': list(range(50, 60)),
        # tree depth
        'max_depth': list(range(5, 8)),
        # limits how many features each tree may consider
        'max_features': ["log2"],
        # 'min_samples_split': list(range(5, 15)),   # min samples required to split an internal node
        # 'min_samples_leaf': list(range(2, 6)),     # min samples required at a leaf node
        # Gini impurity
        'criterion': ["gini"],
    },
    'AdaBoostClassifier': {
        'n_estimators': list(range(50, 70)),
        # learning rate candidates: 0.01, 0.02, ..., 0.10
        'learning_rate': [round(0.01 * k, 2) for k in range(1, 11)],
        # 'random_state': [18, 29, 255, 168, 196, 0, 1],
        # 'algorithm': ['SAMME'],
    },
    'GradientBoostingClassifier': {
        'n_estimators': list(range(50, 300)),
        # learning rate candidates: 0.01, 0.02, ..., 0.10
        'learning_rate': [round(0.01 * k, 2) for k in range(1, 11)],
        'max_depth': list(range(3, 10)),
        'max_features': ["sqrt", "log2"],
    },
    'XGBClassifier': {
        'max_depth': [3, 4, 5],
        'n_estimators': list(range(100, 120)),  # *****
        'learning_rate': [0.1],
    },
    'Lasso': {'alpha': [0.001, 0.01, 0.1, 1.0]},
    'Ridge': {'alpha': [0.001, 0.01, 0.1, 1, 10, 100]},
}