import xgboost as xgb
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error
from sklearn.metrics import mean_squared_error
from my_models.base import Base

class xgbRegressor(Base):
    """XGBoost regression model wrapper conforming to the `Base` interface.

    Wraps the native `xgboost` train/predict/save/load API behind the
    project's train/valid/save_model/load_model contract. The trained
    booster is held in `self.bst`.
    """

    def __init__(self):
        # Booster is created lazily by train() or load_model(); None marks
        # the "no model yet" state so misuse raises a clear error.
        self.bst = None

    def train(self, x_train, y_train):
        """Train a gradient-boosted regression model on (x_train, y_train).

        x_train / y_train: any array-like accepted by xgb.DMatrix
        (e.g. numpy arrays). Stores the fitted booster in self.bst.
        """
        dtrain = xgb.DMatrix(x_train, label=y_train)
        params = {
            'objective': 'reg:squarederror',  # squared-error loss for regression
            'eval_metric': 'rmse',            # root-mean-squared-error metric
            'max_depth': 6,                   # maximum tree depth
            'eta': 0.3,                       # learning rate (xgboost default)
            'subsample': 0.8,                 # row subsample ratio per tree
            'colsample_bytree': 0.8           # feature subsample ratio per tree
        }
        num_round = 100  # number of boosting rounds
        self.bst = xgb.train(params, dtrain, num_round)

    def valid(self, x_test, y_test):
        """Evaluate the trained model on (x_test, y_test); print and return MSE.

        Raises RuntimeError if no model has been trained or loaded yet.
        """
        if self.bst is None:
            raise RuntimeError("No model available: call train() or load_model() first")
        dtest = xgb.DMatrix(x_test, label=y_test)
        y_pred = self.bst.predict(dtest)
        mse = mean_squared_error(y_test, y_pred)
        print(f"XGB Mean Squared Error: {mse:.3f}")
        return mse

    def save_model(self, save_path):
        """Persist the trained booster to save_path (xgboost native format).

        Raises RuntimeError if no model has been trained or loaded yet.
        """
        if self.bst is None:
            raise RuntimeError("No model available: call train() or load_model() first")
        self.bst.save_model(save_path)

    def load_model(self, model_path):
        """Load a previously saved booster from model_path into self.bst."""
        self.bst = xgb.Booster()
        self.bst.load_model(model_path)
