from abc import ABCMeta
from pprint import pprint
from gplearn.genetic import SymbolicRegressor
import numpy as np
from sklearn.exceptions import DataConversionWarning
import torch
import torch.nn as nn
import torch.optim as optim
import warnings

warnings.filterwarnings("ignore", category=FutureWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning)
warnings.filterwarnings("ignore", category=DataConversionWarning)

from sklearn.model_selection import train_test_split
from bishe_situations.utils import OPS_GPLEARN, BaseNeuralTrainer, HyperparameterOptimizer, history_to_kwargs, mse_tensor, postprocess_params, preprocess_params
from bishe_situations.utils import BINARY_OPS_GPLEARN, GPLEARN_PARAMS, UNARY_OPS_GPLEARN, generate_data
    
class NeuralGplearnTrainer(BaseNeuralTrainer):
    """Neural hyperparameter tuner for gplearn's SymbolicRegressor.

    A small network (``HyperparameterOptimizer``) proposes a raw
    hyperparameter vector; each proposal is decoded into gplearn keyword
    arguments, a ``SymbolicRegressor`` is fitted with them, and the
    validation MSE of that regressor is used as feedback for the network.
    """

    def __init__(self, params: dict = GPLEARN_PARAMS, population_size: int = 50, generations: int = 10, niterations: int = 10, **kwargs):
        super().__init__(params, population_size, generations, niterations, **kwargs)
        # Fall back to CPU when CUDA is unavailable so the trainer still
        # runs on CPU-only machines (original hard-defaulted to 'cuda').
        self.device = kwargs.get('device', 'cuda' if torch.cuda.is_available() else 'cpu')

    def override_fit(self, X, y):
        """Run ``self.niterations`` propose/fit/evaluate trials.

        Tracks the best discovered program (``self.best_equation``) and
        its validation MSE (``self.mse``) on a held-out 20% split, and
        updates the proposal network after each trial.
        """
        # Hold out 20% of the data to score each candidate regressor.
        X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.2, random_state=42)

        # Proposal network and its optimizer.
        model = HyperparameterOptimizer(len(GPLEARN_PARAMS)).to(self.device)
        optimizer = optim.Adam(model.parameters(), lr=1e-3)
        criterion = nn.MSELoss()

        for trial in range(self.niterations):
            # Predict a raw hyperparameter vector from a constant dummy input.
            params: torch.Tensor = model(torch.tensor([0]).to(self.device))
            params_detached = params.clone().detach().cpu().numpy()

            # Decode the raw network outputs into valid gplearn kwargs.
            processed_param = postprocess_params(params_detached, GPLEARN_PARAMS)
            kwargs = history_to_kwargs(processed_param[0], GPLEARN_PARAMS)
            pprint(kwargs)

            # Fit a gplearn symbolic regressor with the proposed settings.
            gplearn_model = SymbolicRegressor(
                function_set=OPS_GPLEARN,
                verbose=1,
                random_state=42,
                metric='mse',
                population_size=self.population_size,
                generations=self.generations,
                **kwargs
            )
            gplearn_model.fit(X_train, y_train)

            # Score the candidate on the validation split.
            y_pred = gplearn_model.predict(X_val)
            y_pred_tensor = torch.from_numpy(np.asarray(y_pred).ravel()).float()
            y_val_tensor = torch.from_numpy(np.asarray(y_val).ravel()).float()
            # nn.MSELoss signature is (input, target); the original swapped
            # them, which is numerically harmless for MSE but misleading.
            mse_value = criterion(y_pred_tensor, y_val_tensor).item()

            if mse_value < self.mse:
                self.mse = mse_value
                self.best_equation = str(gplearn_model._program)
                print(f"$$$$Trial {trial+1}/{self.niterations}, MSE: {mse_value:.4f}", str(gplearn_model._program), "#better#")
            else:
                print(f"$$$$Trial {trial+1}/{self.niterations}, MSE: {mse_value:.4f}", str(gplearn_model._program))

            # BUG FIX: the original built `loss` as a brand-new leaf tensor
            # from mse.item(); backward() on that tensor cannot reach
            # `model`'s parameters, so optimizer.step() was a silent no-op
            # and the network never learned. Scale the differentiable
            # `params` output by the (non-differentiable) validation MSE
            # instead, so a lower MSE yields a smaller gradient signal and
            # gradients actually flow into the proposal network.
            loss = params.sum() * mse_value
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

    def best_function(self):
        """Return the string form of the best program found so far."""
        return self.best_equation
        

def optimize_hyperparameters(X, y, n_trials=10):
    """Search gplearn hyperparameters for (X, y) with the neural trainer.

    Args:
        X, y: training data passed straight to the trainer's ``fit``.
        n_trials: number of propose/fit/evaluate iterations to run.

    Returns:
        Tuple of (best equation string, best validation MSE, train time).
    """
    # BUG FIX: the original call passed a stray 5th positional argument
    # (`..., 100, 4`) to an __init__ that accepts only 4 positionals plus
    # keyword-only **kwargs, raising TypeError at runtime — and it ignored
    # `n_trials` entirely. Use keyword arguments and honour `n_trials`.
    model = NeuralGplearnTrainer(
        GPLEARN_PARAMS,
        population_size=50,
        generations=10,
        niterations=n_trials,
    )
    model.fit(X, y)

    return model.best_function(), model.mse, model.train_time

def main():
    """Demo entry point: recover 6*sin(x)*cos(x) from noisy samples."""

    def target_function(X):
        # Ground-truth signal the symbolic regressor should rediscover.
        return 6 * np.sin(X) * np.cos(X)

    # Sample a noisy training set from the target function.
    X, y = generate_data(target_function, n_samples=512, noise=0.8)

    # Run the neural hyperparameter search and report the outcome.
    print("开始优化超参数...")
    best_equation, best_mse, train_time = optimize_hyperparameters(X, y)

    print("\n最佳超参数:")
    print(f"最佳MSE: {best_mse:.4f}")
    print(f"最佳方程: {best_equation}")
    print(f"训练时间: {train_time:.4f}秒")

# Script entry point: run the demo only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main() 