import torch
import torch.nn as nn
from torch.utils.data import DataLoader, SubsetRandomSampler

# A generic two-layer MLP model.
class GenericModel(nn.Module):
    """Two-layer fully connected network: Linear -> ReLU -> Linear.

    Args:
        input_dim: size of each input sample.
        hidden_dim: width of the hidden layer.
        output_dim: size of each output sample.
    """

    def __init__(self, input_dim, hidden_dim, output_dim):
        super().__init__()
        self.fc1 = nn.Linear(input_dim, hidden_dim)
        self.fc2 = nn.Linear(hidden_dim, output_dim)

    def forward(self, x):
        """Map a batch of inputs through the hidden layer to the output layer."""
        hidden = self.fc1(x).relu()
        return self.fc2(hidden)

# Ensemble model: averages the outputs of its member models.
class CustomVotingClassifier(nn.Module):
    """Soft-voting ensemble that averages the raw outputs of several models.

    Args:
        models: an iterable of nn.Module instances, each accepting the same
            input and producing outputs of the same shape.
    """

    def __init__(self, models):
        super().__init__()
        self.models = nn.ModuleList(models)

    def forward(self, x):
        """Run every member model on ``x`` and return the element-wise mean."""
        # Stack per-model outputs along a new axis, then reduce it by mean.
        stacked = torch.stack([member(x) for member in self.models], dim=1)
        return stacked.mean(dim=1)

# Trainer that trains several independent models, each on its own data.
class MultiTrainer:
    """Train several (model, data_loader, criterion, optimizer) tuples in lockstep.

    Each model is trained on its own data loader with its own loss and
    optimizer; the models share nothing but the epoch loop.

    Args:
        models: list of nn.Module instances to train.
        data_loaders: list of DataLoader instances, one per model.
        criterions: list of loss modules, one per model.
        optimizers: list of optimizers, one per model.
    """

    def __init__(self, models, data_loaders, criterions, optimizers):
        self.models = models
        self.data_loaders = data_loaders
        self.criterions = criterions
        self.optimizers = optimizers

    def train(self, num_epochs):
        """Run ``num_epochs`` epochs over every model's loader, printing the
        average loss per (epoch, model).

        Fixes over the original: the print no longer reads the loop-local
        ``loss`` after the batch loop (a NameError when a loader is empty),
        reports the epoch's average loss instead of only the last batch's,
        and models are explicitly put in training mode.
        """
        for epoch in range(num_epochs):
            for i, (model, data_loader, criterion, optimizer) in enumerate(
                zip(self.models, self.data_loaders, self.criterions, self.optimizers)
            ):
                model.train()  # ensure dropout/batch-norm behave as in training
                total_loss = 0.0
                num_batches = 0
                for data, target in data_loader:
                    optimizer.zero_grad()
                    output = model(data)
                    loss = criterion(output, target)
                    loss.backward()
                    optimizer.step()
                    total_loss += loss.item()
                    num_batches += 1
                # Guard against an empty loader (avg of zero batches).
                avg_loss = total_loss / max(num_batches, 1)
                print(f"Epoch {epoch + 1}, Model {i + 1}, Loss: {avg_loss:.4f}")

def main_worker(cfgs, dataset=None, test_data=None):
    """Train one model per config on disjoint shards of ``dataset``, then
    evaluate their soft-voting ensemble.

    Args:
        cfgs: sequence of config dicts, each shaped like
            ``{'model': {'input_dim', 'hidden_dim', 'output_dim'},
               'optimizer': {'lr'}}`` (see the ``cfgs`` literal in this file).
        dataset: a PyTorch Dataset, split into ``len(cfgs)`` index shards,
            one per model. Required — the original ``dataset = ...``
            placeholder could never run.
        test_data: optional tensor fed to the trained ensemble.

    Returns:
        The ensemble output on ``test_data``, or None when no test data is given.

    Raises:
        ValueError: if ``dataset`` is not provided.
    """
    if dataset is None:
        raise ValueError("main_worker requires a dataset")

    num_models = len(cfgs)
    num_samples = len(dataset)

    # Near-equal shard sizes; the last shard absorbs the remainder so the
    # sizes always sum to num_samples (torch.split raises otherwise —
    # the original [n // 3] * 3 failed whenever n % 3 != 0).
    base = num_samples // num_models
    split_sizes = [base] * num_models
    split_sizes[-1] += num_samples - base * num_models
    shards = torch.split(torch.arange(num_samples), split_sizes)

    # One loader per shard so each model trains on disjoint data.
    data_loaders = [
        DataLoader(dataset, batch_size=32, sampler=SubsetRandomSampler(indices))
        for indices in shards
    ]

    # Configs are plain dicts, so use subscript access (the original
    # attribute access, cfgs[0].model.input_dim, raised AttributeError).
    models = [
        GenericModel(
            input_dim=cfg['model']['input_dim'],
            hidden_dim=cfg['model']['hidden_dim'],
            output_dim=cfg['model']['output_dim'],
        )
        for cfg in cfgs
    ]

    # Ensemble wrapping all individually trained models.
    ensemble_model = CustomVotingClassifier(models)

    # One Adam optimizer per model, each with its configured learning rate.
    optimizers = [
        torch.optim.Adam(model.parameters(), lr=cfg['optimizer']['lr'])
        for model, cfg in zip(models, cfgs)
    ]

    # All models share the same regression loss.
    criterion = torch.nn.MSELoss()

    trainer = MultiTrainer(
        models=models,
        data_loaders=data_loaders,
        criterions=[criterion] * num_models,
        optimizers=optimizers,
    )

    trainer.train(num_epochs=10)

    # Evaluate the ensemble only when test data was supplied.
    if test_data is not None:
        test_output = ensemble_model(test_data)
        print("Test Output:", test_output)
        return test_output
    return None

# Example configuration list: three models that share input/output dims
# but differ in hidden-layer capacity and learning rate.
cfgs = [
    {
        'model': {
            'input_dim': 10,
            'hidden_dim': hidden_dim,
            'output_dim': 1,
        },
        'optimizer': {
            'lr': lr,
        },
    }
    for hidden_dim, lr in [(32, 0.001), (64, 0.0005), (128, 0.0001)]
]

# Run the pipeline only when executed as a script, not when this module
# is imported (the original unconditional call ran on import).
if __name__ == "__main__":
    main_worker(cfgs)