import os
from pathlib import Path

from pytorch_lightning import Trainer
from torch.utils.data import DataLoader, random_split
from .dataset import ArxivDataset
from .model import PaperClassifier
import torch
import pytorch_lightning as pl


class ModelTrainer:
    """Train a PaperClassifier on an ArxivDataset with an 80/20 train/val split."""

    def __init__(self, config_path, data_path):
        """Load the dataset and build a classifier sized to its category count.

        Args:
            config_path: Path to the configuration file shared by dataset and model.
            data_path: Path to the arXiv data consumed by ArxivDataset.
        """
        self.dataset = ArxivDataset(data_path, config_path)
        self.model = PaperClassifier(
            config_path,
            # One output class per distinct category present in the dataset.
            num_classes=len(self.dataset.data['categories'].cat.categories)
        )

    def train(self, batch_size=8, max_epochs=3):
        """Run training on CPU and save the final weights to disk.

        Args:
            batch_size: Mini-batch size for both loaders (default 8, the
                previously hard-coded value).
            max_epochs: Number of training epochs (default 3, as before).
        """
        # 80/20 train/validation split, seeded so the split is reproducible.
        train_size = int(0.8 * len(self.dataset))
        val_size = len(self.dataset) - train_size
        train_set, val_set = random_split(
            self.dataset,
            [train_size, val_size],
            generator=torch.Generator().manual_seed(42)
        )

        # os.cpu_count() may return None; fall back to a single worker then.
        num_workers = min(4, os.cpu_count() or 1)
        # Shuffle training batches each epoch; validation order stays fixed.
        train_loader = DataLoader(
            train_set,
            batch_size=batch_size,
            shuffle=True,
            collate_fn=self._collate_fn,
            num_workers=num_workers
        )
        val_loader = DataLoader(
            val_set,
            batch_size=batch_size,
            collate_fn=self._collate_fn,
            num_workers=num_workers
        )

        # Configure the trainer; keep only the best checkpoint by validation
        # accuracy (the model is expected to log 'val_acc' — TODO confirm
        # against PaperClassifier's validation_step).
        trainer = Trainer(
            max_epochs=max_epochs,
            accelerator="cpu",
            enable_progress_bar=True,
            enable_model_summary=True,
            enable_checkpointing=True,
            default_root_dir="saved_models",
            callbacks=[
                pl.callbacks.ModelCheckpoint(
                    monitor='val_acc',
                    mode='max',
                    save_top_k=1,
                    filename='best-{epoch}-{val_acc:.2f}'
                )
            ]
        )
        # Start training.
        trainer.fit(self.model, train_loader, val_loader)

        # Save the final weights; create the target directory if it is missing
        # so torch.save does not fail on a fresh checkout.
        out_path = Path(__file__).parent.parent / "saved_models" / "final_model.pt"
        out_path.parent.mkdir(parents=True, exist_ok=True)
        torch.save(self.model.state_dict(), out_path)

    def _collate_fn(self, batch):
        """Stack per-sample tensors into batched tensors for the model.

        Args:
            batch: List of dicts, each with 'input_ids', 'attention_mask' and
                'labels' tensors of matching shapes.

        Returns:
            Dict with the same keys, each value stacked along a new batch dim.
        """
        return {
            'input_ids': torch.stack([x['input_ids'] for x in batch]),
            'attention_mask': torch.stack([x['attention_mask'] for x in batch]),
            'labels': torch.stack([x['labels'] for x in batch])
        }