import pytorch_lightning as pl
import torch
from torch import nn
from torchmetrics import Accuracy
from transformers import BertModel

class PaperClassifier(pl.LightningModule):
    """BERT-based text classifier: [CLS] pooled output -> linear head.

    Args:
        config_path: Local filesystem path to a pretrained BERT checkpoint.
        num_classes: Number of target classes for the linear classifier head.
    """

    def __init__(self, config_path, num_classes):
        super().__init__()
        self.save_hyperparameters()  # persist init args so checkpoints can rebuild the model
        self.bert = BertModel.from_pretrained(
            config_path,
            local_files_only=True,  # force local load; never reach out to the Hub
            trust_remote_code=False,
        )
        self.classifier = nn.Linear(self.bert.config.hidden_size, num_classes)
        self.loss_fn = nn.CrossEntropyLoss()

        # Separate metric objects per stage so their accumulated state never mixes.
        self.train_acc = Accuracy(task='multiclass', num_classes=num_classes)
        self.val_acc = Accuracy(task='multiclass', num_classes=num_classes)

    def forward(self, input_ids, attention_mask, **kwargs):  # **kwargs absorbs extra batch keys
        """Return raw class logits of shape (batch, num_classes)."""
        outputs = self.bert(
            input_ids=input_ids,
            attention_mask=attention_mask,
            return_dict=True
        )
        # Use the [CLS] token representation (position 0) as the sequence embedding.
        return self.classifier(outputs.last_hidden_state[:, 0, :])

    def training_step(self, batch, batch_idx=0):
        """One optimization step: compute loss, log train loss/accuracy.

        `batch_idx` added (defaulted for backward compatibility) to match the
        standard Lightning hook signature already used by `validation_step`.
        """
        logits = self(batch['input_ids'], batch['attention_mask'])
        loss = self.loss_fn(logits, batch['labels'])

        self.log("train_loss", loss)
        # Fix: train_acc was created in __init__ but never updated or logged.
        self.train_acc(logits.argmax(1), batch['labels'])
        self.log("train_acc", self.train_acc)
        return loss

    def validation_step(self, batch, batch_idx):
        """One validation step: log validation loss and accuracy."""
        logits = self(batch['input_ids'], batch['attention_mask'])
        loss = self.loss_fn(logits, batch['labels'])

        self.log("val_loss", loss, prog_bar=True)
        self.val_acc(logits.argmax(1), batch['labels'])
        self.log("val_acc", self.val_acc, prog_bar=True)

    def configure_optimizers(self):
        """AdamW at 2e-5 — the conventional BERT fine-tuning learning rate."""
        return torch.optim.AdamW(self.parameters(), lr=2e-5)