# -*- coding: UTF-8 -*-
"""
@Date    ：2025/10/23 14:53 
@Author  ：Liu Yuezhao
@Project ：bert 
@File    ：model.py
@IDE     ：PyCharm 
"""
import torch.nn as nn
import torch
from src.models.pre_time_bert_model import TimeBertEmbedding
from adapters import AutoAdapterModel

class TimeBertWithAdapter(nn.Module):
    """Sequence classifier: frozen time-aware embeddings -> adapter-BERT -> linear head.

    ``time_emb`` turns token ids plus temporal features into input embeddings,
    which are fed to ``bert`` through ``inputs_embeds``. The representation at
    position 0 ([CLS]) of the last hidden state is projected to ``num_labels``
    logits.

    Args:
        time_emb: pretrained time-aware embedding module; its parameters are
            frozen in ``__init__``.
        bert_model: adapter-enabled BERT backbone (must expose
            ``config.hidden_size`` and accept ``inputs_embeds``).
        num_labels: number of output classes for the classification head.
    """

    def __init__(self, time_emb: TimeBertEmbedding, bert_model: AutoAdapterModel, num_labels: int = 2):
        super().__init__()
        self.time_emb = time_emb
        self.bert = bert_model
        self.classifier = nn.Linear(bert_model.config.hidden_size, num_labels)

        # Freeze the time-embedding weights; only the BERT (adapters) and the
        # classifier head receive gradients.
        # NOTE(review): any dropout/norm layers inside time_emb still follow the
        # module's train/eval mode — call time_emb.eval() if fully frozen
        # inference behavior is intended; confirm with the training script.
        for param in self.time_emb.parameters():
            param.requires_grad = False

    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        interval=None,
        same_event_interval=None,
        age=None,
        labels=None,
        pos_weight=10,
        **kwargs
    ):
        """Run a forward pass and optionally compute the classification loss.

        Args:
            input_ids: token-id tensor forwarded to the time-embedding layer.
            attention_mask: attention mask forwarded to the BERT backbone.
            interval, same_event_interval, age: temporal features consumed by
                ``time_emb`` (expected shapes defined by TimeBertEmbedding —
                not visible here).
            labels: class-index tensor; when given, a CrossEntropy loss is
                computed and returned.
            pos_weight: loss weight applied to class 1 (the positive class);
                pass ``None`` for an unweighted CrossEntropy.
            **kwargs: ignored — lets Trainer-style callers pass extra batch
                columns without error.

        Returns:
            dict with ``"logits"`` and, when ``labels`` is given, ``"loss"``.
        """
        embeds = self.time_emb(
            input_ids=input_ids,
            interval=interval,
            same_event_interval=same_event_interval,
            age=age
        )
        outputs = self.bert(inputs_embeds=embeds, attention_mask=attention_mask, return_dict=True)
        # [CLS] (position 0) representation as the pooled sequence summary.
        pooled_output = outputs.last_hidden_state[:, 0]
        logits = self.classifier(pooled_output)

        loss = None
        if labels is not None:
            if pos_weight is not None:
                # Weighted CrossEntropy to counter class imbalance.
                # Bug fix: the weight vector was hard-coded to two classes
                # ([1.0, pos_weight]) and crashed whenever num_labels != 2;
                # build it from the actual number of classes instead. For the
                # binary case this is byte-identical to the old behavior.
                weight = torch.ones(logits.size(-1), device=labels.device, dtype=logits.dtype)
                if weight.numel() > 1:
                    weight[1] = pos_weight
                loss = nn.CrossEntropyLoss(weight=weight)(logits, labels)
            else:
                loss = nn.CrossEntropyLoss()(logits, labels)
        return {"loss": loss, "logits": logits} if loss is not None else {"logits": logits}