import torch
import torch.nn as nn
from transformers import BertModel

from utils.config import Config

# Project configuration loaded from a hard-coded absolute path.
# NOTE(review): machine-specific Windows path — this will break on any other
# machine; consider an environment variable or a path relative to the repo.
conf = Config('E:/Python+AI/group4_nlp_project')


class BertClassifier(nn.Module):
    """Multi-task text classifier on top of a pretrained BERT encoder.

    Sentence representation: the [CLS] vector concatenated with a masked
    mean pooling of the last hidden states (2 * embed_dim features).

    Heads:
        fc_cat:   category classification -> ``num_cat_classes`` logits.
        fc_label: binary classification / regression -> a single value.
    """

    def __init__(self):
        super().__init__()
        self.bert = BertModel.from_pretrained(conf.bert_path)

        # Fused representation: [CLS] vector + mean-pooled vector.
        combined_dim = conf.embed_dim * 2

        # Category classification head (cat).
        self.fc_cat = nn.Sequential(
            nn.Linear(combined_dim, conf.hidden_size),
            nn.ReLU(),
            nn.Dropout(0.3),
            nn.Linear(conf.hidden_size, conf.num_cat_classes),
        )

        # Binary-classification / regression head (label): single output.
        self.fc_label = nn.Sequential(
            nn.Linear(combined_dim, conf.hidden_size),
            nn.ReLU(),
            nn.Dropout(0.3),
            nn.Linear(conf.hidden_size, 1),
        )

    def forward(self, input_ids, attention_mask):
        """Encode a batch and run both task heads.

        Args:
            input_ids: token id tensor, assumed (batch, seq_len) — as
                produced by a HuggingFace tokenizer.
            attention_mask: same shape; 1 for real tokens, 0 for padding.

        Returns:
            (out_cat, out_label): category logits of shape
            (batch, num_cat_classes) and a (batch,) tensor of raw
            scores for the label head.
        """
        outputs = self.bert(input_ids=input_ids, attention_mask=attention_mask)
        hidden = outputs.last_hidden_state  # (batch, seq_len, embed_dim)

        # [CLS] token vector.
        cls = hidden[:, 0]

        # Masked mean pooling. Cast the mask to the hidden states' dtype:
        # tokenizers emit an integer (long) mask, and clamping an integer
        # tensor with a float min (1e-9) errors or truncates to 0 on some
        # PyTorch versions. Broadcasting (batch, seq_len, 1) against the
        # hidden states replaces the original explicit expand().
        mask = attention_mask.unsqueeze(-1).to(hidden.dtype)
        sum_embeddings = torch.sum(hidden * mask, dim=1)
        # Clamp guards against division by zero on an all-padding row.
        sum_mask = mask.sum(dim=1).clamp(min=1e-9)
        mean_pool = sum_embeddings / sum_mask

        # Concatenate [CLS] and mean-pooled features.
        features = torch.cat([cls, mean_pool], dim=1)

        # Per-task outputs; squeeze the label head's trailing dim to (batch,).
        out_cat = self.fc_cat(features)
        out_label = self.fc_label(features).squeeze(-1)

        return out_cat, out_label
