# -*- coding: UTF-8 -*-
"""
@Date    ：2025/10/23 18:22 
@Author  ：Liu Yuezhao
@Project ：bert 
@File    ：time_bert_with_adapter.py
@IDE     ：PyCharm 
"""
import os
import torch
import torch.nn as nn
from transformers import BertConfig, AutoConfig
from src.transfer.model import TimeBertWithAdapter
from adapters import AutoAdapterModel, AdapterConfig
from src.tools.utils import load_config
from src.models.pre_time_bert_model import TimeBertEmbedding


# Resolve all paths relative to this file so the script works from any CWD.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
CONFIG_PATH = os.path.normpath(os.path.join(SCRIPT_DIR, "../../config.yaml"))
CHECKPOINT_PATH = os.path.normpath(os.path.join(SCRIPT_DIR, "../../checkpoints/pretrain/best_mlm_model.pth"))

yaml_config = load_config(CONFIG_PATH)
device = yaml_config["config"]["device"]

# Hoist the nested section lookup once instead of repeating it per field.
_pretrain_cfg = yaml_config["pretrain_model_config"]

# BERT backbone configuration mirroring the pretraining setup, so that the
# checkpoint's weights load without shape mismatches.
event_seq_bert_config = BertConfig(
    vocab_size=_pretrain_cfg["vocab_size"],
    hidden_size=_pretrain_cfg["hidden_size"],
    num_hidden_layers=_pretrain_cfg["num_hidden_layers"],
    num_attention_heads=_pretrain_cfg["num_attention_heads"],
    intermediate_size=_pretrain_cfg["intermediate_size"],
    max_position_embeddings=_pretrain_cfg["max_position_embeddings"],
    hidden_act=_pretrain_cfg["hidden_act"],
    hidden_dropout_prob=_pretrain_cfg["hidden_dropout_prob"],
    attention_probs_dropout_prob=_pretrain_cfg["attention_probs_dropout_prob"],
    pad_token_id=0,
    type_vocab_size=2
)

def load_time_bert_adapter_model(
    checkpoint_path: str,
    num_labels: int = 2,
    adapter_name: str = "fraud_detection",
    reduction_factor: int = 16,
    device: str = "cpu"
):
    """Build a TimeBertWithAdapter classifier from a pretrained MLM checkpoint.

    Restores the time-embedding and BERT backbone weights saved during
    pretraining, attaches a freshly initialized Pfeiffer adapter, freezes all
    non-adapter parameters, and wraps everything in ``TimeBertWithAdapter``.

    Args:
        checkpoint_path: Path to the checkpoint saved with ``torch.save``;
            must contain a ``"model_state_dict"`` entry.
        num_labels: Number of output classes for the downstream head.
        adapter_name: Name under which the adapter is registered/activated.
        reduction_factor: Bottleneck reduction factor of the adapter.
        device: Target device for the assembled model.

    Returns:
        A ``TimeBertWithAdapter`` instance moved to ``device`` with only the
        adapter (and classification head) parameters left trainable.
    """
    checkpoint = torch.load(checkpoint_path, map_location="cpu")
    state_dict = checkpoint["model_state_dict"]

    # --- Step 1: time embedding — restore weights saved under the "emb." prefix ---
    time_emb = TimeBertEmbedding(
        config=event_seq_bert_config,
        time2vec_dim=yaml_config["pretrain_model_config"]["hidden_size"],
        time_activation='cos'
    )
    # Strip only the leading prefix: str.replace without a count would also
    # mangle inner occurrences (e.g. "emb.time_emb.weight" -> "time_weight").
    time_emb_state = {
        k[len("emb."):]: v
        for k, v in state_dict.items()
        if k.startswith("emb.")
    }
    time_emb.load_state_dict(time_emb_state, strict=True)

    # --- Step 2: create the BERT backbone and load the pretrained weights ---
    bert_model = AutoAdapterModel.from_config(event_seq_bert_config)
    bert_state = {
        "bert." + k[len("bert_mlm.bert."):]: v
        for k, v in state_dict.items()
        if k.startswith("bert_mlm.bert.")
    }
    # strict=False because the adapter model carries extra (head/adapter)
    # parameters; report what was skipped instead of failing silently.
    load_result = bert_model.load_state_dict(bert_state, strict=False)
    if load_result.missing_keys:
        print(f"[load_time_bert_adapter_model] missing keys: {load_result.missing_keys}")
    if load_result.unexpected_keys:
        print(f"[load_time_bert_adapter_model] unexpected keys: {load_result.unexpected_keys}")

    # --- Step 3: add and activate the adapter (key step!) ---
    # AdapterConfig is already imported at module level; no local re-import needed.
    adapter_config = AdapterConfig.load("pfeiffer", reduction_factor=reduction_factor)
    bert_model.add_adapter(adapter_name, config=adapter_config)
    bert_model.set_active_adapters(adapter_name)  # explicitly activate
    bert_model.train_adapter(adapter_name)        # freeze all non-adapter parameters

    # --- Step 4: assemble the final model ---
    model = TimeBertWithAdapter(time_emb, bert_model, num_labels)
    model.to(device)
    return model