import operator
from typing import Optional

import numpy as np
from transformers import PreTrainedModel
from transformers.trainer_callback import TrainerCallback


class DynamicLayerActivationCallback(TrainerCallback):
    """LISA-style callback that periodically switches which layers are trainable.

    Every ``interval_steps`` global steps, all transformer layers are frozen
    (``requires_grad=False``) and ``n_layers`` of them are re-activated.
    Layers are ranked by the average L2 norm of the gradients left over from
    the previous backward pass; when no gradient signal exists yet (e.g. on
    the very first step), selection falls back to a uniform random choice.

    Args:
        n_layers: Number of layers to keep trainable at any one time.
        interval_steps: How often (in global steps) to re-select layers.
        model: The model being trained; its transformer layer container is
            located via a known per-class attribute path.
        lisa_layers_attribute: Dotted attribute path (relative to this
            callback, e.g. ``"model.model.layers"``) to the layer container.
            Required only when the model class is not in the built-in map.

    Raises:
        ValueError: If the model class is unknown and ``lisa_layers_attribute``
            is not provided.
    """

    def __init__(
        self,
        n_layers: int,
        interval_steps: int,
        model: PreTrainedModel,
        lisa_layers_attribute: Optional[str] = None,
    ):
        super().__init__()
        self.n_layers = n_layers
        self.interval_steps = interval_steps
        self.model = model

        # Known model classes -> dotted path (relative to `self`) of the
        # module list that holds the transformer layers.
        class_to_layers_map = {
            "LlamaForCausalLM": "model.model.layers",
            "Qwen2ForCausalLM": "model.model.layers",
            "MistralForCausalLM": "model.model.layers",
            "MixtralForCausalLM": "model.model.layers",
            "GemmaForCausalLM": "model.model.layers",
            "GPT2LMHeadModel": "model.transformer.h",
            "HymbaForCausalLM": "model.model.layers",
        }
        model_class_name = self.model.__class__.__name__
        if model_class_name in class_to_layers_map:
            self.layers_attribute = class_to_layers_map[model_class_name]
        elif lisa_layers_attribute is not None:
            self.layers_attribute = lisa_layers_attribute
        else:
            # Raise instead of `assert`: asserts are stripped under `-O`,
            # and a missing attribute path is a user configuration error.
            raise ValueError("Please provide the attribute to access the layers of the model.")
        self.total_layers = len(self._get_layers())

        # Indices (plain ints) of the currently trainable layers.
        self.active_layers_indices = []

    def _get_layers(self):
        """Resolve the layer container from the stored dotted attribute path.

        Uses ``operator.attrgetter`` instead of the previous ``eval``-based
        lookup, which would execute arbitrary code if the attribute string
        were ever attacker-controlled.
        """
        return operator.attrgetter(self.layers_attribute)(self)

    def freeze_all_layers(self):
        """Set ``requires_grad=False`` on every parameter of every layer."""
        for layer in self._get_layers():
            for param in layer.parameters():
                param.requires_grad = False

    def on_step_begin(self, args, state, control, **kwargs):
        """Re-select the active layers every ``interval_steps`` global steps.

        NOTE(review): this fires *before* the step, so the gradient norms
        used for ranking come from the previous step's backward pass.
        """
        if state.global_step % self.interval_steps == 0:
            self.switch_active_layers()

    def switch_active_layers(self):
        """Freeze all layers, then unfreeze the ``n_layers`` best candidates.

        Candidates are ranked by average per-parameter gradient L2 norm; if
        every norm is zero (no backward pass has run yet), fall back to a
        uniform random selection.
        """
        self.freeze_all_layers()
        layers = self._get_layers()

        # Average gradient L2 norm per layer (0.0 when a layer has no grads).
        layer_grad_norms = []
        for idx, layer in enumerate(layers):
            total_norm, count = 0.0, 0
            for param in layer.parameters():
                if param.grad is not None:
                    total_norm += param.grad.detach().norm(2).item()
                    count += 1
            avg_norm = total_norm / count if count > 0 else 0.0
            layer_grad_norms.append((idx, avg_norm))

        if all(norm == 0.0 for _, norm in layer_grad_norms):
            # No gradient signal yet: choose uniformly at random.
            # `.tolist()` keeps `active_layers_indices` a plain list of ints
            # in both branches (previously this branch stored an ndarray).
            self.active_layers_indices = np.random.choice(
                range(self.total_layers), self.n_layers, replace=False
            ).tolist()
            print(f"[Fallback Random] Activating layers: {self.active_layers_indices}", flush=True)
        else:
            # Keep the top-n layers by descending gradient norm.
            layer_grad_norms.sort(key=lambda x: x[1], reverse=True)
            self.active_layers_indices = [idx for idx, _ in layer_grad_norms[:self.n_layers]]
            print(f"Activating layers (by grad norm) at indices: {self.active_layers_indices}", flush=True)

        # Unfreeze only the selected layers.
        for idx in self.active_layers_indices:
            for param in layers[idx].parameters():
                param.requires_grad = True
