import torch
from torch import nn
from distillation.builder import build_feature_loss


class HookRegister:
    """Capture paired student/teacher feature maps for feature distillation.

    Registers forward hooks on the modules named in ``distill_cfg``; each hook
    stashes its module's output as a buffer (``student{i}`` / ``teacher{i}``)
    on the teacher model. After both forward passes, ``compute_loss`` pairs
    the buffers up and sums the configured feature losses.

    Args:
        student_model: model being trained; gains a ``distill_losses``
            ``nn.ModuleDict`` populated from the config.
        teacher_model: reference model; feature buffers are stored on it.
        distill_cfg: iterable of entries, each with ``student_module`` /
            ``teacher_module`` (dotted names from ``named_modules()``) and
            ``methods`` (loss specs carrying a ``name`` attribute).
    """

    def __init__(self, student_model, teacher_model, distill_cfg):
        self.student_model = student_model
        self.teacher_model = teacher_model
        self.distill_cfg = distill_cfg
        # Rolling pair index advanced by the hooks in module-call order.
        self.cnt = 0

        self.student_model.distill_losses = nn.ModuleDict()
        student_modules = dict(student_model.named_modules())
        teacher_modules = dict(teacher_model.named_modules())

        self.student_handles = []
        # Keep teacher handles too so those hooks can be detached later
        # (previously they were discarded and became irremovable).
        self.teacher_handles = []
        # Walk the distillation config and hook every module whose output
        # features must be captured.
        for i, item_loc in enumerate(distill_cfg):
            # Placeholder buffers; the hooks overwrite them with real features.
            teacher_model.register_buffer("student" + str(i), None)
            teacher_model.register_buffer("teacher" + str(i), None)

            self.student_handles.append(
                student_modules[item_loc.student_module].register_forward_hook(
                    self.hook_student_forward))
            self.teacher_handles.append(
                teacher_modules[item_loc.teacher_module].register_forward_hook(
                    self.hook_teacher_forward))

            for item_loss in item_loc.methods:
                self.student_model.distill_losses[item_loss.name] = \
                    build_feature_loss(item_loss)

    def remove_student_hooks(self):
        """Detach every student-side forward hook."""
        for handle in self.student_handles:
            handle.remove()

    def register_student_hooks(self):
        """Re-attach student-side hooks after ``remove_student_hooks``."""
        self.student_handles.clear()
        student_modules = dict(self.student_model.named_modules())
        for item_loc in self.distill_cfg:
            self.student_handles.append(
                student_modules[item_loc.student_module].register_forward_hook(
                    self.hook_student_forward))

    def _advance(self):
        # Wrap the rolling index over the number of configured pairs.
        # Was hard-coded to 3, which misfiled features for any other
        # config length; modulo len(distill_cfg) is identical when it is 3.
        self.cnt = (self.cnt + 1) % len(self.distill_cfg)

    def hook_student_forward(self, module, input, output):
        """Store the student feature map under the current pair index."""
        self.teacher_model.register_buffer("student" + str(self.cnt), output)
        self._advance()

    def hook_teacher_forward(self, module, input, output):
        """Store the teacher feature map under the current pair index."""
        self.teacher_model.register_buffer("teacher" + str(self.cnt), output)
        self._advance()

    def compute_loss(self):
        """Sum all configured feature losses over the captured buffers.

        Returns:
            A 1-element tensor holding the total feature-distillation loss,
            on the same device as the captured features.
        """
        buffer_dict = dict(self.teacher_model.named_buffers())
        # Accumulate on whatever device the features actually live on rather
        # than a hard-coded "cuda:0" (which crashed on CPU-only setups and
        # mixed devices on multi-GPU ones).
        device = next(
            (buf.device for buf in buffer_dict.values() if buf is not None),
            "cpu")
        feature_loss = torch.zeros(1, device=device)
        for i, item_loc in enumerate(self.distill_cfg):
            student_feat = buffer_dict["student" + str(i)]
            teacher_feat = buffer_dict["teacher" + str(i)]
            for item_loss in item_loc.methods:
                feature_loss += self.student_model.distill_losses[
                    item_loss.name](student_feat, teacher_feat)

        return feature_loss
