import torch.nn as nn
from .module import PSAGAMParallel, SEAttention, SWA, OSRAAttention, CPAM, WTConv2d


class SelfDistillationModule(nn.Module):
    """One self-distillation stage.

    Optionally fuses a previous stage's feature map into the input, upsamples
    2x with a transposed convolution, applies SE channel attention, and adds a
    residual connection from the (pre-upsample) input.

    Args:
        input_channel:  number of channels of the incoming feature map.
        output_channel: number of channels produced by this stage.
    """

    def __init__(self, input_channel, output_channel):
        super(SelfDistillationModule, self).__init__()

        # Feature-fusion branch applied to the previous stage's feature map.
        self.fusion = nn.Sequential(
            nn.Conv2d(input_channel, input_channel, kernel_size=1),
            nn.BatchNorm2d(input_channel),
            nn.ReLU(True)
        )

        # stride=2 + output_padding=1 doubles the spatial resolution.
        self.convtranpose = nn.ConvTranspose2d(in_channels=input_channel,
                                               out_channels=output_channel,
                                               kernel_size=3, stride=2,
                                               padding=1, output_padding=1, bias=False)
        self.norm2 = nn.BatchNorm2d(output_channel)
        self.relu = nn.ReLU(True)
        self.se = SEAttention(output_channel)

        # Residual projection, needed only when the channel counts differ.
        self.shortcut = None
        if input_channel != output_channel:
            self.shortcut = nn.Sequential(
                nn.Conv2d(input_channel, output_channel, kernel_size=1, stride=1),
                nn.BatchNorm2d(output_channel)
            )

    def forward(self, x, prev_feature=None):
        """Run one stage; `prev_feature` (if given) is fused into `x` first."""
        # Feature fusion with the previous stage's output.
        if prev_feature is not None:
            x = x + self.fusion(prev_feature)

        identity = x

        x = self.convtranpose(x)
        x = self.norm2(x)
        x = self.relu(x)
        x = self.se(x)

        # Residual connection: project channels when they differ.
        if self.shortcut is not None:
            identity = self.shortcut(identity)
        # BUG FIX: the transposed conv doubles the spatial size, so the
        # identity branch must be resized to match before the addition.
        # The original code raised a shape-mismatch RuntimeError here for any
        # input with spatial extent > matching size.
        if identity.shape[-2:] != x.shape[-2:]:
            identity = nn.functional.interpolate(identity, size=x.shape[-2:],
                                                 mode='nearest')
        x = x + identity

        return x

class SelfDistillationModel(nn.Module):
    """Stack of self-distillation stages with halving channel counts.

    Stage i maps `input_channel / 2**i` channels down to half that, and each
    stage also emits a 3-channel output through a 1x1 conv + Tanh head.
    Forward hooks record every stage's raw feature map into
    `total_feature_maps`, keyed by layer name plus device string.
    """

    def __init__(self, input_channel, layer_num):
        super(SelfDistillationModel, self).__init__()

        self.layer_num = layer_num
        self.total_feature_maps = {}

        # One output head per stage, mapping features to 3 channels.
        self.output_layers = nn.ModuleList()

        in_ch = input_channel
        for idx in range(layer_num):
            out_ch = int(in_ch / 2)
            setattr(self, 'layer%d' % idx, SelfDistillationModule(in_ch, out_ch))
            head = nn.Sequential(
                nn.Conv2d(out_ch, 3, kernel_size=1),
                nn.Tanh()
            )
            self.output_layers.append(head)
            in_ch = out_ch

        self.register_hook()

    def forward(self, x):
        """Progressively refine `x`; return the list of per-stage outputs."""
        outputs = []
        feature = x
        for idx in range(self.layer_num):
            stage = getattr(self, 'layer%d' % idx)
            feature = stage(feature)
            # Every stage contributes its own 3-channel output.
            outputs.append(self.output_layers[idx](feature))
        return outputs

    def register_hook(self):
        """Attach forward hooks capturing each stage's output feature map."""
        self.extract_layers = ['layer%d' % i for i in range(self.layer_num)]
        wanted = set(self.extract_layers)

        def make_hook(store, key):
            # Key includes the device string so multi-GPU replicas don't clash.
            def hook(module, inputs, output):
                store[key + str(output.device)] = output
            return hook

        for name, module in self.named_modules():
            if name in wanted:
                module.register_forward_hook(
                    make_hook(self.total_feature_maps, name))

class DIYSelfDistillationModel(nn.Module):
    """Self-distillation stack with explicit per-stage channel counts.

    Stage i maps `channel_nums[i]` channels to `channel_nums[i+1]` and fuses
    the preceding stage's feature into its input. Each stage also emits a
    3-channel output via a 1x1 conv + Tanh head. Forward hooks record every
    stage's raw feature map into `total_feature_maps`, keyed by layer name
    plus device string.
    """

    def __init__(self, channel_nums, layer_num):
        super(DIYSelfDistillationModel, self).__init__()

        self.layer_num = layer_num
        self.total_feature_maps = {}

        # One output head per stage, mapping features to 3 channels.
        self.output_layers = nn.ModuleList()

        for idx in range(layer_num):
            in_ch, out_ch = channel_nums[idx], channel_nums[idx + 1]
            setattr(self, 'layer%d' % idx, SelfDistillationModule(in_ch, out_ch))
            self.output_layers.append(nn.Sequential(
                nn.Conv2d(out_ch, 3, kernel_size=1),
                nn.Tanh()
            ))

        self.register_hook()

    def forward(self, x):
        """Progressively refine `x`; return the list of per-stage outputs."""
        outputs = []
        features = []
        feature = x
        for idx in range(self.layer_num):
            # NOTE(review): `features[-1]` is the tensor that is also passed
            # as the stage input, so the stage fuses the feature with itself —
            # looks intentional but worth confirming with the author.
            prev = features[-1] if features else None
            feature = getattr(self, 'layer%d' % idx)(feature, prev)
            features.append(feature)
            # Every stage contributes its own 3-channel output.
            outputs.append(self.output_layers[idx](feature))
        return outputs

    def register_hook(self):
        """Attach forward hooks capturing each stage's output feature map."""
        self.extract_layers = ['layer%d' % i for i in range(self.layer_num)]
        wanted = set(self.extract_layers)

        def make_hook(store, key):
            # Key includes the device string so multi-GPU replicas don't clash.
            def hook(module, inputs, output):
                store[key + str(output.device)] = output
            return hook

        for name, module in self.named_modules():
            if name in wanted:
                module.register_forward_hook(
                    make_hook(self.total_feature_maps, name))