import torch
import torch.nn as nn
import torch.nn.utils.prune as prune
import numpy as np
from collections import OrderedDict

# Import the LightweightRRDBNet student model class
from knowledge_distillation import LightweightRRDBNet

class ChannelPruner:
    """Activation-based channel pruner for convolutional networks.

    Ranks the output channels of every plain ``nn.Conv2d`` (``groups == 1``)
    by the mean L1 magnitude of their activations over a small calibration
    set, then zeroes out the globally least-important channels using
    ``torch.nn.utils.prune`` masks.  Masks can later be folded into the
    weights permanently and the model fine-tuned to recover accuracy.
    """

    def __init__(self, pruning_rate=0.3):
        # Fraction of channels to prune, measured globally across all layers.
        self.pruning_rate = pruning_rate
        # Names of the Conv2d layers eligible for pruning
        # (populated by compute_channel_importance).
        self.pruned_layers = []

    def compute_channel_importance(self, model, calibration_loader, device):
        """Estimate per-channel importance from mean absolute activations.

        Args:
            model: network to calibrate; switched to eval mode.
            calibration_loader: iterable yielding ``(lr_imgs, _)`` batches.
            device: device to run the forward passes on.

        Returns:
            dict mapping conv-layer name -> 1-D tensor of per-output-channel
            importance scores, averaged over up to 10 calibration batches.
        """
        model.eval()

        # Per-layer lists of per-batch channel scores.
        activations = {}
        hooks = []
        # Reset so repeated calls do not accumulate duplicate layer names.
        self.pruned_layers = []

        def hook_fn(name):
            def hook(module, input, output):
                # Mean |activation| per output channel: reduce over batch and
                # spatial dims -> shape (C_out,).  Assumes a 4-D conv output.
                channel_importance = output.abs().mean(dim=(0, 2, 3)).detach().cpu()
                activations.setdefault(name, []).append(channel_importance)
            return hook

        # Hook every plain convolution; grouped/depthwise convs are skipped
        # because their channels cannot be pruned independently.
        for name, module in model.named_modules():
            if isinstance(module, nn.Conv2d) and module.groups == 1:
                hooks.append(module.register_forward_hook(hook_fn(name)))
                self.pruned_layers.append(name)

        # Forward a handful of calibration batches to collect activations.
        with torch.no_grad():
            for batch_idx, (lr_imgs, _) in enumerate(calibration_loader):
                if batch_idx >= 10:  # 10 batches are enough for calibration
                    break
                _ = model(lr_imgs.to(device))

        # Always detach the hooks so the model is left clean.
        for hook in hooks:
            hook.remove()

        # Average the per-batch scores into a single vector per layer.
        return {
            name: torch.stack(importance_list).mean(dim=0)
            for name, importance_list in activations.items()
        }

    def global_pruning(self, model, channel_importance):
        """Apply global magnitude pruning across all scored conv layers.

        The threshold is the ``pruning_rate`` quantile of ALL channel scores
        pooled together, so weaker layers lose proportionally more channels.

        Args:
            model: the model whose layers appear in ``channel_importance``.
            channel_importance: output of :meth:`compute_channel_importance`.

        Returns:
            The model with pruning reparametrizations attached in place.
        """
        all_importances = []
        param_names = []

        for name, module in model.named_modules():
            if name in channel_importance:
                all_importances.append(channel_importance[name])
                param_names.append((name, module))

        # Single global threshold over every channel of every layer.
        all_importances = torch.cat(all_importances)
        global_threshold = torch.quantile(all_importances, self.pruning_rate)

        print(f"全局剪枝阈值: {global_threshold:.6f}")

        for name, module in param_names:
            importance = channel_importance[name]
            mask = importance > global_threshold  # True -> keep the channel

            layer_pruning_rate = 1 - mask.float().mean()
            print(f"层 {name}: 剪枝率 {layer_pruning_rate:.3f} "
                  f"({mask.sum().item()}/{len(mask)} 通道保留)")

            # BUGFIX: prune.custom_from_mask requires a mask with exactly the
            # weight's shape (C_out, C_in, kH, kW).  Expand the per-output-
            # channel mask along the remaining dims; the previous
            # (1, C_out, 1, 1) layout failed prune's shape check whenever
            # C_out != C_in, and masked the wrong (input-channel) axis.
            weight_mask = (
                mask.view(-1, 1, 1, 1)
                .expand_as(module.weight)
                .to(module.weight.device)
            )
            prune.custom_from_mask(module, name='weight', mask=weight_mask)

        return model

    def remove_pruning(self, model):
        """Fold pruning masks into the weights, making pruning permanent."""
        for module in model.modules():
            # BUGFIX: prune.is_pruned() also returns True for *container*
            # modules whose children are pruned, so prune.remove() would
            # raise on them.  Detect the 'weight_orig' reparametrization,
            # which exists only on modules actually pruned on 'weight'.
            if hasattr(module, 'weight_orig'):
                prune.remove(module, 'weight')

        return model

    def fine_tune_pruned_model(self, pruned_model, train_loader, epochs, device):
        """Fine-tune a pruned model with L1 loss to recover accuracy.

        Args:
            pruned_model: model to fine-tune (trained in place).
            train_loader: iterable yielding ``(lr_imgs, hr_imgs)`` batches.
            epochs: number of passes over ``train_loader``.
            device: device to train on.

        Returns:
            The fine-tuned model (same object as ``pruned_model``).
        """
        pruned_model.train()
        optimizer = torch.optim.Adam(pruned_model.parameters(), lr=1e-4)
        criterion = nn.L1Loss()

        for epoch in range(epochs):
            running_loss = 0.0
            for batch_idx, (lr_imgs, hr_imgs) in enumerate(train_loader):
                lr_imgs = lr_imgs.to(device)
                hr_imgs = hr_imgs.to(device)

                outputs = pruned_model(lr_imgs)
                loss = criterion(outputs, hr_imgs)

                optimizer.zero_grad()
                loss.backward()
                optimizer.step()

                running_loss += loss.item()

            # Report progress every 10 epochs.
            if (epoch + 1) % 10 == 0:
                print(f'Fine-tuning Epoch [{epoch+1}/{epochs}], Loss: {running_loss/len(train_loader):.4f}')

        return pruned_model

def test_pruning():
    """Smoke-test scaffold for the pruning pipeline.

    Builds the lightweight student model and reports its size; the
    calibration / pruning / fine-tuning steps remain commented out until a
    real calibration DataLoader is wired in.
    """
    # Target model for pruning.
    model = LightweightRRDBNet(num_feat=32, num_block=8)

    # Baseline parameter count, reported in millions.
    original_params = sum(p.numel() for p in model.parameters())
    print(f"原始模型参数量: {original_params/1e6:.2f}M")

    # Prune half of all channels globally.
    pruner = ChannelPruner(pruning_rate=0.5)

    # Placeholder — replace with a real calibration DataLoader.
    calibration_loader = []

    # Score channels on calibration data:
    # channel_importance = pruner.compute_channel_importance(model, calibration_loader, device='cpu')

    # Apply global pruning:
    # pruned_model = pruner.global_pruning(model, channel_importance)

    # Fold the masks into the weights:
    # pruned_model = pruner.remove_pruning(pruned_model)

    # Report the resulting size and compression ratio:
    # pruned_params = sum(p.numel() for p in pruned_model.parameters())
    # print(f"剪枝后参数量: {pruned_params/1e6:.2f}M")
    # print(f"实际压缩比: {original_params/pruned_params:.2f}x")

# Run the pruning smoke test when executed as a script.
if __name__ == "__main__":
    test_pruning()