import torch.nn as nn
import torch.nn.functional as F


class ChannelAttention(nn.Module):
    """Channel attention module (SENet-style squeeze-and-excitation).

    Globally average-pools each channel to a scalar, pushes the pooled
    vector through a bottleneck MLP ending in a sigmoid, and rescales
    every input channel by its learned attention weight.
    """

    def __init__(self, channels, reduction=8):
        super().__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        # Squeeze/excite bottleneck: channels -> channels/reduction -> channels.
        self.fc = nn.Sequential(
            nn.Linear(channels, channels // reduction),
            nn.ReLU(inplace=True),
            nn.Linear(channels // reduction, channels),
            nn.Sigmoid(),
        )

    def forward(self, x):
        batch, chans = x.shape[:2]
        weights = self.avg_pool(x).view(batch, chans)
        weights = self.fc(weights).view(batch, chans, 1, 1)
        # Broadcast the per-channel weights over the spatial dimensions.
        return x * weights

class EnhancedTactileCNN(nn.Module):
    """CNN for tactile sensor maps with channel attention and a residual block.

    Design principles:
    1. Progressively widen channels: 16 -> 32 -> 64.
    2. Small 3x3 kernels preserve spatial information.
    3. Max pooling plus a depthwise-separable convolution keep the
       parameter count low.
    4. A residual connection improves gradient flow.
    5. Global average pooling ahead of the head curbs overfitting.

    Expected input shape: (batch_size, input_channels, 20, 20).
    """

    def __init__(self, input_channels=9, output_dim=1):
        """
        Args:
            input_channels: number of input feature maps (default 9).
            output_dim: width of the final linear layer. Default 1 —
                a single-value regression head, matching the original
                hard-coded design, so existing callers are unaffected.
        """
        super().__init__()

        # Initial conv block: (input_channels, 20, 20) -> (16, 10, 10)
        self.block1 = nn.Sequential(
            ChannelAttention(input_channels),
            nn.Conv2d(input_channels, 16, 3, padding=1),  # 16 x 20 x 20
            nn.BatchNorm2d(16),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(2)  # 16 x 10 x 10
        )

        # Residual block 1: two attention+conv stages at constant width (16).
        # No trailing ReLU here — forward() applies ReLU after the skip add.
        self.resblock1 = nn.Sequential(
            ChannelAttention(16),
            nn.Conv2d(16, 16, 3, padding=1),
            nn.BatchNorm2d(16),
            nn.ReLU(inplace=True),
            ChannelAttention(16),
            nn.Conv2d(16, 16, 3, padding=1),
            nn.BatchNorm2d(16)
        )

        # Middle conv block: (16, 10, 10) -> (32, 5, 5)
        self.block2 = nn.Sequential(
            ChannelAttention(16),
            nn.Conv2d(16, 32, 3, padding=1),  # 32 x 10 x 10
            nn.BatchNorm2d(32),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(2)  # 32 x 5 x 5
        )

        # Depthwise-separable conv block: (32, 5, 5) -> (64, 5, 5)
        self.dwconv = nn.Sequential(
            ChannelAttention(32),
            nn.Conv2d(32, 32, 3, groups=32, padding=1),  # depthwise conv
            nn.Conv2d(32, 64, 1),                        # pointwise conv
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True)
        )

        # Head: global average pooling + small MLP -> (batch, output_dim)
        self.fc = nn.Sequential(
            nn.AdaptiveAvgPool2d(1),  # global average pooling
            nn.Flatten(),
            nn.Dropout(0.5),
            nn.Linear(64, 32),
            nn.ReLU(),
            nn.Linear(32, output_dim)
        )

    def forward(self, x):
        """Run the network.

        Args:
            x: tensor of shape (batch, input_channels, 20, 20).

        Returns:
            Tensor of shape (batch, output_dim).
        """
        x = self.block1(x)  # (16, 10, 10)

        # Residual connection around resblock1, ReLU applied after the add.
        residual = x
        x = self.resblock1(x) + residual
        x = F.relu(x)

        x = self.block2(x)  # (32, 5, 5)
        x = self.dwconv(x)  # (64, 5, 5)

        return self.fc(x)  # (batch, output_dim)
    
if __name__ == "__main__":
    # Sanity-check the architecture: build it, print the layer layout,
    # and report how many weights would be trained.
    net = EnhancedTactileCNN(input_channels=9)
    print(net)

    trainable = sum(
        param.numel() for param in net.parameters() if param.requires_grad
    )
    print(f"Total trainable parameters: {trainable}")