import torch
import torch.nn as nn
import torch.nn.functional as F

class GlobalPiCANet(nn.Module):
    """Global pixel-wise contextual attention (PiCANet, global variant).

    Every spatial position attends over *all* positions of the feature map
    (full H*W x H*W attention), and the attended context is projected back
    to ``in_channels`` and added to the input as a residual.

    Args:
        in_channels: number of channels C of the input feature map.
        inter_channels: projection dimension d used for query/key/value.
    """

    def __init__(self, in_channels, inter_channels=64):
        super(GlobalPiCANet, self).__init__()
        # 1x1 projections into the attention space (d = inter_channels).
        self.query_conv = nn.Conv2d(in_channels, inter_channels, kernel_size=1)
        self.key_conv   = nn.Conv2d(in_channels, inter_channels, kernel_size=1)
        self.value_conv = nn.Conv2d(in_channels, inter_channels, kernel_size=1)

        # Projects the attended context back to the input channel count
        # so the residual addition is well-defined.
        self.out_conv   = nn.Conv2d(inter_channels, in_channels, kernel_size=1)

    def forward(self, x):
        """Apply global attention.

        Args:
            x: input feature map of shape (B, C, H, W).

        Returns:
            Tensor of shape (B, C, H, W): attended features plus residual.
        """
        B, C, H, W = x.size()
        d = self.query_conv.out_channels

        query = self.query_conv(x)  # (B, d, H, W)
        key   = self.key_conv(x)    # (B, d, H, W)
        value = self.value_conv(x)  # (B, d, H, W)

        # Flatten the spatial dimensions: N = H * W.
        query = query.view(B, -1, H * W)       # (B, d, N)
        key   = key.view(B, -1, H * W)         # (B, d, N)
        value = value.view(B, -1, H * W)       # (B, d, N)

        # Scaled dot-product logits, attn[b, i, j] = <q_i, k_j> / sqrt(d).
        # The 1/sqrt(d) temperature matches LocalPiCANet and prevents the
        # softmax from saturating when d is large.
        attn = torch.bmm(query.permute(0, 2, 1), key) / (d ** 0.5)  # (B, N, N)

        # Normalize over the key positions j.
        attn = F.softmax(attn, dim=-1)  # (B, N, N)

        # out[:, :, i] = sum_j attn[i, j] * value[:, :, j]
        out = torch.bmm(value, attn.permute(0, 2, 1))  # (B, d, N)

        out = out.view(B, -1, H, W)  # (B, d, H, W)
        out = self.out_conv(out)     # (B, C, H, W)
        out = out + x                # residual connection
        return out

class LocalPiCANet(nn.Module):
    def __init__(self, in_channels, inter_channels=64, ksize=7):
        super(LocalPiCANet, self).__init__()
        assert ksize % 2 == 1, "ksize must be odd for center alignment"

        self.query_conv = nn.Conv2d(in_channels, inter_channels, kernel_size=1)
        self.key_conv   = nn.Conv2d(in_channels, inter_channels, kernel_size=1)
        self.value_conv = nn.Conv2d(in_channels, inter_channels, kernel_size=1)
        self.out_conv   = nn.Conv2d(inter_channels, in_channels, kernel_size=1)

        self.ksize = ksize
        self.pad = ksize // 2
        self.unfold = nn.Unfold(kernel_size=ksize, padding=self.pad)

    def forward(self, x):
        B, C, H, W = x.shape
        d = self.query_conv.out_channels  # inter_channels

        query = self.query_conv(x)  # (B, d, H, W)
        key   = self.key_conv(x)
        value = self.value_conv(x)

        query = query.view(B, d, H * W).transpose(1, 2)  # (B, H*W, d)

        key_unfold = self.unfold(key)    # (B, d*K*K, H*W)
        value_unfold = self.unfold(value)  # (B, d*K*K, H*W)

        key_unfold = key_unfold.view(B, d, self.ksize * self.ksize, H * W).permute(0, 3, 2, 1)  # (B, H*W, K*K, d)
        value_unfold = value_unfold.view(B, d, self.ksize * self.ksize, H * W).permute(0, 3, 2, 1)  # (B, H*W, K*K, d)

        query = query.unsqueeze(2)  # (B, H*W, 1, d)

        attn = torch.sum(query * key_unfold, dim=-1) / (d ** 0.5)  # (B, H*W, K*K)
        attn = F.softmax(attn, dim=-1)

        out = torch.sum(attn.unsqueeze(-1) * value_unfold, dim=2)  # (B, H*W, d)
        out = out.transpose(1, 2).view(B, d, H, W)  # (B, d, H, W)

        out = self.out_conv(out)  # (B, C, H, W)
        return out


    











