from mmcv.ops.carafe import CARAFEPack

class CARAFEAttn(CARAFEPack):
    """Dynamic attention/upsampling module built on CARAFEPack.

    Thin wrapper around :class:`mmcv.ops.carafe.CARAFEPack` that optionally
    adds a residual (identity) connection around the CARAFE operator.

    Args:
        dim (int): Number of input (and output) channels.
        scale_factor (int): Upsampling ratio forwarded to ``CARAFEPack``.
        up_kernel (int): Kernel size of the CARAFE upsampling op.
        up_group (int): Group count of the CARAFE upsampling op.
        encoder_kernel (int): Kernel size of the kernel-prediction encoder.
        encoder_dilation (int): Dilation of the kernel-prediction encoder.
        compressed_channels (int): Channel width of the compressed features.
        use_res (bool): If True, add the input back onto the CARAFE output.
            Requires ``scale_factor == 1`` so spatial sizes match.

    Raises:
        ValueError: If ``use_res`` is True while ``scale_factor != 1`` —
            the residual add in :meth:`forward` would otherwise fail later
            with an opaque shape-mismatch error, since CARAFE enlarges the
            output spatially but the saved input keeps its original size.
    """

    def __init__(self, dim, scale_factor, up_kernel=5, up_group=1,
                 encoder_kernel=3, encoder_dilation=1,
                 compressed_channels=64, use_res=True):
        super().__init__(dim, scale_factor,
                         up_kernel=up_kernel,
                         up_group=up_group,
                         encoder_kernel=encoder_kernel,
                         encoder_dilation=encoder_dilation,
                         compressed_channels=compressed_channels)
        # Fail fast: with scale_factor != 1 the upsampled output and the
        # input differ spatially, so the residual add cannot work.
        if use_res and scale_factor != 1:
            raise ValueError(
                'use_res=True requires scale_factor == 1, '
                f'got scale_factor={scale_factor}')
        self.use_res = use_res

    def forward(self, x):
        """Apply CARAFE to ``x``, optionally with a residual connection.

        Args:
            x (Tensor): Input feature map; presumably shaped
                ``(N, dim, H, W)`` as expected by CARAFEPack.

        Returns:
            Tensor: ``carafe(x) + x`` when ``use_res`` is enabled,
            otherwise ``carafe(x)``.
        """
        out = super().forward(x)
        if self.use_res:
            # x is not reassigned above, so adding it directly is
            # equivalent to the saved-reference pattern.
            out = out + x
        return out

if __name__ == "__main__":
    import torch

    # Quick CUDA smoke test: scale_factor=1 preserves the spatial size,
    # so the residual branch is exercised; prints the output shape.
    model = CARAFEAttn(640, scale_factor=1, up_kernel=3).cuda()
    feats = torch.randn(16, 640, 5, 5).cuda()

    result = model(feats)
    print(result.shape)