import torch
import torch.nn as nn
from timm.layers import DropPath

class LayerNorm2d(nn.Module):
    """Apply LayerNorm across the channel axis of a channels-first tensor.

    ``nn.LayerNorm`` normalizes over trailing dimensions, so the input is
    transposed to channels-last (N, H, W, C), normalized, and transposed back.

    Args:
        num_channels: size of the channel dimension C to normalize over.
        eps: numerical-stability constant forwarded to ``nn.LayerNorm``.
    """
    def __init__(self, num_channels, eps=1e-6):
        super().__init__()
        # Underlying norm acts on the last dimension, which must equal C.
        self.ln = nn.LayerNorm(num_channels, eps=eps)

    def forward(self, x):
        # NCHW -> NHWC, normalize, NHWC -> NCHW in a single expression.
        return self.ln(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2)

class HornetLiteBlock(nn.Module):
    """Two-stage residual block: depthwise 7x7 spatial mixing followed by a
    pointwise (1x1) channel MLP, each stage with layer scale and stochastic
    depth (DropPath).

    Shortcut handling:
      * stage 1 — if ``stride != 1`` the shortcut is downsampled with a
        strided 1x1 conv + BatchNorm (``proj1``), otherwise identity;
      * stage 2 — if ``in_dim != out_dim`` the shortcut is projected with a
        1x1 conv + BatchNorm (``proj2``), otherwise identity.

    Args:
        in_dim: input channel count.
        out_dim: output channel count (produced by the pointwise MLP).
        stride: spatial stride of the depthwise conv (and of ``proj1``).
        drop_path: stochastic-depth rate; 0 disables DropPath entirely.
        layer_scale_init_value: initial value for the per-channel layer-scale
            parameters ``gamma1`` / ``gamma2``.
    """

    def __init__(self, in_dim, out_dim, stride=1, drop_path=0., layer_scale_init_value=1e-6):
        super().__init__()

        # --- stage 1: depthwise 7x7 spatial mixing ---
        self.norm1 = LayerNorm2d(in_dim)
        self.dwconv = nn.Conv2d(in_dim, in_dim, kernel_size=7, padding=3,
                                stride=stride, groups=in_dim)
        # Per-channel layer scale, broadcast over (N, C, H, W).
        self.gamma1 = nn.Parameter(
            layer_scale_init_value * torch.ones((1, in_dim, 1, 1)),
            requires_grad=True)
        self.drop_path1 = nn.Identity() if drop_path <= 0. else DropPath(drop_path)

        # --- stage 2: pointwise channel MLP (expand 4x, project to out_dim) ---
        self.norm2 = LayerNorm2d(in_dim)
        self.pwconv = nn.Sequential(
            nn.Conv2d(in_dim, in_dim * 4, kernel_size=1),
            nn.GELU(),
            nn.Conv2d(in_dim * 4, out_dim, kernel_size=1),
        )
        self.gamma2 = nn.Parameter(
            layer_scale_init_value * torch.ones((1, out_dim, 1, 1)),
            requires_grad=True)
        self.drop_path2 = nn.Identity() if drop_path <= 0. else DropPath(drop_path)

        # Shortcut projection for stage 1: needed only when the depthwise
        # conv changes spatial resolution.
        self.use_proj1 = stride != 1
        if self.use_proj1:
            self.proj1 = nn.Sequential(
                nn.Conv2d(in_dim, in_dim, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(in_dim),
            )
        else:
            self.proj1 = nn.Identity()

        # Shortcut projection for stage 2: needed only when the channel
        # count changes.
        self.use_proj2 = in_dim != out_dim
        if self.use_proj2:
            self.proj2 = nn.Sequential(
                nn.Conv2d(in_dim, out_dim, kernel_size=1, stride=1, bias=False),
                nn.BatchNorm2d(out_dim),
            )
        else:
            self.proj2 = nn.Identity()

    def forward(self, x):
        # Stage 1: norm -> depthwise conv -> layer scale -> drop path,
        # added to the (possibly downsampled) shortcut.
        residual = self.proj1(x)
        out = residual + self.drop_path1(self.gamma1 * self.dwconv(self.norm1(x)))

        # Stage 2: norm -> pointwise MLP -> layer scale -> drop path,
        # added to the (possibly channel-projected) shortcut.
        branch = self.drop_path2(self.gamma2 * self.pwconv(self.norm2(out)))
        return self.proj2(out) + branch


