import torch
import torch.nn as nn
import torch.nn.functional as F
class HardSwish(nn.Module):
    """Hard-swish activation: x * relu6(x + 3) / 6 (MobileNetV3, Howard et al. 2019)."""

    def forward(self, x):
        # F.hardswish implements the exact same piecewise formula as the
        # manual x * relu6(x + 3) / 6, but as a single fused op.
        return F.hardswish(x)

class HardSigmoid(nn.Module):
    """Hard-sigmoid activation: relu6(x + 3) / 6, a piecewise-linear sigmoid approximation."""

    def forward(self, x):
        # F.hardsigmoid computes clamp(x / 6 + 1/2, 0, 1), which is exactly
        # relu6(x + 3) / 6, in one fused op.
        return F.hardsigmoid(x)

class SqueezeExcitation(nn.Module):
    """Squeeze-and-excitation channel attention.

    Globally average-pools the feature map to one descriptor per channel,
    passes it through a two-layer 1x1-conv bottleneck (narrowed by
    `reduction_ratio`), and rescales the input channels by the resulting
    [0, 1] gates.
    """

    def __init__(self, in_channels, reduction_ratio=4):
        super(SqueezeExcitation, self).__init__()
        squeezed = in_channels // reduction_ratio
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc1 = nn.Conv2d(in_channels, squeezed, kernel_size=1)
        self.relu = nn.ReLU(inplace=True)
        self.fc2 = nn.Conv2d(squeezed, in_channels, kernel_size=1)
        self.hard_sigmoid = HardSigmoid()

    def forward(self, x):
        # Squeeze: (N, C, H, W) -> (N, C, 1, 1); excite through the bottleneck.
        gates = self.avg_pool(x)
        gates = self.relu(self.fc1(gates))
        gates = self.hard_sigmoid(self.fc2(gates))
        # Broadcast the per-channel gates back over the spatial dimensions.
        return gates * x

class InvertedResidual(nn.Module):
    """MobileNet-style inverted residual: expand -> depthwise -> SE -> project.

    A skip connection is added only when the block is shape-preserving,
    i.e. stride is 1 and input/output channel counts match.
    """

    def __init__(self, in_channels, out_channels, expansion_factor, stride):
        super(InvertedResidual, self).__init__()
        expanded = int(in_channels * expansion_factor)

        modules = []
        # Pointwise expansion is skipped when no widening is requested.
        if expansion_factor != 1:
            modules += [
                nn.Conv2d(in_channels, expanded, kernel_size=1, bias=False),
                nn.BatchNorm2d(expanded),
                HardSwish(),
            ]

        # Depthwise 3x3 conv (groups == channels), SE gating, linear projection.
        modules += [
            nn.Conv2d(expanded, expanded, kernel_size=3, stride=stride, padding=1, groups=expanded, bias=False),
            nn.BatchNorm2d(expanded),
            HardSwish(),
            SqueezeExcitation(expanded),
            nn.Conv2d(expanded, out_channels, kernel_size=1, bias=False),
            nn.BatchNorm2d(out_channels),
        ]

        self.conv = nn.Sequential(*modules)
        self.use_residual = stride == 1 and in_channels == out_channels

    def forward(self, x):
        y = self.conv(x)
        return y + x if self.use_residual else y

class MobileNetV3(nn.Module):
    """MobileNetV3-style image classifier built from InvertedResidual blocks.

    `width_multiplier` uniformly scales every channel count (truncated to
    int). `forward` returns a dict with the logits under the key 'output'.
    """

    def __init__(self, num_classes=1000, width_multiplier=1.0):
        super(MobileNetV3, self).__init__()

        def c(channels):
            # Apply the width multiplier to a base channel count.
            return int(channels * width_multiplier)

        # Stem: stride-2 3x3 conv from RGB to c(16) channels.
        self.conv1 = nn.Conv2d(3, c(16), kernel_size=3, stride=2, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(c(16))
        self.hs1 = HardSwish()

        # One (in, out, expansion_factor, stride) tuple per bottleneck block.
        block_cfg = [
            (16, 16, 1, 1),
            (16, 24, 4, 2),
            (24, 24, 3, 1),
            (24, 40, 3, 2),
            (40, 40, 3, 1),
            (40, 40, 3, 1),
            (40, 80, 6, 2),
            (80, 80, 2.5, 1),
            (80, 80, 2.3, 1),
            (80, 80, 2.3, 1),
            (80, 112, 6, 1),
            (112, 112, 6, 1),
            (112, 160, 6, 2),
            (160, 160, 6, 1),
            (160, 160, 6, 1),
            (160, 320, 6, 1),
        ]
        self.layers = nn.Sequential(*[
            InvertedResidual(c(cin), c(cout), expansion_factor=exp, stride=s)
            for cin, cout, exp, s in block_cfg
        ])

        # Head: 1x1 conv to c(1280), global pool, linear classifier.
        self.conv2 = nn.Conv2d(c(320), c(1280), kernel_size=1, stride=1, padding=0, bias=False)
        self.bn2 = nn.BatchNorm2d(c(1280))
        self.hs2 = HardSwish()
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(c(1280), num_classes)

    def forward(self, x):
        out = self.hs1(self.bn1(self.conv1(x)))
        out = self.layers(out)
        out = self.hs2(self.bn2(self.conv2(out)))
        out = self.avgpool(out)
        out = self.fc(torch.flatten(out, 1))
        # Callers expect the logits wrapped in a dict under 'output'.
        return {'output': out}