import torch
import torch.nn as nn
import torch.nn.functional as F

# from utils.anchor import genAnchor
# from modules.initializer import module_weight_init
from torchvision.models.resnet import *
from torchvision.models.mobilenet import *
from modules.yolov3.yololayer import YoloLayer

# try:
#     from torchvision.models.resnet import conv1x1, conv3x3
# except:
def conv3x3(in_planes, out_planes, stride=1):
    """Depthwise-separable 3x3 block.

    Depthwise 3x3 conv (stride applied here) + BN + LeakyReLU, then a
    pointwise 1x1 projection + BN.  No activation after the final BN.
    """
    depthwise = nn.Conv2d(in_planes, in_planes, kernel_size=3, stride=stride,
                          padding=1, bias=False, groups=in_planes)
    pointwise = nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=1,
                          padding=0, bias=False)
    return nn.Sequential(
        depthwise,
        nn.BatchNorm2d(in_planes),
        nn.LeakyReLU(),
        pointwise,
        nn.BatchNorm2d(out_planes),
    )
    
def conv1x1(in_planes, out_planes, stride=1):
    """Pointwise 1x1 convolution followed by batch norm (no activation)."""
    layers = [
        nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False),
        nn.BatchNorm2d(out_planes),
    ]
    return nn.Sequential(*layers)
# endtry

class BasicBlock(nn.Module):
    """Residual block built from two depthwise-separable 3x3 convolutions.

    The main path is conv3x3 -> leaky_relu -> conv3x3; the identity path is
    optionally projected by `downsample` so shapes match before the add.
    Output channels equal `planes` (expansion factor 1).
    """

    _expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        # Stride is applied only in the first conv of the main path.
        self._conv1 = conv3x3(inplanes, planes, stride)
        self._conv2 = conv3x3(planes, planes)
        # Projection for the skip connection when shape/stride changes.
        self._downsample = downsample
        self._stride = stride
        self._inplanes = inplanes
        self._planes = planes

    def forward(self, x):
        identity = x if self._downsample is None else self._downsample(x)
        out = self._conv2(F.leaky_relu(self._conv1(x)))
        return F.leaky_relu(identity + out)

class Bottleneck(nn.Module):
    """Bottleneck residual block: 1x1 reduce -> 3x3 -> 1x1 expand.

    The main path narrows to `planes` channels, convolves at `stride`, then
    expands to `planes * 4`.  The identity path is optionally projected by
    `downsample` so shapes match before the add.
    """

    _expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(Bottleneck, self).__init__()
        self._conv1 = conv1x1(inplanes, planes)
        # Stride is applied in the middle 3x3 conv, as in standard ResNet.
        self._conv2 = conv3x3(planes, planes, stride)
        self._conv3 = conv1x1(planes, planes * self._expansion)
        self._downsample = downsample
        self._inplanes = inplanes
        self._planes = planes
        self._stride = stride

    def forward(self, x):
        # Main path with LeakyReLU between stages (none after the last BN).
        out = F.leaky_relu(self._conv1(x))
        out = F.leaky_relu(self._conv2(out))
        out = self._conv3(out)
        # Skip connection, projected if necessary.
        identity = x if self._downsample is None else self._downsample(x)
        return F.leaky_relu(out + identity)

class DetResNet(nn.Module):
    """Narrow ResNet backbone with three YOLO detection branches.

    The stem (7x7/2 conv + maxpool/2) and four residual stages reduce the
    input 32x; branch 0 predicts on the deepest feature map, and branches 1
    and 2 each upsample the previous branch's raw prediction map 2x and
    predict again, giving detections at three scales.

    Args:
        block: residual block class (``BasicBlock`` or ``Bottleneck``).
        layers: four ints — number of blocks in each of the four stages.
        zero_init_residual: if True, zero-init the last BN of every residual
            branch so each block starts as an identity mapping
            (https://arxiv.org/abs/1706.02677).
        grids: stored on the instance but not used in this class — presumably
            consumed by external callers; TODO confirm.
        anchors: three lists of anchor boxes, one per branch.
        num_classes: number of object classes per anchor.
    """
    # <method __init__>
    def __init__(
        self, 
        block, 
        layers, 
        zero_init_residual, 
        grids,
        anchors,
        num_classes
        ):
        super(DetResNet, self).__init__()
        self._grids = grids
        self._anchors = anchors
        self._num_classes = num_classes
        self._inplanes = 8
        # Stem: 7x7 stride-2 conv + 3x3 stride-2 maxpool -> 1/4 resolution.
        self.conv1 = nn.Sequential(
                nn.Conv2d(3, 8, kernel_size=7, stride=2, padding=3, bias=False),
                nn.BatchNorm2d(8),
                nn.LeakyReLU()
            )
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 8, layers[0])
        self.layer2 = self._make_layer(block, 16, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 32, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 64, layers[3], stride=2)
        # Per-branch output channels: each anchor predicts 4 box coordinates,
        # 1 objectness score, and num_classes class scores.
        ochs = [len(a) * (4 + 1 + num_classes) for a in anchors]
        # layer4 emits 64 * expansion channels (64 for BasicBlock, 256 for
        # Bottleneck).  BUGFIX: the input width of the first prediction conv
        # was hard-coded to 64, which broke Bottleneck backbones.
        backbone_out = 64 * block._expansion
        # branch 0: predicts on the deepest feature map.
        self.out0 = nn.Sequential(
            nn.Conv2d(backbone_out, 64, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.LeakyReLU(),
            nn.Conv2d(64, ochs[0], kernel_size=1, stride=1, padding=0, bias=False),
            nn.BatchNorm2d(ochs[0]),
            )
        self.yolo0 = YoloLayer(anchors = anchors[0], num_classes = num_classes)
        # branch 1: 2x upsample of branch 0's raw prediction map.
        self.up1 = nn.UpsamplingNearest2d(scale_factor=2)
        self.out1 = nn.Sequential(
            nn.Conv2d(ochs[0], 64, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.LeakyReLU(),
            nn.Conv2d(64, ochs[1], kernel_size=1, stride=1, padding=0, bias=False),
            nn.BatchNorm2d(ochs[1]),
            )
        self.yolo1 = YoloLayer(anchors = anchors[1], num_classes = num_classes)
        # branch 2: one more 2x upsampling, of branch 1's output.
        self.up2 = nn.UpsamplingNearest2d(scale_factor=2)
        self.out2 = nn.Sequential(
            nn.Conv2d(ochs[1], 64, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.LeakyReLU(),
            nn.Conv2d(64, ochs[2], kernel_size=1, stride=1, padding=0, bias=False),
            nn.BatchNorm2d(ochs[2]),
            )
        self.yolo2 = YoloLayer(anchors = anchors[2], num_classes = num_classes)
        # Standard Kaiming init for convs, affine identity for BN.
        # NOTE(review): nonlinearity='relu' although the net uses LeakyReLU;
        # the leaky_relu gain would differ slightly — kept as-is to preserve
        # existing training behavior.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
        # Zero-initialize the last BN in each residual branch, so that the
        # residual branch starts with zeros and each block behaves like an
        # identity.  Improves the model by 0.2~0.3% according to
        # https://arxiv.org/abs/1706.02677
        # BUGFIX: the blocks in this file have no `bn2`/`bn3` attributes (that
        # naming is torchvision's); the last BatchNorm2d is the final module
        # of the `_conv2`/`_conv3` Sequential, so the original code raised
        # AttributeError whenever zero_init_residual was True.
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, Bottleneck):
                    nn.init.constant_(m._conv3[-1].weight, 0)
                elif isinstance(m, BasicBlock):
                    nn.init.constant_(m._conv2[-1].weight, 0)
    # <method __init__>

    # <method _make_layer>
    def _make_layer(self, block, planes, blocks, stride=1):
        """Stack `blocks` residual blocks; only the first may stride/project.

        Updates self._inplanes to the stage's output width as a side effect.
        """
        downsample = None
        if stride != 1 or self._inplanes != planes * block._expansion:
            # Project the identity path so it matches the main path's shape.
            downsample = conv1x1(self._inplanes, planes * block._expansion, stride)
        layers = []
        layers.append(block(self._inplanes, planes, stride, downsample))
        self._inplanes = planes * block._expansion
        for _ in range(1, blocks):
            layers.append(block(self._inplanes, planes))
        return nn.Sequential(*layers)
    # <method _make_layer>

    # <method forward>
    def forward(self, x):
        """Run all three branches.

        Returns a 2-tuple: the per-branch YoloLayer feature maps concatenated
        on dim 1, and the per-branch boxes concatenated on dim 1.
        """
        # Original input size is passed to each YoloLayer so it can scale
        # its grid back to image coordinates.
        nI_h = x.size(2)
        nI_w = x.size(3)
        x = self.conv1(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        out0 = self.out0(x)
        out1 = self.out1(self.up1(out0))
        out2 = self.out2(self.up2(out1))
        fm0, boxes0 = self.yolo0(out0, nI_h, nI_w)
        fm1, boxes1 = self.yolo1(out1, nI_h, nI_w)
        fm2, boxes2 = self.yolo2(out2, nI_h, nI_w)
        return torch.cat((fm0, fm1, fm2), 1), torch.cat((boxes0, boxes1, boxes2), 1)
    # <method forward>
            

def DetResNet18(*args, **kwargs):
    """Build a DetResNet with the ResNet-18 layout (BasicBlock, 2-2-2-2).

    All positional and keyword arguments are forwarded to DetResNet.
    """
    stage_depths = [2, 2, 2, 2]
    return DetResNet(BasicBlock, stage_depths, *args, **kwargs)

def test_BasicBlock():
    """Smoke test: a stride-1 BasicBlock must preserve the input shape."""
    inp = torch.rand(8, 16, 256, 256)
    outp = BasicBlock(16, 16)(inp)
    print(inp.shape)
    print(outp.shape)

def test_Bottleneck():
    """Smoke test: Bottleneck(64, 16) expands back to 64 channels (16 * 4),
    so no downsample is needed and shapes are preserved."""
    inp = torch.rand(8, 64, 256, 256)
    outp = Bottleneck(64, 16)(inp)
    print(inp.shape)
    print(outp.shape)

def test_DetResNet18():
    """Smoke test DetResNet18 with a minimal configuration.

    BUGFIX: the original called DetResNet18() with no arguments, but
    DetResNet requires zero_init_residual, grids, anchors and num_classes
    (TypeError), and forward() returns a (feature_maps, boxes) tuple, so
    `outp.shape` would have raised AttributeError.
    """
    # Three anchor sets, one per branch, coarsest scale first
    # (values follow the usual YOLOv3 anchors — adjust for real data).
    anchors = [
        [(116, 90), (156, 198), (373, 326)],
        [(30, 61), (62, 45), (59, 119)],
        [(10, 13), (16, 30), (33, 23)],
    ]
    grids = [13, 26, 52]  # presumably one grid size per branch — TODO confirm
    net = DetResNet18(
        zero_init_residual=False,
        grids=grids,
        anchors=anchors,
        num_classes=80,
    )
    inp = torch.rand(2, 3, 416, 416)  # small batch keeps the smoke test fast
    fms, boxes = net(inp)
    print(inp.shape)
    print(fms.shape)
    print(boxes.shape)

if __name__ == '__main__':
    # CLI entry point: python-fire exposes the smoke test as a command.
    # NOTE: `fire` is a third-party dependency, imported lazily here so that
    # importing this module as a library does not require it.
    import fire
    fire.Fire(test_DetResNet18)