#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Time    : 2020/11/18 14:13
# @Author  : Wan Diwen
# @FileName: resnet_serial.py

import math
import os
from collections import defaultdict
from typing import List

from mindspore import nn, Tensor
from mindspore.ops import operations as P
from src.common import Activation, Conv2d


def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
    """Build a 3x3 convolution; padding equals dilation so stride-1 output keeps spatial size."""
    return Conv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=dilation,
        dilation=dilation,
        groups=groups,
        bias=False,
    )


def conv1x1(in_planes, out_planes, stride=1):
    """Build a 1x1 (pointwise) convolution without bias."""
    return Conv2d(
        in_planes,
        out_planes,
        kernel_size=1,
        stride=stride,
        bias=False,
    )


class BasicBlock(nn.Cell):
    """Two-convolution residual block used by the shallow ResNets (18/34).

    Layout: 3x3 conv -> BN -> ReLU -> 3x3 conv -> BN, added to a skip
    connection (optionally projected by ``downsample``), then a final ReLU.
    """

    # Output channels = planes * expansion.
    expansion = 1

    def __init__(self, inplanes: int, planes: int, stride=1, downsample=None, groups=1, base_width=64, dilation=1,
                 norm_layer=None):
        super(BasicBlock, self).__init__()
        norm_layer = nn.BatchNorm2d if norm_layer is None else norm_layer
        if groups != 1 or base_width != 64:
            raise ValueError('BasicBlock only supports groups=1 and base_width=64')
        # The first conv (and the downsample branch, when present) carries
        # any spatial downsampling for stride != 1.
        self.conv1 = conv3x3(inplanes, planes, stride, dilation=dilation)
        self.bn1 = norm_layer(planes)
        self.relu = nn.ReLU()
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = norm_layer(planes)
        self.downsample = downsample
        self.stride = stride
        self.in_channels = inplanes
        self.out_channels = planes
        self.dilation = dilation
        self.add = P.TensorAdd()

    def construct(self, x):
        # Project the skip connection when channel/stride mismatch requires it.
        residual = x if self.downsample is None else self.downsample(x)

        y = self.relu(self.bn1(self.conv1(x)))
        y = self.bn2(self.conv2(y))

        return self.relu(self.add(y, residual))


class Bottleneck(nn.Cell):
    """Three-convolution bottleneck block used by the deep ResNets (50/101/152).

    Layout: 1x1 reduce -> BN -> ReLU -> 3x3 -> BN -> ReLU -> 1x1 expand
    (x ``expansion``) -> BN, added to a skip connection (optionally
    projected by ``downsample``), then a final ReLU.
    """

    # Output channels = planes * expansion.
    expansion = 4

    def __init__(self, inplanes: int, planes: int, stride=1, downsample=None, groups=1, base_width=64, dilation=1,
                 norm_layer=None):
        super(Bottleneck, self).__init__()
        norm_layer = nn.BatchNorm2d if norm_layer is None else norm_layer
        # Channel count of the middle 3x3 conv; base_width/groups scaling
        # is how ResNeXt-style widening is expressed.
        width = int(planes * (base_width / 64.)) * groups
        # The 3x3 conv (and the downsample branch, when present) carries
        # any spatial downsampling for stride != 1.
        self.conv1 = conv1x1(inplanes, width)
        self.bn1 = norm_layer(width)
        self.conv2 = conv3x3(width, width, stride, groups, dilation)
        self.bn2 = norm_layer(width)
        self.conv3 = conv1x1(width, planes * self.expansion)
        self.bn3 = norm_layer(planes * self.expansion)
        self.relu = nn.ReLU()
        self.downsample = downsample
        self.stride = stride
        self.in_channels = inplanes
        self.out_channels = planes * self.expansion
        self.groups = groups
        self.dilation = dilation
        self.width = width
        self.add = P.TensorAdd()

    def construct(self, x):
        # Project the skip connection when channel/stride mismatch requires it.
        residual = x if self.downsample is None else self.downsample(x)

        y = self.relu(self.bn1(self.conv1(x)))
        y = self.relu(self.bn2(self.conv2(y)))
        y = self.bn3(self.conv3(y))

        return self.relu(self.add(y, residual))


class ResNet(nn.Cell):
    """ResNet backbone that returns a dict of multi-scale feature maps.

    ``construct`` returns ``{"2": ..., "4": ..., ...}`` where each key is the
    (stringified) downsampling factor relative to the input. When later
    stages replace their stride with dilation, the deepest feature map stays
    at a lower stride and the higher-stride keys are omitted accordingly.

    Args:
        block: residual block class (``BasicBlock`` or ``Bottleneck``).
        layers: blocks per stage, e.g. ``[3, 4, 6, 3]`` for ResNet-50.
        groups: group count forwarded to every block (ResNeXt-style).
        width_per_group: base width forwarded to every block.
        strides: strides of layer2..layer4 (layer1 always uses stride 1).
        replace_stride_with_dilation: three flags; ``True`` replaces the
            corresponding stage's stride with dilation. Supported patterns:
            [False, False, False], [False, False, True], [False, True, True].
        deep_stem: use three 3x3 convs as the stem instead of a single 7x7.
        norm_layer: normalization layer class, default ``nn.BatchNorm2d``.

    Raises:
        ValueError: if ``strides`` or ``replace_stride_with_dilation`` does
            not have exactly three entries.
        NotImplementedError: for an unsupported dilation pattern.
    """

    def __init__(self, block, layers, groups=1, width_per_group=64, strides=(2, 2, 2),
                 replace_stride_with_dilation=None, deep_stem=False, norm_layer=None, **kwargs):
        super(ResNet, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        self._norm_layer = norm_layer
        self.block_name = block.__name__
        self.layers = layers
        self.inplanes = 64
        self.dilation = 1
        # Explicit raise instead of `assert`: asserts are stripped under -O,
        # and this matches the validation style used just below.
        if len(strides) != 3:
            raise ValueError("The strides of layer2~layer4")
        if replace_stride_with_dilation is None:
            # each element in the tuple indicates if we should replace
            # the 2x2 stride with a dilated convolution instead
            replace_stride_with_dilation = [False, False, False]
        # Normalize to a list: the stride_type dispatch below compares
        # against list literals, which would never match a tuple argument
        # such as (False, True, True).
        replace_stride_with_dilation = list(replace_stride_with_dilation)
        if len(replace_stride_with_dilation) != 3:
            raise ValueError("replace_stride_with_dilation should be None "
                             "or a 3-element tuple, got {}".format(replace_stride_with_dilation))
        self.groups = groups
        self.base_width = width_per_group

        # Maps cumulative downsampling factor -> channel count at that scale.
        self._channels = {}
        # NOTE(review): never populated or read in this file — presumably
        # kept for subclasses/external consumers; verify before removing.
        self._blocks = defaultdict(list)

        self.deep_stem = deep_stem
        if self.deep_stem:
            # ResNet-C stem: three 3x3 convs (first one strided) instead of 7x7.
            self.inplanes = 128
            self.conv1 = nn.SequentialCell(
                Conv2d(3, self.inplanes // 2, 3, 2, 1, bias=False),
                norm_layer(self.inplanes // 2),
                Activation('relu'),
                Conv2d(self.inplanes // 2, self.inplanes // 2, 3, 1, 1, bias=False),
                norm_layer(self.inplanes // 2),
                Activation('relu'),
                Conv2d(self.inplanes // 2, self.inplanes, 3, 1, 1, bias=False),
            )
        else:
            self.conv1 = Conv2d(3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False)
        self.bn1 = norm_layer(self.inplanes)
        self.relu = Activation('relu')
        self.down_stride = 2
        self._channels[self.down_stride] = self.inplanes

        # Manual pad + 'valid' maxpool reproduces a padded 3x3/stride-2 pool.
        self.pad = nn.Pad(((0, 0), (0, 0), (1, 1), (1, 1)))
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode='valid')
        self.down_stride *= 2
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=strides[0], dilate=replace_stride_with_dilation[0])
        self.layer3 = self._make_layer(block, 256, layers[2], stride=strides[1], dilate=replace_stride_with_dilation[1])
        self.layer4 = self._make_layer(block, 512, layers[3], stride=strides[2], dilate=replace_stride_with_dilation[2])
        # Encode which stages were dilated so construct() can label the
        # outputs with their true strides.
        if replace_stride_with_dilation == [False, False, False]:
            self.stride_type = 0
        elif replace_stride_with_dilation == [False, False, True]:
            self.stride_type = 1
        elif replace_stride_with_dilation == [False, True, True]:
            self.stride_type = 2
        else:
            raise NotImplementedError(f"replace_stride_with_dilation={replace_stride_with_dilation}")

    def _make_layer(self, block, planes, blocks, stride=1, dilate=False):
        """Build one stage of ``blocks`` residual blocks.

        The first block may downsample (or dilate, when ``dilate`` is True,
        in which case the stride is converted into extra dilation); the rest
        keep stride 1. Updates ``self.inplanes``, ``self.down_stride`` and
        ``self._channels`` as side effects.
        """
        norm_layer = self._norm_layer
        downsample = None
        previous_dilation = self.dilation
        if dilate:
            # Trade stride for dilation: resolution is preserved while the
            # receptive field still grows.
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != planes * block.expansion:
            # 1x1 projection so the skip connection matches shape/channels.
            downsample = nn.SequentialCell(
                Conv2d(self.inplanes, planes * block.expansion, kernel_size=1, stride=stride, bias=False),
                norm_layer(planes * block.expansion),
            )

        layers = [block(self.inplanes, planes, stride, downsample, self.groups, self.base_width, previous_dilation,
                        norm_layer)]
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(
                block(self.inplanes, planes, groups=self.groups, base_width=self.base_width, dilation=self.dilation,
                      norm_layer=norm_layer))

        self.down_stride *= stride
        self._channels[self.down_stride] = self.inplanes
        return nn.SequentialCell(layers)

    def construct(self, x):
        """Run the backbone and return ``{stride_str: feature_map}``."""
        features = {}
        x = self.relu(self.bn1(self.conv1(x)))
        features["2"] = x
        x = self.pad(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        features["4"] = x
        x8 = self.layer2(x)
        x16 = self.layer3(x8)
        x32 = self.layer4(x16)
        # Dilated stages did not actually downsample, so the outputs are
        # relabelled with their effective strides.
        if self.stride_type == 0:
            features["8"] = x8
            features["16"] = x16
            features["32"] = x32
        elif self.stride_type == 1:
            features["8"] = x16
            features["16"] = x32
        elif self.stride_type == 2:
            features["8"] = x32
        return features

    @property
    def channels(self):
        """Dict mapping cumulative downsampling factor -> channel count."""
        return self._channels


def resnet50c(**kwargs):
    r"""Construct a ResNet-C-50 backbone from
    `"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
    """
    # ResNet-C always uses the three-conv deep stem; override any caller value.
    kwargs['deep_stem'] = True
    stage_blocks = [3, 4, 6, 3]
    return ResNet(Bottleneck, stage_blocks, **kwargs)


def _test():
    """Smoke-test resnet50c on a random batch (requires a GPU device)."""
    import numpy as np
    import mindspore.context as context

    context.set_context(device_target="GPU")
    kwargs = {'replace_stride_with_dilation': [False, True, True]}
    image_size = 256
    batch = np.random.rand(2, 3, image_size, image_size).astype(np.float32)
    model = resnet50c(**kwargs)
    outputs = model(Tensor(batch))
    print({name: feat.shape for name, feat in outputs.items()})
    model.compile_and_run(Tensor(batch))


if __name__ == '__main__':
    _test()
