#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created by PyCharm.

@Date    : Tue Apr 28 2020 
@Time    : 22:06:24
@File    : resnet.py
@Author  : alpha
"""


import torch
import torch.nn as nn
import torch.nn.functional as F

from torchvision.models.resnet import resnet18
from functools import reduce

from src.dyconv import DyConv2D


class ConvBN(nn.Module):
    """Conv2d -> BatchNorm2d (-> optional ReLU) building block.

    The convolution carries no bias because the following BatchNorm
    makes it redundant.

    Args:
        in_channels (int): input feature channels.
        out_channels (int): output feature channels.
        kernel_size (int): convolution kernel size (default 3).
        stride (int): convolution stride (default 1).
        padding (int): zero-padding on each side (default 1).
        dilation (int): kernel dilation (default 1).
        has_relu (bool): append an in-place ReLU when True (default).
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size=3,
                 stride=1,
                 padding=1,
                 dilation=1,
                 has_relu=True):
        super(ConvBN, self).__init__()
        # Keep the Sequential layout (conv.0 = Conv2d, conv.1 = BN) so
        # state_dict keys stay stable for checkpoints.
        self.conv = nn.Sequential(
            nn.Conv2d(
                in_channels,
                out_channels,
                kernel_size=kernel_size,
                stride=stride,
                padding=padding,
                dilation=dilation,
                bias=False,
            ),
            nn.BatchNorm2d(out_channels),
        )
        self.has_relu = has_relu
        if has_relu:
            self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        out = self.conv(x)
        return self.relu(out) if self.has_relu else out


class DyConvBN(nn.Module):
    """Dynamic-convolution counterpart of ConvBN.

    Wraps the project's DyConv2D (constructed with ``with_bn=True``, so
    normalization lives inside the dynamic conv) and optionally applies
    an in-place ReLU afterwards.

    Args:
        in_channels (int): input feature channels.
        out_channels (int): output feature channels.
        kernel_size (int): convolution kernel size (default 3).
        stride (int): convolution stride (default 1).
        padding (int): zero-padding on each side (default 1).
        dilation (int): kernel dilation (default 1).
        has_relu (bool): append an in-place ReLU when True (default).
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size=3,
                 stride=1,
                 padding=1,
                 dilation=1,
                 has_relu=True):
        super(DyConvBN, self).__init__()
        self.conv = DyConv2D(
            in_channels,
            out_channels,
            kernel_size,
            stride,
            padding,
            dilation,
            bias=False,
            with_bn=True,
        )
        self.has_relu = has_relu
        if has_relu:
            self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        out = self.conv(x)
        return self.relu(out) if self.has_relu else out


class Concat(nn.Module):
    """Module wrapper around torch.cat so concatenation can live in a graph.

    Args:
        dim (int): dimension along which inputs are concatenated (default 1,
            i.e. the channel dimension for NCHW tensors).
    """

    def __init__(self, dim=1):
        super(Concat, self).__init__()
        self.dim = dim

    def forward(self, *tensors):
        # Variadic: each positional argument is one tensor to concatenate.
        return torch.cat(tensors, dim=self.dim)


class EltwiseAdd(nn.Module):
    """Module wrapper for element-wise addition of any number of tensors.

    With a single input the tensor is returned unchanged.
    """

    def __init__(self):
        super(EltwiseAdd, self).__init__()

    def forward(self, *tensors):
        # Left-fold addition over the inputs, equivalent to
        # functools.reduce(torch.add, tensors).
        total = tensors[0]
        for t in tensors[1:]:
            total = torch.add(total, t)
        return total


class BasicBlock(nn.Module):
    """Two-conv residual block (ResNet-18/34 style).

    Computes ``relu(conv2(conv1(x)) + shortcut(x))``. The shortcut is
    the identity when the block changes neither resolution nor channel
    count; otherwise a strided 1x1 convolution projects the input.

    Fix: the original keyed the projection only on ``stride == 2``, so a
    stride-1 block with ``in_channels != out_channels`` crashed at the
    elementwise add. The projection is now created whenever the shapes
    differ, which is behavior-identical for every previous valid use.

    NOTE(review): the shortcut projection has no BatchNorm, unlike
    torchvision's ResNet downsample (Conv2d + BN). Kept as-is to
    preserve existing behavior and checkpoints.

    Args:
        in_channels (int): channels of the input feature map.
        out_channels (int): channels of the output feature map.
        stride (int): 1 (keep resolution) or 2 (halve resolution).

    Raises:
        ValueError: if ``stride`` is not 1 or 2.
    """

    def __init__(self, in_channels, out_channels, stride=1):
        super(BasicBlock, self).__init__()
        if stride not in (1, 2):
            # Raise instead of assert: asserts are stripped under `python -O`.
            raise ValueError('stride must be 1 or 2, got {}'.format(stride))
        self.stride = stride
        self.conv1 = ConvBN(in_channels, out_channels, stride=stride)
        self.conv2 = ConvBN(out_channels, out_channels, has_relu=False)
        self.relu = nn.ReLU()
        # Project the shortcut whenever the residual branch changes the
        # tensor shape: spatially (stride) or in channel count.
        self.has_projection = stride != 1 or in_channels != out_channels
        if self.has_projection:
            self.branch1 = nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=stride)
        self.eltadd = EltwiseAdd()

    def forward(self, x):
        residual = self.conv2(self.conv1(x))
        shortcut = self.branch1(x) if self.has_projection else x
        return self.relu(self.eltadd(shortcut, residual))


class ResNet18(nn.Module):
    """ResNet-18 image classifier assembled from the BasicBlock above.

    Layout: 7x7/2 ConvBN stem, 3x3/2 max-pool, four stages of two
    BasicBlocks each (64/128/256/512 channels, stages 3-5 downsample by
    2), global average pooling, then a linear classifier.

    Args:
        num_classes (int): size of the final classification layer.
    """

    def __init__(self, num_classes):
        super(ResNet18, self).__init__()
        # Stem: input is downsampled 4x before the residual stages.
        self.conv1 = ConvBN(3, 64, kernel_size=7, padding=3, stride=2)
        self.pool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

        # Residual stages; the first block of stages 3-5 halves the
        # resolution and doubles the channel count.
        self.stage2_1 = BasicBlock(64, 64, stride=1)
        self.stage2_2 = BasicBlock(64, 64, stride=1)

        self.stage3_1 = BasicBlock(64, 128, stride=2)
        self.stage3_2 = BasicBlock(128, 128, stride=1)

        self.stage4_1 = BasicBlock(128, 256, stride=2)
        self.stage4_2 = BasicBlock(256, 256, stride=1)

        self.stage5_1 = BasicBlock(256, 512, stride=2)
        self.stage5_2 = BasicBlock(512, 512, stride=1)

        # Classifier head.
        self.avepool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(512, num_classes)

        # He initialization for convolutions; unit-scale/zero-shift for
        # normalization layers. Linear keeps PyTorch's default init.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)

    def forward(self, x):
        feat = self.pool1(self.conv1(x))
        for block in (self.stage2_1, self.stage2_2,
                      self.stage3_1, self.stage3_2,
                      self.stage4_1, self.stage4_2,
                      self.stage5_1, self.stage5_2):
            feat = block(feat)
        # (N, 512, 1, 1) -> (N, 512): drop the unit spatial dimensions.
        pooled = self.avepool(feat)[:, :, 0, 0]
        return self.fc(pooled)


