#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created by PyCharm.

@Date    : Sat Feb 06 2021 
@Time    : 11:56:32
@File    : repconv.py
@Author  : alpha
"""

import torch
from torch import nn
from torch.nn import functional as F


class ConvBN(nn.Module):
    """A bias-free 2-D convolution immediately followed by batch normalization.

    The conv carries no bias because the following BatchNorm2d's affine shift
    makes a conv bias redundant (and fusion code assumes its absence).
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size=3,
                 stride=1,
                 padding=1,
                 dilations=1):
        super(ConvBN, self).__init__()
        conv_layer = nn.Conv2d(
            in_channels,
            out_channels,
            kernel_size,
            stride,
            padding,
            dilations,
            bias=False,
        )
        bn_layer = nn.BatchNorm2d(out_channels)
        # Keep conv at index 0 and BN at index 1: the re-parameterization
        # code indexes into this Sequential positionally.
        self.conv = nn.Sequential(conv_layer, bn_layer)

    def forward(self, x):
        """Apply the convolution, then batch normalization, to *x*."""
        return self.conv(x)


class RepConv2D(nn.Module):
    """RepVGG-style re-parameterizable 3x3 convolution block.

    During training, three parallel branches are summed: a 3x3 conv+BN, a
    1x1 conv+BN, and (when ``stride == 1`` and channel counts match) an
    identity BatchNorm branch.  For inference, the branches are folded
    algebraically into the single 3x3 convolution ``self.conv``, so the
    eval-mode forward pass costs exactly one convolution.

    Args:
        in_channels: number of input channels.
        out_channels: number of output channels.
        stride: convolution stride for every branch.
        has_relu: append an in-place ReLU after the block output.
    """

    def __init__(
            self,
            in_channels,
            out_channels,
            stride=1,
            has_relu=True
    ):
        super(RepConv2D, self).__init__()

        self.train_conv_3x3 = ConvBN(
            in_channels, out_channels, kernel_size=3, stride=stride, padding=1
        )
        self.train_conv_1x1 = ConvBN(
            in_channels, out_channels, kernel_size=1, stride=stride, padding=0
        )
        # An identity branch only exists when the input can be added to the
        # conv outputs without any shape change.
        self.has_identity = (stride == 1 and in_channels == out_channels)
        if self.has_identity:
            self.train_conv_identity = nn.BatchNorm2d(out_channels)

        self.has_relu = has_relu
        if self.has_relu:
            self.relu = nn.ReLU(inplace=True)
        # Deployment conv; its randomly-initialized weights are overwritten
        # by _fuse_weights() below and again on every switch to eval mode.
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=stride, padding=1)
        # Fuse at construction so eval() inference works before any train().
        self._fuse_weights()

    def forward(self, x):
        if self.training:
            x1 = self.train_conv_3x3(x)
            x2 = self.train_conv_1x1(x)
            if self.has_identity:
                x = x1 + x2 + self.train_conv_identity(x)
            else:
                x = x1 + x2
        else:
            x = self.conv(x)

        if self.has_relu:
            x = self.relu(x)

        return x

    def train(self, mode=True):
        """Switch train/eval mode; re-fuse the deployment conv on eval.

        Delegates the mode bookkeeping (setting ``self.training`` and
        recursing into children) to ``nn.Module.train`` instead of
        re-implementing it by hand.
        """
        super(RepConv2D, self).train(mode)
        if not mode:
            self._fuse_weights()
        return self

    def _fuse_bn(self, module: ConvBN):
        """Fold a ConvBN branch into an equivalent (kernel, bias) pair."""
        kernel = module.conv[0].weight
        running_mean = module.conv[1].running_mean
        running_var = module.conv[1].running_var
        gamma = module.conv[1].weight
        beta = module.conv[1].bias
        eps = module.conv[1].eps

        std = (running_var + eps).sqrt()

        # BN(conv(x)) == conv'(x) + b' with per-output-channel rescaling.
        fuse_kernel = kernel * (gamma / std).reshape(-1, 1, 1, 1)
        fuse_bias = beta - running_mean * gamma / std

        return fuse_kernel, fuse_bias

    def _get_identity_kernel(self, module: nn.BatchNorm2d):
        """Express the identity-BN branch as an equivalent 3x3 conv."""
        running_mean = module.running_mean
        running_var = module.running_var
        gamma = module.weight
        beta = module.bias
        eps = module.eps

        C = gamma.shape[0]
        # A 1x1 identity kernel (eye over channels) padded to 3x3 so it can
        # be summed with the other branch kernels.  Created directly with
        # the BN parameters' dtype/device so fp16 or GPU modules fuse cleanly.
        kernel = F.pad(
            torch.eye(C, dtype=gamma.dtype, device=gamma.device).reshape(C, C, 1, 1),
            [1, 1, 1, 1],
        )

        std = (running_var + eps).sqrt()

        fused_kernel = kernel * (gamma / std).reshape(-1, 1, 1, 1)
        fused_bias = beta - running_mean * gamma / std

        return fused_kernel, fused_bias

    @torch.no_grad()
    def _fuse_weights(self):
        """Fold all training branches into ``self.conv``'s weight and bias.

        Runs under ``no_grad`` and detaches the results: without this the
        fused tensors are non-leaf nodes still attached to the training
        branches' autograd graph, which both leaks the graph and is invalid
        as ``nn.Parameter`` data.
        """
        kernel_3x3, bias_3x3 = self._fuse_bn(self.train_conv_3x3)
        kernel_1x1, bias_1x1 = self._fuse_bn(self.train_conv_1x1)
        # Embed the 1x1 kernel at the centre of a 3x3 kernel.
        kernel_1x1 = F.pad(kernel_1x1, [1, 1, 1, 1])

        fused_kernel = kernel_3x3 + kernel_1x1
        fused_bias = bias_3x3 + bias_1x1

        if self.has_identity:
            kernel_identity, bias_identity = self._get_identity_kernel(self.train_conv_identity)
            fused_kernel = fused_kernel + kernel_identity
            fused_bias = fused_bias + bias_identity

        self.conv.weight = nn.Parameter(fused_kernel.detach())
        self.conv.bias = nn.Parameter(fused_bias.detach())

class RepConv5x5(nn.Module):
    """RepVGG-style re-parameterizable 5x5 convolution block.

    During training, four parallel branches are summed: a 5x5 conv+BN, a
    3x3 conv+BN, a 1x1 conv+BN, and (when ``stride == 1`` and channel counts
    match) an identity BatchNorm branch.  For inference, all branches fold
    algebraically into the single 5x5 convolution ``self.conv``.

    Args:
        in_channels: number of input channels.
        out_channels: number of output channels.
        stride: convolution stride for every branch.
        has_relu: append an in-place ReLU after the block output.
    """

    def __init__(
            self,
            in_channels,
            out_channels,
            stride=1,
            has_relu=True
    ):
        super(RepConv5x5, self).__init__()

        self.train_conv_5x5 = ConvBN(
            in_channels, out_channels, kernel_size=5, stride=stride, padding=2
        )
        self.train_conv_3x3 = ConvBN(
            in_channels, out_channels, kernel_size=3, stride=stride, padding=1
        )
        self.train_conv_1x1 = ConvBN(
            in_channels, out_channels, kernel_size=1, stride=stride, padding=0
        )
        # An identity branch only exists when the input can be added to the
        # conv outputs without any shape change.
        self.has_identity = (stride == 1 and in_channels == out_channels)
        if self.has_identity:
            self.train_conv_identity = nn.BatchNorm2d(out_channels)

        self.has_relu = has_relu
        if self.has_relu:
            self.relu = nn.ReLU(inplace=True)
        # Deployment conv; its randomly-initialized weights are overwritten
        # by _fuse_weights() below and again on every switch to eval mode.
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=5, stride=stride, padding=2)
        # Fuse at construction so eval() inference works before any train().
        self._fuse_weights()

    def forward(self, x):
        if self.training:
            x1 = self.train_conv_5x5(x)
            x2 = self.train_conv_3x3(x)
            x3 = self.train_conv_1x1(x)
            if self.has_identity:
                x = x1 + x2 + x3 + self.train_conv_identity(x)
            else:
                x = x1 + x2 + x3
        else:
            x = self.conv(x)

        if self.has_relu:
            x = self.relu(x)

        return x

    def train(self, mode=True):
        """Switch train/eval mode; re-fuse the deployment conv on eval.

        Delegates the mode bookkeeping (setting ``self.training`` and
        recursing into children) to ``nn.Module.train`` instead of
        re-implementing it by hand.
        """
        super(RepConv5x5, self).train(mode)
        if not mode:
            self._fuse_weights()
        return self

    def _fuse_bn(self, module: ConvBN):
        """Fold a ConvBN branch into an equivalent (kernel, bias) pair."""
        kernel = module.conv[0].weight
        running_mean = module.conv[1].running_mean
        running_var = module.conv[1].running_var
        gamma = module.conv[1].weight
        beta = module.conv[1].bias
        eps = module.conv[1].eps

        std = (running_var + eps).sqrt()

        # BN(conv(x)) == conv'(x) + b' with per-output-channel rescaling.
        fuse_kernel = kernel * (gamma / std).reshape(-1, 1, 1, 1)
        fuse_bias = beta - running_mean * gamma / std

        return fuse_kernel, fuse_bias

    def _get_identity_kernel(self, module: nn.BatchNorm2d):
        """Express the identity-BN branch as an equivalent 5x5 conv."""
        running_mean = module.running_mean
        running_var = module.running_var
        gamma = module.weight
        beta = module.bias
        eps = module.eps

        C = gamma.shape[0]
        # A 1x1 identity kernel (eye over channels) padded to 5x5 so it can
        # be summed with the other branch kernels.  Created directly with
        # the BN parameters' dtype/device so fp16 or GPU modules fuse cleanly.
        kernel = F.pad(
            torch.eye(C, dtype=gamma.dtype, device=gamma.device).reshape(C, C, 1, 1),
            [2, 2, 2, 2],
        )

        std = (running_var + eps).sqrt()

        fused_kernel = kernel * (gamma / std).reshape(-1, 1, 1, 1)
        fused_bias = beta - running_mean * gamma / std

        return fused_kernel, fused_bias

    @torch.no_grad()
    def _fuse_weights(self):
        """Fold all training branches into ``self.conv``'s weight and bias.

        Runs under ``no_grad`` and detaches the results so the fused tensors
        are not left attached to the training branches' autograd graph.
        """
        kernel_5x5, bias_5x5 = self._fuse_bn(self.train_conv_5x5)
        kernel_3x3, bias_3x3 = self._fuse_bn(self.train_conv_3x3)
        kernel_1x1, bias_1x1 = self._fuse_bn(self.train_conv_1x1)

        # BUGFIX: the 3x3 kernel itself must be padded to 5x5.  The original
        # code padded kernel_1x1 here, silently dropping the 3x3 branch from
        # the fused weights and double-counting a mispadded 1x1 kernel.
        kernel_3x3 = F.pad(kernel_3x3, [1, 1, 1, 1])
        kernel_1x1 = F.pad(kernel_1x1, [2, 2, 2, 2])

        fused_kernel = kernel_5x5 + kernel_3x3 + kernel_1x1
        fused_bias = bias_5x5 + bias_3x3 + bias_1x1

        if self.has_identity:
            kernel_identity, bias_identity = self._get_identity_kernel(self.train_conv_identity)
            fused_kernel = fused_kernel + kernel_identity
            fused_bias = fused_bias + bias_identity

        self.conv.weight = nn.Parameter(fused_kernel.detach())
        self.conv.bias = nn.Parameter(fused_bias.detach())