import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np

from .network_blocks import ConvBNLayer, SEModule, DepthwiseSeparable
from shape_spec import ShapeSpec

# Stage-by-stage configuration of the LCNet backbone (PP-LCNet, arXiv:2109.15099).
# Each entry "blocksN" lists the DepthwiseSeparable layers of that stage.
# Each row: [kernel_size, in_channels, out_channels, stride, use_se]
# Channel counts are the scale=1.0 values; LCNet multiplies them by `scale`
# and rounds with make_divisible() at construction time.
NET_CONFIG = {
    "blocks2":
    #k, in_c, out_c, s, use_se
    [[3, 16, 32, 1, False], ],
    "blocks3": [
        [3, 32, 64, 2, False],
        [3, 64, 64, 1, False],
    ],
    "blocks4": [
        [3, 64, 128, 2, False],
        [3, 128, 128, 1, False],
    ],
    "blocks5": [
        [3, 128, 256, 2, False],
        # 5x5 depthwise kernels in the deeper, low-resolution stages
        [5, 256, 256, 1, False],
        [5, 256, 256, 1, False],
        [5, 256, 256, 1, False],
        [5, 256, 256, 1, False],
        [5, 256, 256, 1, False],
    ],
    # last stage enables SE (squeeze-and-excitation) modules
    "blocks6": [[5, 256, 512, 2, True], [5, 512, 512, 1, True]]
}

def make_divisible(v, divisor=8, min_value=None):
    """Round ``v`` to the nearest multiple of ``divisor``.

    Hardware-friendly channel rounding (as in MobileNet): many processing
    units (e.g. GPU warps) run fastest when matrix sizes align to 8/16/32.

    Args:
        v: value to round (typically a scaled channel count).
        divisor: alignment target; the result is a multiple of this.
        min_value: lower bound for the result; defaults to ``divisor``.

    Returns:
        int: the rounded value, never below ``min_value`` and never more
        than 10% smaller than the original ``v``.
    """
    floor = divisor if min_value is None else min_value
    rounded = int(v + divisor / 2) // divisor * divisor
    result = max(floor, rounded)
    # Guard against rounding down by more than 10% of the original value.
    if result < 0.9 * v:
        result += divisor
    return result

class LCNet(nn.Module):
    """PP-LCNet backbone (https://arxiv.org/pdf/2109.15099.pdf).

    Builds the stem plus stages blocks2..blocks6 from NET_CONFIG, with all
    channel counts multiplied by ``scale`` and rounded via make_divisible().

    Args:
        scale (float): width multiplier applied to every channel count.
        feature_maps (sequence[int]): which pyramid levels to return from
            forward(). Level ``i`` maps to the output of stage index
            ``i - 2`` in (blocks3, blocks4, blocks5, blocks6), i.e. level 3
            is blocks4 (stride 8), level 4 is blocks5 (stride 16), level 5
            is blocks6 (stride 32).
    """

    # NOTE: default is a tuple, not a list — a mutable default argument
    # would be shared across all instances.
    def __init__(self, scale=1.0, feature_maps=(3, 4, 5)):
        super(LCNet, self).__init__()
        self.scale = scale
        # Copy so a caller-supplied sequence can't be mutated under us.
        self.feature_maps = list(feature_maps)

        # Stem: 3x3 stride-2 conv, 16*scale channels.
        self.conv1 = ConvBNLayer(
            ch_in=3,
            filter_size=3,
            ch_out=make_divisible(16 * scale),
            stride=2,
            act="hardswish")

        # Stages are identical in structure; build each from its config row.
        self.blocks2 = self._make_stage("blocks2")
        self.blocks3 = self._make_stage("blocks3")
        self.blocks4 = self._make_stage("blocks4")
        self.blocks5 = self._make_stage("blocks5")
        self.blocks6 = self._make_stage("blocks6")

        # Output channels of the four returnable stages, after scaling;
        # index 0 corresponds to blocks3, ..., index 3 to blocks6.
        out_channels = [
            make_divisible(NET_CONFIG[name][-1][2] * scale)
            for name in ("blocks3", "blocks4", "blocks5", "blocks6")
        ]
        # Keep only the levels requested in feature_maps (level = idx + 2).
        self._out_channels = [
            ch for idx, ch in enumerate(out_channels)
            if idx + 2 in self.feature_maps
        ]

    def _make_stage(self, name):
        """Build one stage of DepthwiseSeparable layers from NET_CONFIG[name]."""
        return nn.Sequential(*[
            DepthwiseSeparable(
                ch_in=make_divisible(in_c * self.scale),
                ch_out=make_divisible(out_c * self.scale),
                dw_size=k,
                stride=s,
                use_se=se)
            for (k, in_c, out_c, s, se) in NET_CONFIG[name]
        ])

    def forward(self, x):
        """Run the backbone.

        Args:
            x: input image tensor (assumed NCHW with 3 channels — the stem
               conv is built with ch_in=3).

        Returns:
            list[Tensor]: outputs of the stages selected by feature_maps,
            in increasing-stride order.
        """
        x = self.conv1(x)
        x = self.blocks2(x)
        outs = []
        for stage in (self.blocks3, self.blocks4, self.blocks5, self.blocks6):
            x = stage(x)
            outs.append(x)
        # Same level mapping as _out_channels: stage idx + 2 == pyramid level.
        return [o for i, o in enumerate(outs) if i + 2 in self.feature_maps]

    @property
    def out_shape(self):
        """ShapeSpec (channel counts) for each returned feature map."""
        return [ShapeSpec(channels=c) for c in self._out_channels]