import math
from typing import Union, Tuple, Optional

import paddle.fluid as fluid


# Public API of this module: thin wrappers around paddle.fluid layers with
# deterministic parameter naming and an FP16 toggle.
__all__ = ['conv2d', 'x_bn', 'x_act', 'bn_act', 'pool2d', 'fc', 'global_avg_pool', 'set_fp16']

# Module-level FP16 flag, mutated only via set_fp16(); consulted when
# deciding whether to force cuDNN (see _conv2d and global_avg_pool).
FP16 = False


def set_fp16(mode: bool):
    """Globally enable/disable FP16 mode for the layer helpers in this module.

    When enabled, cuDNN is forced on in _conv2d and global_avg_pool.
    """
    global FP16
    FP16 = mode


def _conv2d(x, out_channels, kernel_size, stride=1, padding='same', groups=1, dilation=1, bias=True, name=None):
    """2D convolution with optional 'same' padding and uniform fan-in init.

    Args:
        x: Input 4-D variable in NCHW layout (channels read from x.shape[1]).
        out_channels: Number of output filters.
        kernel_size: int or (kh, kw) tuple.
        stride: Convolution stride.
        padding: 'same' to auto-compute symmetric padding (correct for
            stride 1), or an explicit int/tuple passed through unchanged.
        groups: Number of convolution groups.
        dilation: Dilation rate.
        bias: Whether to create a bias parameter.
        name: Optional prefix for the weight/bias parameter names.

    Returns:
        The convolution output variable.
    """
    in_channels = x.shape[1]

    if isinstance(kernel_size, int):
        kernel_size = (kernel_size, kernel_size)

    # 'same' padding for the dilated effective kernel size
    # k_eff = k + (k - 1) * (dilation - 1).
    if padding == 'same':
        kh, kw = kernel_size
        ph = (kh + (kh - 1) * (dilation - 1) - 1) // 2
        pw = (kw + (kw - 1) * (dilation - 1) - 1) // 2
        padding = (ph, pw)

    # Uniform init in [-1/sqrt(fan_in), 1/sqrt(fan_in)].
    # NOTE(review): fan-in ignores `groups`, so grouped convs get a wider
    # init range than their true fan-in would suggest — confirm intended.
    stdv = 1.0 / math.sqrt(in_channels * kernel_size[0] * kernel_size[1])

    param_name = name + '_weights' if name else None
    param_attr = fluid.param_attr.ParamAttr(
        initializer=fluid.initializer.Uniform(-stdv, stdv),
        name=param_name)
    if bias:
        bias_name = name + '_offset' if name else None
        bias_attr = fluid.param_attr.ParamAttr(
            initializer=fluid.initializer.Uniform(-stdv, stdv),
            name=bias_name)
    else:
        bias_attr = None

    # Skip cuDNN for depthwise convolutions (groups == out_channels) unless
    # FP16 is enabled.
    use_cudnn = FP16 or (out_channels != groups)

    return fluid.layers.conv2d(
        input=x,
        num_filters=out_channels,
        filter_size=kernel_size,
        stride=stride,
        padding=padding,
        # BUG FIX: dilation was previously dropped here even though the
        # 'same' padding above accounts for it, silently producing an
        # undilated conv with wrong padding whenever dilation != 1.
        dilation=dilation,
        groups=groups,
        bias_attr=bias_attr,
        param_attr=param_attr,
        use_cudnn=use_cudnn,
    )


def _bn(x, name=None):
    """Batch normalization with deterministically named parameters.

    When `name` is given, the scale/offset parameters and the moving
    mean/variance are named from it so checkpoints can be matched by
    parameter name.
    """
    param_name = name + '_scale' if name else None
    bias_name = name + '_offset' if name else None
    moving_mean_name = name + '_mean' if name else None
    moving_variance_name = name + '_variance' if name else None
    return fluid.layers.batch_norm(
        input=x,
        name=name,
        param_attr=fluid.param_attr.ParamAttr(name=param_name),
        # Consistency fix: pass the name as a keyword like every other
        # ParamAttr in this module (ParamAttr's first positional argument
        # is `name`, so behavior is unchanged).
        bias_attr=fluid.param_attr.ParamAttr(name=bias_name),
        moving_mean_name=moving_mean_name,
        moving_variance_name=moving_variance_name)


def _act(x, act, name=None):
    if act == 'relu':
        return fluid.layers.relu(x, name=name)
    elif act == 'swish':
        return fluid.layers.swish(x, name=name)
    else:
        raise ValueError("No activation %s: " % act)


# Public aliases for the standalone batch-norm and activation helpers.
x_bn = _bn
x_act = _act


def bn_act(x, act, name=None):
    """Fused batch normalization + activation (`act` applied by batch_norm).

    Parameter names are derived from `name` with a '.bn_' prefix so they are
    distinguishable from a standalone _bn under the same name.
    """
    param_name = name + '.bn_scale' if name else None
    bias_name = name + '.bn_offset' if name else None
    moving_mean_name = name + '.bn_mean' if name else None
    moving_variance_name = name + '.bn_variance' if name else None
    x = fluid.layers.batch_norm(
        input=x,
        act=act,
        name=name,
        param_attr=fluid.param_attr.ParamAttr(name=param_name),
        # Consistency fix: pass the name as a keyword like every other
        # ParamAttr in this module (ParamAttr's first positional argument
        # is `name`, so behavior is unchanged).
        bias_attr=fluid.param_attr.ParamAttr(name=bias_name),
        moving_mean_name=moving_mean_name,
        moving_variance_name=moving_variance_name)
    return x


def conv2d(x,
           out_channels: int,
           kernel_size: Union[int, Tuple[int]],
           stride: Union[int, Tuple[int]] = 1,
           padding: Union[str, int, Tuple[int]] = 'same',
           groups: int = 1,
           dilation: int = 1,
           bias: Optional[bool] = None,
           bn: bool = False,
           act: Optional[str] = None,
           name: Optional[str] = None):
    """Convolution with optional fused batch-norm and/or activation.

    If neither `bn` nor `act` is requested, this is a plain _conv2d.
    Otherwise the conv parameters get a '.conv' name suffix, and `bias`
    defaults to the opposite of `bn` (no conv bias when BN follows).
    """
    # Guard clause: bare convolution when no post-processing is requested.
    if not (bn or act is not None):
        return _conv2d(x, out_channels, kernel_size, stride, padding, groups, dilation, bias, name)

    if bias is None:
        bias = not bn  # BN already has an offset, so skip the conv bias.
    conv_name = name + '.conv' if name else None
    out = _conv2d(x, out_channels, kernel_size, stride, padding, groups, dilation, bias, conv_name)

    # Dispatch on which post-ops were requested; a falsy non-None `act`
    # deliberately falls through every branch unchanged.
    if bn and act:
        out = bn_act(out, act, name)
    elif bn:
        out = _bn(out, name + '.bn' if name else None)
    elif act:
        out = _act(out, act, name + '.act' if name else None)
    return out


def pool2d(x, kernel_size, stride, padding='same', type='avg', ceil_mode=True, name=None):
    """2D pooling with optional 'same' padding.

    `padding='same'` computes symmetric (k - 1) // 2 padding per spatial
    dimension; any other value is forwarded to fluid unchanged.
    """
    if isinstance(kernel_size, int):
        kernel_size = (kernel_size, kernel_size)

    if padding == 'same':
        padding = tuple((k - 1) // 2 for k in kernel_size)

    return fluid.layers.pool2d(
        input=x,
        pool_size=kernel_size,
        pool_stride=stride,
        pool_padding=padding,
        pool_type=type,
        ceil_mode=ceil_mode,
        name=name)


def fc(x, size, act=None, name=None):
    """Fully-connected layer with uniform fan-in weight initialization.

    Weights are drawn from U(-1/sqrt(in_features), 1/sqrt(in_features));
    the bias uses the framework default initializer.
    """
    bound = 1.0 / math.sqrt(float(x.shape[1]))
    weight_attr = fluid.param_attr.ParamAttr(
        name=name + '_weights' if name else None,
        initializer=fluid.initializer.Uniform(-bound, bound))
    offset_attr = fluid.param_attr.ParamAttr(
        name=name + '_offset' if name else None)
    return fluid.layers.fc(
        input=x,
        size=size,
        param_attr=weight_attr,
        bias_attr=offset_attr,
        act=act)


def global_avg_pool(x, name=None):
    """Global average pooling over the spatial dimensions.

    cuDNN is enabled only when the module-level FP16 flag is set.
    """
    return fluid.layers.pool2d(
        input=x,
        pool_type='avg',
        global_pooling=True,
        use_cudnn=FP16,
        name=name)
