import tensorflow as tf
from tensorflow.keras.layers import Conv2D,Dense,Add,BatchNormalization,Activation,Lambda,Multiply,AvgPool2D,Reshape,Concatenate,GlobalAveragePooling2D
from .resnet import ResLayer,ResNetV1d
from .resnet import Bottleneck as _Bottleneck
import math
from tensorflow.keras.models import Model

def R_softmax(groups, radix):
    """Build the r-SoftMax gate used by split attention.

    For radix > 1 the flattened attention logits are reshaped to
    (groups, radix, channels_per_split), softmax is taken across the
    radix dimension, and the result is flattened back to length d.
    For radix == 1 a plain sigmoid gate is used instead.
    """
    def _softmax(x):
        # radix == 1: no competing splits, so gate each channel independently.
        if radix <= 1:
            return Activation(tf.nn.sigmoid)(x)
        d = x.shape[-1]
        y = Reshape((groups, radix, d // radix // groups))(x)
        # Move radix to axis 1 so the softmax competes across splits.
        y = Lambda(tf.transpose, arguments={"perm": [0, 2, 1, 3]})(y)
        y = Lambda(tf.nn.softmax, arguments={"axis": 1})(y)
        return Reshape([d])(y)
    return _softmax


class Group_conv():
    """Grouped 2D convolution built from `groups` parallel Conv2D branches.

    The input is split along the channel axis into `groups` equal parts
    (so the input channel count must be divisible by `groups`); each part
    runs through its own Conv2D and the branch outputs are concatenated
    back along the channel axis.
    """

    def __init__(self, inchannel, outchannel, kernel, stride, padding, groups):
        # Fix: split along the channel (last) axis. tf.split defaults to
        # axis=0, which would split the *batch* dimension and break the layer.
        self.split = Lambda(tf.split,
                            arguments={"num_or_size_splits": groups, "axis": -1})
        # Distribute output channels as evenly as possible; the last branch
        # absorbs the remainder when outchannel is not divisible by groups.
        self.out_channels = [outchannel // groups for _ in range(groups - 1)]
        self.out_channels.append(outchannel - sum(self.out_channels))
        self.convs = [Conv2D(out, kernel, stride, padding)
                      for out in self.out_channels]
        # Concatenate defaults to the last (channel) axis.
        self.concat = Concatenate()

    def __call__(self, input):
        splits = self.split(input)
        group_convs = [conv(group) for conv, group in zip(self.convs, splits)]
        return self.concat(group_convs)

class GroupConvMoudel():
    """Convolution (grouped when groups > 1) followed by BatchNorm and ReLU."""

    def __init__(self, inchannels, outchannels, kernel, stride, padding, groups):
        # A single Conv2D suffices when there is only one group.
        if groups > 1:
            self.conv = Group_conv(inchannels, outchannels, kernel,
                                   stride, padding, groups)
        else:
            self.conv = Conv2D(outchannels, kernel, stride, padding)
        self.BatchNorm = BatchNormalization()
        self.relu = Activation(tf.nn.relu)

    def __call__(self, input):
        return self.relu(self.BatchNorm(self.conv(input)))

class SplitAttentionConv():
    """Split-Attention convolution block (ResNeSt).

    Runs a grouped conv producing `plance*radix` channels in `group*radix`
    groups, globally pools the per-radix sum, derives per-radix channel
    attention through a small bottleneck (GNR2 -> group conv -> r-softmax),
    and re-weights the radix splits with that attention.
    """
    def __init__(self,inplance,plance,kernel,stride,padding,radix,group,reduction_factor=4):
        self.radix = radix
        self.group = group
        # Bottleneck width of the attention branch, floored at 32 channels.
        inter_channel = max(inplance*radix//reduction_factor,32)
        # Main conv: conv + BN + ReLU with group*radix groups.
        self.GNR1 = GroupConvMoudel(inplance,plance*radix,kernel,stride,padding,group*radix)
        self.GlobleAvgPool = self._make_GlobleAvgPoolLayer()
        # 1x1 reduce of the pooled descriptor.
        self.GNR2 = GroupConvMoudel(plance,inter_channel,1,1,'valid',group)
        # 1x1 expand back to plance*radix attention logits.
        if group > 1:
            self.groupConv = Group_conv(inter_channel,plance*radix,1,1,"valid",group)
        else:
            self.groupConv = Conv2D(plance*radix,1,1,"valid")
        self.r_softmax = R_softmax(group,radix)
    def _make_GlobleAvgPoolLayer(self):
        # Global average pool that keeps a (1, 1, d) spatial shape so the
        # following 1x1 convs can consume it.
        def Layer(input):
            d = input.shape[-1]
            out = GlobalAveragePooling2D()(input)
            out = Reshape([1,1,d])(out)
            return out
        return Layer
    def __call__(self, input):
        out = self.GNR1(input)
        w,h,d = out.shape[1:4]
        if self.radix > 1:
            # Expose the radix splits as a separate axis and sum over them.
            # NOTE(review): this assumes channels are laid out radix-minor
            # in the grouped-conv output — confirm against Group_conv's
            # concatenation order.
            out = Reshape((w,h,self.radix,d//self.radix))(out)
            atten = Lambda(tf.reduce_sum,arguments={"axis":3})(out)
        else:
            atten = out
        atten = self.GlobleAvgPool(atten)
        atten = self.GNR2(atten)
        atten = self.groupConv(atten)
        atten = self.r_softmax(atten)
        if self.radix > 1:
            d = atten.shape[-1]
            # Broadcast the per-radix attention over the spatial dims and
            # collapse the radix axis by summation.
            atten = Reshape((1,1,self.radix,d//self.radix))(atten)
            out = Multiply()([atten,out])
            out = Lambda(tf.reduce_sum,arguments={"axis":3})(out)
        else:
            out = Multiply()([atten, out])
        return out

class Bottleneck(_Bottleneck):
    """ResNeSt bottleneck: 1x1 reduce -> 3x3 split-attention -> 1x1 expand.

    When `avg_down_stride` is set and the 3x3 stage has stride > 1, the
    stride is moved out of the convolution into an average-pooling layer
    (the ResNeSt "avd" downsampling trick).
    """
    expansion = 4

    def __init__(self, inplance, plance, groups, radix, reduction_factor=4,
                 base_width=4, base_channels=64, avg_down_stride=True, **kwargs):
        super(Bottleneck, self).__init__(inplance=inplance, plance=plance, **kwargs)
        # Width of the middle (split-attention) stage, scaled by cardinality.
        if groups == 1:
            width = plance
        else:
            width = math.floor(plance * (base_width / base_channels)) * groups

        # Only pool-downsample when the 3x3 stage actually has a stride.
        self.avg_down_stride = avg_down_stride and self.conv2_stride > 1
        self.conv1 = Conv2D(width, 1, self.conv1_stride, 'valid')
        self.batch_norm1 = BatchNormalization()
        # Stride 1 here when the avg-pool layer takes over the striding.
        self.conv2 = SplitAttentionConv(width, width, 3,
                                        1 if self.avg_down_stride else self.conv2_stride,
                                        'same', radix, groups, reduction_factor)
        if self.avg_down_stride:
            self.avd_layer = AvgPool2D((3, 3), self.conv2_stride, "same")
        self.conv3 = Conv2D(plance * self.expansion, 1, 1, "valid")
        self.batch_norm3 = BatchNormalization()

    def __call__(self, input):
        shortcut = input
        out = self.conv1(input)
        out = self.batch_norm1(out)
        out = Activation(tf.nn.relu)(out)

        # conv2 applies BN + ReLU internally (via GroupConvMoudel).
        out = self.conv2(out)

        if self.avg_down_stride:
            out = self.avd_layer(out)

        out = self.conv3(out)
        out = self.batch_norm3(out)

        if self.down_sample is not None:
            shortcut = self.down_sample(input)
        out = Add()([out, shortcut])
        # Fix: standard ResNet/ResNeSt bottlenecks apply ReLU after the
        # shortcut addition; the original returned the raw sum.
        out = Activation(tf.nn.relu)(out)
        return out




class Resnest(ResNetV1d):
    """ResNeSt backbone: a ResNetV1d whose stages use split-attention blocks."""

    # depth -> (block class, blocks per stage)
    arch_settings = {
        50: (Bottleneck, (3, 4, 6, 3)),
        101: (Bottleneck, (3, 4, 23, 3)),
        152: (Bottleneck, (3, 8, 36, 3)),
        200: (Bottleneck, (3, 24, 36, 3)),
    }

    def __init__(self, groups, base_width, radix, reduction_factor,
                 avg_down_stride, **kwargs):
        # Store the ResNeSt-specific hyper-parameters before the parent
        # constructor builds the stages (it calls make_res_layer below).
        self.groups = groups
        self.base_width = base_width
        self.radix = radix
        self.reduction_factor = reduction_factor
        self.avg_down_stride = avg_down_stride
        super(Resnest, self).__init__(**kwargs)

    def make_res_layer(self, **kwargs):
        """Build one residual stage, forwarding the split-attention options.

        :param kwargs: arguments passed through from the parent ResLayer setup
        :return: a configured ResLayer
        """
        return ResLayer(
            groups=self.groups,
            base_width=self.base_width,
            base_channels=self.base_channels,
            radix=self.radix,
            reduction_factor=self.reduction_factor,
            avg_down_stride=self.avg_down_stride,
            **kwargs)

class ResNeStV1(Resnest):
    """ResNeSt with the standard hyper-parameters (radix=2, cardinality=1)."""

    def __init__(self, **kwargs):
        defaults = dict(groups=1, radix=2, base_width=4,
                        reduction_factor=4, avg_down_stride=True)
        super(ResNeStV1, self).__init__(**defaults, **kwargs)


def create_model(input, clase_num):
    """Build a ResNeSt-50 classifier.

    :param input: Keras input tensor
    :param clase_num: number of output classes
    :return: a compiled-ready keras Model mapping `input` to class logits
    """
    features = ResNeStV1(depth=50)(input)
    pooled = GlobalAveragePooling2D(name='avg_pool')(features)
    logits = Dense(clase_num, kernel_initializer="he_normal",
                   use_bias=False, name="fc_NObias")(pooled)
    return Model(inputs=input, outputs=logits)