import tensorflow as tf
import numpy as np
import os

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

class BasicBlock(tf.keras.layers.Layer):
    """ResNet v1 basic residual block: conv-BN-ReLU, conv-BN, skip add, ReLU.

    Args:
        out_channel: number of filters for both 3x3 convolutions.
        strides: stride of the first conv (spatial downsampling when > 1).
        down_sample: optional layer projecting the shortcut to match shapes.
    """
    # Output channels = out_channel * expansion (no widening for basic blocks).
    expansion = 1

    def __init__(self, out_channel, strides=1, down_sample=None, **kwargs):
        super(BasicBlock, self).__init__(**kwargs)
        # First 3x3 conv; carries the block's stride.
        self.conv1 = tf.keras.layers.Conv2D(
            out_channel, kernel_size=3, strides=strides,
            padding='SAME', use_bias=False, name='conv1')
        self.bn1 = tf.keras.layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name='conv1/BatchNorm')

        # Second 3x3 conv always keeps the spatial size.
        self.conv2 = tf.keras.layers.Conv2D(
            out_channel, kernel_size=3, strides=1,
            padding='SAME', use_bias=False, name='conv2')
        self.bn2 = tf.keras.layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name='conv2/BatchNorm')

        self.down_sample = down_sample  # shortcut projection, if any
        self.add = tf.keras.layers.Add()
        self.relu = tf.keras.layers.ReLU()

    def call(self, inputs, training=False):
        # Shortcut: identity, or the projection when shapes differ.
        identity = inputs if self.down_sample is None else self.down_sample(inputs)

        out = self.relu(self.bn1(self.conv1(inputs), training=training))
        out = self.bn2(self.conv2(out), training=training)

        # Residual addition followed by the final activation.
        return self.relu(self.add([identity, out]))
    

class Bottleneck(tf.keras.layers.Layer):
    """ResNet v1 bottleneck block: 1x1 reduce, 3x3, 1x1 expand, then skip add.

    Args:
        out_channel: base filter count; the final 1x1 conv emits
            ``out_channel * expansion`` channels.
        strides: stride of the middle 3x3 conv (spatial downsampling when > 1).
        down_sample: optional layer projecting the shortcut to match shapes.
    """
    # The last 1x1 conv widens the features by this factor.
    expansion = 4

    def __init__(self, out_channel, strides=1, down_sample=None, **kwargs):
        super(Bottleneck, self).__init__(**kwargs)
        # 1x1 conv reducing channels before the expensive 3x3.
        self.conv1 = tf.keras.layers.Conv2D(out_channel, kernel_size=1, use_bias=False, name='conv1')
        self.bn1 = tf.keras.layers.BatchNormalization(momentum=0.9, epsilon=1e-5, name='conv1/BatchNorm')

        # 3x3 conv; carries the block's stride.
        self.conv2 = tf.keras.layers.Conv2D(out_channel, kernel_size=3, strides=strides, padding='SAME', use_bias=False, name='conv2')
        # FIX: name the BN layer like bn1/bn3 so variable names stay consistent
        # (important when mapping checkpoints by layer name).
        self.bn2 = tf.keras.layers.BatchNormalization(momentum=0.9, epsilon=1e-5, name='conv2/BatchNorm')

        # 1x1 conv expanding back to out_channel * expansion.
        self.conv3 = tf.keras.layers.Conv2D(out_channel*self.expansion, kernel_size=1, use_bias=False, name='conv3')
        self.bn3 = tf.keras.layers.BatchNormalization(momentum=0.9, epsilon=1e-5, name='conv3/BatchNorm')

        self.down_sample = down_sample  # shortcut projection, if any
        self.add = tf.keras.layers.Add()
        self.relu = tf.keras.layers.ReLU()

    def call(self, inputs, training=False):
        # Shortcut: identity, or the projection when shapes differ.
        if self.down_sample is not None:
            identity = self.down_sample(inputs)
        else:
            identity = inputs

        x = self.conv1(inputs)
        x = self.bn1(x, training=training)
        x = self.relu(x)

        x = self.conv2(x)
        x = self.bn2(x, training=training)
        x = self.relu(x)

        x = self.conv3(x)
        x = self.bn3(x, training=training)

        # Residual addition followed by the final activation.
        x = self.add([identity, x])
        x = self.relu(x)

        return x
    

class ResNet(tf.keras.Model):
    """Generic ResNet v1: a 7x7 stem, four residual stages, optional head.

    Args:
        block: residual block class (``BasicBlock`` or ``Bottleneck``); its
            ``expansion`` attribute sets the stage output width.
        blocks_num: list of four ints, the number of blocks per stage.
        num_classes: size of the final logits layer (head only).
        include_top: if True, append global average pooling + dense + softmax.
    """

    def __init__(self, block, blocks_num, num_classes=1000, include_top=True, **kwargs):
        super(ResNet, self).__init__(**kwargs)
        self.include_top = include_top

        # Stem: 7x7/2 conv, BN, ReLU, 3x3/2 max-pool (overall 4x downsampling).
        self.conv1 = tf.keras.layers.Conv2D(filters=64, kernel_size=7, strides=2, padding='SAME', use_bias=False, name='conv1')
        self.bn1 = tf.keras.layers.BatchNormalization(momentum=0.9, epsilon=1e-5, name='conv1/BatchNorm')
        self.relu1 = tf.keras.layers.ReLU(name='relu1')

        self.maxpool1 = tf.keras.layers.MaxPool2D(pool_size=3, strides=2, padding='SAME', name='maxpool1')

        # Four residual stages; stages 2-4 halve the spatial size.
        self.block1 = self._make_layer(block, True, 64, blocks_num[0], name='block1')
        self.block2 = self._make_layer(block, False, 128, blocks_num[1], strides=2, name='block2')
        self.block3 = self._make_layer(block, False, 256, blocks_num[2], strides=2, name='block3')
        self.block4 = self._make_layer(block, False, 512, blocks_num[3], strides=2, name='block4')

        if self.include_top:
            self.avgpool = tf.keras.layers.GlobalAveragePooling2D(name='avgpool1')
            self.fc = tf.keras.layers.Dense(num_classes, name='logits')
            self.softmax = tf.keras.layers.Softmax()

    def call(self, inputs, training=False, **kwargs):
        x = self.conv1(inputs)
        # FIX: propagate the training flag so stem BN uses batch statistics
        # during training, consistent with every other BN call in this file.
        x = self.bn1(x, training=training)
        x = self.relu1(x)
        x = self.maxpool1(x)

        x = self.block1(x, training=training)
        x = self.block2(x, training=training)
        x = self.block3(x, training=training)
        x = self.block4(x, training=training)

        if self.include_top:
            x = self.avgpool(x)
            x = self.fc(x)
            x = self.softmax(x)

        return x

    def _make_layer(self, block, first_block, channel, block_num, name=None, strides=1):
        """Build one residual stage of ``block_num`` blocks.

        Only the first block may change spatial size/channel count, so only it
        receives the stride and (when needed) a projection shortcut.
        """
        down_sample = None
        # A projection shortcut is needed when the block changes the spatial
        # size (strides != 1) or the channel count (first block of a stage
        # whose block widens its output, i.e. expansion != 1). FIX: the old
        # condition (`first_block is True`) also projected 64->64 for the
        # first BasicBlock stage, adding parameters absent from standard ResNet.
        if strides != 1 or (first_block and block.expansion != 1):
            down_sample = tf.keras.models.Sequential([
                tf.keras.layers.Conv2D(channel*block.expansion, kernel_size=1, strides=strides, use_bias=False, name='conv1'),
                tf.keras.layers.BatchNormalization(momentum=0.9, epsilon=1e-5, name='BatchNorm')
            ], name='shortcut')

        layers_list = []
        layers_list.append(block(channel, down_sample=down_sample, strides=strides, name="unit_1"))

        # Remaining blocks keep shape: no stride, no shortcut projection.
        for index in range(1, block_num):
            layers_list.append(block(channel, name='unit_' + str(index + 1)))

        return tf.keras.models.Sequential(layers_list, name=name)
    

class BasicBlock_v2(tf.keras.layers.Layer):
    """ResNet v2 (pre-activation) basic block: BN-ReLU-conv twice, then add.

    Unlike v1, batch norm and ReLU come *before* each conv and there is no
    activation after the residual addition.

    Args:
        out_channel: number of filters for both 3x3 convolutions.
        strides: stride of the first conv (spatial downsampling when > 1).
        down_sample: optional layer projecting the shortcut to match shapes.
    """
    # Output channels = out_channel * expansion (no widening for basic blocks).
    expansion = 1

    def __init__(self, out_channel, strides=1, down_sample=None, **kwargs):
        super(BasicBlock_v2, self).__init__(**kwargs)
        # First 3x3 conv; carries the block's stride.
        self.conv1 = tf.keras.layers.Conv2D(
            out_channel, kernel_size=3, strides=strides,
            padding='SAME', use_bias=False, name='conv1')
        self.bn1 = tf.keras.layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name='conv1/BatchNorm')

        # Second 3x3 conv always keeps the spatial size.
        self.conv2 = tf.keras.layers.Conv2D(
            out_channel, kernel_size=3, strides=1,
            padding='SAME', use_bias=False, name='conv2')
        self.bn2 = tf.keras.layers.BatchNormalization(
            momentum=0.9, epsilon=1e-5, name='conv2/BatchNorm')

        self.down_sample = down_sample  # shortcut projection, if any
        self.add = tf.keras.layers.Add()
        self.relu = tf.keras.layers.ReLU()

    def call(self, inputs, training=False):
        # Shortcut taken from the raw inputs (before pre-activation).
        identity = inputs if self.down_sample is None else self.down_sample(inputs)

        # Pre-activation ordering: BN -> ReLU -> conv, repeated.
        out = self.conv1(self.relu(self.bn1(inputs, training=training)))
        out = self.conv2(self.relu(self.bn2(out, training=training)))

        # v2 blocks end with the bare addition (no trailing ReLU).
        return self.add([identity, out])

    
class Bottleneck_v2(tf.keras.layers.Layer):
    """ResNet v2 (pre-activation) bottleneck: BN-ReLU before each conv.

    Args:
        out_channel: base filter count; the final 1x1 conv emits
            ``out_channel * expansion`` channels.
        strides: stride of the middle 3x3 conv (spatial downsampling when > 1).
        down_sample: optional layer projecting the shortcut to match shapes.
    """
    # The last 1x1 conv widens the features by this factor.
    expansion = 4

    def __init__(self, out_channel, strides=1, down_sample=None, **kwargs):
        super(Bottleneck_v2, self).__init__(**kwargs)
        # 1x1 conv reducing channels before the expensive 3x3.
        self.conv1 = tf.keras.layers.Conv2D(out_channel, kernel_size=1, use_bias=False, name='conv1')
        self.bn1 = tf.keras.layers.BatchNormalization(momentum=0.9, epsilon=1e-5, name='conv1/BatchNorm')

        # 3x3 conv; carries the block's stride.
        self.conv2 = tf.keras.layers.Conv2D(out_channel, kernel_size=3, strides=strides, padding='SAME', use_bias=False, name='conv2')
        # FIX: name the BN layer like bn1/bn3 so variable names stay consistent
        # (important when mapping checkpoints by layer name).
        self.bn2 = tf.keras.layers.BatchNormalization(momentum=0.9, epsilon=1e-5, name='conv2/BatchNorm')

        # 1x1 conv expanding back to out_channel * expansion.
        self.conv3 = tf.keras.layers.Conv2D(out_channel*self.expansion, kernel_size=1, use_bias=False, name='conv3')
        self.bn3 = tf.keras.layers.BatchNormalization(momentum=0.9, epsilon=1e-5, name='conv3/BatchNorm')

        self.down_sample = down_sample  # shortcut projection, if any
        self.add = tf.keras.layers.Add()
        self.relu = tf.keras.layers.ReLU()

    def call(self, inputs, training=False):
        # Shortcut taken from the raw inputs (before pre-activation).
        if self.down_sample is not None:
            identity = self.down_sample(inputs)
        else:
            identity = inputs

        # Pre-activation ordering: BN -> ReLU -> conv, three times.
        x = self.bn1(inputs, training=training)
        x = self.relu(x)
        x = self.conv1(x)

        x = self.bn2(x, training=training)
        x = self.relu(x)
        x = self.conv2(x)

        x = self.bn3(x, training=training)
        x = self.relu(x)
        x = self.conv3(x)

        # v2 blocks end with the bare addition (no trailing ReLU).
        x = self.add([identity, x])

        return x
    

def resnet_34(num_classes=1000, include_top=True):
    """ResNet-34 (v1): BasicBlock with [3, 4, 6, 3] units per stage."""
    return ResNet(BasicBlock, [3, 4, 6, 3], num_classes, include_top)

def resnet_50(num_classes=1000, include_top=True):
    """ResNet-50 (v1): Bottleneck with [3, 4, 6, 3] units per stage."""
    return ResNet(Bottleneck, [3, 4, 6, 3], num_classes, include_top)

def resnet_101(num_classes=1000, include_top=True):
    """ResNet-101 (v1): Bottleneck with [3, 4, 23, 3] units per stage."""
    return ResNet(Bottleneck, [3, 4, 23, 3], num_classes, include_top)


def main():
    """Smoke test: build a ResNet-101 and run one inference pass on random data."""
    model = resnet_101(num_classes=1000, include_top=True)

    # FIX: Keras Conv2D defaults to data_format='channels_last' (NHWC), so
    # both the random input and the build shape must be (batch, H, W, C) —
    # the previous NCHW shape (4, 3, 224, 224) was misinterpreted by every
    # layer. The unused random label tensor was removed.
    x_data = np.random.rand(4, 224, 224, 3).astype(np.float32)
    model.build((None, 224, 224, 3))

    model.trainable = False  # inference-only smoke test; freeze all weights
    model.compile(optimizer="Adam", loss="mse", metrics=["mae", "acc"])
    model.summary()

    pred = model.predict(x_data)
    print("input shape:", x_data.shape)
    print("output shape:", pred.shape)

if __name__ == '__main__':
    main()