import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers, models
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Dense, BatchNormalization, Activation, GlobalAveragePooling2D
from tensorflow.keras.optimizers import Adam
import numpy as np
import time


# ResNet-152: stem conv + (3 + 8 + 36 + 3) bottleneck blocks x 3 convs + FC = 152 weight layers

class CellBlock(layers.Layer):
    """Bottleneck residual block: 1x1 -> 3x3 -> 1x1 convolutions plus a
    1x1-projected skip connection, followed by a final ReLU.

    Args:
        filter_num: sequence of three filter counts, used for the 1x1,
            3x3 and final 1x1 convolutions respectively.
        stride: stride applied by the first 1x1 conv and by the 1x1
            projection on the skip path (used to downsample at stage
            boundaries; 1 elsewhere).
    """

    def __init__(self, filter_num, stride=1):
        super(CellBlock, self).__init__()

        self.conv1 = Conv2D(filter_num[0], (1, 1), strides=stride, padding='same')
        self.bn1 = BatchNormalization()
        self.relu1 = Activation('relu')

        self.conv2 = Conv2D(filter_num[1], (3, 3), strides=1, padding='same')
        self.bn2 = BatchNormalization()
        self.relu2 = Activation('relu')

        self.conv3 = Conv2D(filter_num[2], (1, 1), strides=1, padding='same')
        self.bn3 = BatchNormalization()

        # 1x1 projection so the skip path matches the main path's
        # channel count and stride before the element-wise add.
        self.residual = Conv2D(filter_num[2], (1, 1), strides=stride, padding='same')

    def call(self, inputs, training=None):
        """Forward pass.

        Fix: the `training` flag is now forwarded to every
        BatchNormalization call. Previously `self.bn*(x)` dropped an
        explicitly passed `training=True/False`, so the BN layers could
        use the wrong statistics (batch vs. moving averages).
        """
        x = self.conv1(inputs)
        x = self.bn1(x, training=training)
        x = self.relu1(x)

        x = self.conv2(x)
        x = self.bn2(x, training=training)
        x = self.relu2(x)

        x = self.conv3(x)
        x = self.bn3(x, training=training)

        r = self.residual(inputs)

        x = layers.add([x, r])
        output = tf.nn.relu(x)

        return output


class ResNet152Model(models.Model):
    """ResNet-152 assembled as a Sequential stack of bottleneck CellBlocks.

    Stage layout: 3 + 8 + 36 + 3 bottleneck blocks (each with 3 convs),
    plus the stem conv and the final Dense layer. Input shape is
    (None, 224, 224, 3); output is a softmax over `nb_classes`.
    """

    def get_config(self):
        # Fix: previously a bare `pass` returned None, which breaks Keras
        # serialization. Return the one constructor argument instead.
        return {'nb_classes': self.nb_classes}

    def __init__(self, nb_classes=1000):
        """Build the full network eagerly inside a keras.Sequential.

        Args:
            nb_classes: size of the final softmax layer.
        """
        super(ResNet152Model, self).__init__()

        self.nb_classes = nb_classes  # kept for get_config()

        self.model = keras.Sequential()
        # Stem: 7x7/2 conv + BN + ReLU + 3x3/2 max pool.
        self.model.add(Conv2D(64, (7, 7), strides=(2, 2), padding='same'))
        self.model.add(BatchNormalization())
        self.model.add(Activation('relu'))
        self.model.add(MaxPooling2D((3, 3), strides=(2, 2), padding='same'))
        # Filter counts for the three convs of each stage's bottleneck blocks.
        filter_block1 = [64, 64, 256]
        filter_block2 = [128, 128, 512]
        filter_block3 = [256, 256, 1024]
        filter_block4 = [512, 512, 2048]
        # Stage 1: 3 blocks, no downsampling (the pool above already halved).
        self.model.add(CellBlock(filter_block1, 1))
        self.model.add(CellBlock(filter_block1, 1))
        self.model.add(CellBlock(filter_block1, 1))

        # Stage 2: 8 blocks, first one downsamples.
        self.model.add(CellBlock(filter_block2, 2))
        for _ in range(1, 8):
            self.model.add(CellBlock(filter_block2, 1))

        # Stage 3: 36 blocks, first one downsamples.
        self.model.add(CellBlock(filter_block3, 2))
        for _ in range(1, 36):
            self.model.add(CellBlock(filter_block3, 1))

        # Stage 4: 3 blocks, first one downsamples.
        self.model.add(CellBlock(filter_block4, 2))
        for _ in range(1, 3):
            self.model.add(CellBlock(filter_block4, 1))

        # Head: global average pool + softmax classifier.
        self.model.add(GlobalAveragePooling2D())
        self.model.add(Dense(nb_classes, activation='softmax'))
        self.model.build(input_shape=(None, 224, 224, 3))

    def train(self, dataset, batch_size=32, nb_epoch=50):
        """Compile and fit on in-memory arrays.

        Assumes `dataset` exposes train_images/train_labels and
        valid_images/valid_labels as numpy arrays with one-hot labels
        (categorical_crossentropy) -- TODO confirm against the caller.
        """
        # Fix: `lr` is the deprecated alias; use `learning_rate`.
        adam = Adam(learning_rate=1e-4)
        self.model.compile(loss='categorical_crossentropy',
                           optimizer=adam,
                           metrics=['accuracy'])

        # Fix: honor the batch_size parameter. The old call ignored it and
        # set steps_per_epoch to the sample count, which made every epoch
        # run n_samples batches (re-traversing the data ~batch_size times).
        self.model.fit(dataset.train_images,
                       dataset.train_labels,
                       batch_size=batch_size,
                       epochs=nb_epoch,
                       validation_data=(dataset.valid_images, dataset.valid_labels),
                       shuffle=True)

    def save_model(self, file_path='./model/ResNet152model.h5'):
        """Persist the inner Sequential model to an HDF5 file."""
        self.model.save(file_path)

    def load_model(self, file_path='./model/ResNet152model.h5'):
        """Replace the inner model with one loaded from `file_path`.

        Fix: `keras.Model.load_model` does not exist; the loader lives in
        `keras.models`. CellBlock is a custom layer, so it must be passed
        via custom_objects for HDF5 deserialization.
        """
        self.model = models.load_model(file_path,
                                       custom_objects={'CellBlock': CellBlock})

    def splitpredict(self, x, startlayer, endlayer=0):
        """Run layers [startlayer, endlayer) of the inner model on `x`.

        Args:
            x: input tensor, shape (None, 224, 224, 3) for startlayer 0;
                otherwise the output of the previous split.
            startlayer: index of the first layer to apply (inclusive).
            endlayer: index one past the last layer; 0 means "to the end".

        Returns:
            The resulting activations as a numpy array.

        Raises:
            Exception: if the layer range is out of bounds.
        """
        if endlayer == 0:
            endlayer = len(self.model.layers)
        if startlayer < 0 or endlayer > len(self.model.layers):
            raise Exception("Layer range wrong, please check")
        for i in range(startlayer, endlayer):
            x = self.model.layers[i](x)
        return x.numpy()

    def call(self, inputs):
        """Forward pass: apply every layer of the inner model in order."""
        x = inputs
        for i in range(len(self.model.layers)):
            x = self.model.layers[i](x)
        return x

    def estimateTimeAndData(self):
        """Measure per-layer average CPU time and output size.

        Runs `nums` forward passes on random (1, 224, 224, 3) inputs,
        timing each layer with time.process_time and accumulating
        tf.size of each layer's output.

        Returns:
            (avg_time, avg_size): two numpy arrays, one entry per layer.
        """
        nums = 100  # number of forward passes to average over
        len_model = len(self.model.layers)
        print(len_model)
        time_layer = [0.0] * len_model
        datasize = [0] * len_model
        # Fix: use `nums` consistently instead of the hard-coded 100s.
        for i in range(nums):
            x = tf.random.normal([1, 224, 224, 3], dtype=tf.float32)
            for d in range(len_model):
                start = time.process_time()
                x = self.model.layers[d](x)
                end = time.process_time()
                time_layer[d] += (end - start)
                datasize[d] += (tf.size(x).numpy())
        return np.array(time_layer) / nums, np.array(datasize) / nums


if __name__ == '__main__':
    # Draw one random ImageNet-sized input and profile the network layer by layer.
    sample = tf.random.normal([1, 224, 224, 3], dtype=tf.float32)
    resnet152 = ResNet152Model(1000)
    # resnet152.model.build(input_shape=(None, 224, 224, 3))
    # resnet152.model.summary()
    avg_time, avg_data = resnet152.estimateTimeAndData()
    print(tf.size(sample).numpy(), avg_data)
    print(sum(avg_time))
# for i in range(len(resnet152.model.layers)):
# print('The {} layer\'s time cost is {}, output data size is {}'.format(i+1, avg_time[i], avg_data[i]))

# print("test the splitpredict")
# index = 20
# x = resnet152.splitpredict(x, 0, index)
# print('Result after running up to layer {}: {}'.format(index, x.size))
# x = resnet152.splitpredict(x, index)
# print('Result of the remaining layers: {}'.format(x))
