import numpy as np
import tensorflow as tf
import tensorflow.keras as keras
from tensorflow.keras import layers, optimizers, activations, losses, metrics, callbacks
import sys
import os

# Reproducibility: pin both RNG streams that the script uses.
tf.random.set_seed(777)
np.random.seed(777)
filename = os.path.basename(__file__)

# Hyper-parameters / experiment settings.
ver = 'v4.0'        # experiment version tag (becomes part of the log dir)
alpha = 0.0001      # Adam learning rate
n_epochs = 40
batch_size = 64
n_clip = 10         # keep only every n_clip-th training sample (see data prep)
print(f'ver: {ver}, n_clip: {n_clip}, n_epochs: {n_epochs}, batch_size: {batch_size}, alpha: {alpha}')

# 1. ResNet exercise — data handling (each sub-task worth 10 points)
# (1) Load the MNIST dataset (comes with a ready-made train/test split).
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()


# (2) Preprocessing: scale pixels to [0, 1] and add a channel axis.
def _prep(images):
    """uint8 (N, 28, 28) images -> float32 (N, 28, 28, 1) in [0, 1]."""
    return (images.astype(np.float32) / 255.0).reshape((-1, 28, 28, 1))


x_train = _prep(x_train)
x_test = _prep(x_test)

# Shrink the training set: keep every n_clip-th sample.
keep = np.arange(x_train.shape[0]) % n_clip == 0
x_train = x_train[keep]
y_train = y_train[keep]

# 2. Model components
# (1) Reusable conv block; defaults: 3x3 kernel, stride 1, 'same' (zero) padding.
class ConvBNRelu(keras.Model):
    """A Conv2D -> BatchNormalization -> ReLU stack."""

    def __init__(self, ch, kernels=3, strides=1, padding='same', **kwargs):
        super().__init__(**kwargs)
        # Keep the configuration around for later inspection/debugging.
        self.ch = ch
        self.kernels = kernels
        self.strides = strides
        self.padding = padding
        self.conv = layers.Conv2D(ch, kernels, strides, padding)
        self.bn = layers.BatchNormalization()
        self.relu = layers.ReLU()

    @tf.function
    def call(self, input, training=None):
        # BatchNorm needs the training flag to choose batch vs moving statistics.
        return self.relu(self.bn(self.conv(input), training=training))


# 3. ResNet block (class):
# (1) two internal 3x3 convolutions, each with batch normalization
# (2) when the shortcut must be transformed ("residual" mode), use a 1x1
#     projection convolution; otherwise use the identity shortcut
# (3) forward pass adds the shortcut to the two-conv path
class ResNetBlock(keras.Model):
    """Basic ResNet block: conv-conv plus a (projected or identity) shortcut."""

    def __init__(self, ch, strides, residual, **kwargs):
        """
        Args:
            ch: output channels of both conv stages.
            strides: stride of the first conv (2 downsamples, 1 keeps size).
            residual: if True, project the shortcut with a strided 1x1 conv;
                if False, use the identity shortcut (requires the input to
                already have `ch` channels and `strides == 1`).
        """
        super().__init__(**kwargs)
        self.ch = ch
        self.strides = strides
        self.residual = residual
        self.conv1 = ConvBNRelu(ch, 3, strides)
        self.conv2 = ConvBNRelu(ch, 3, 1)
        # Only build the projection when it is actually used; creating it
        # unconditionally would add unused trainable weights (and inflate the
        # parameter count) in every identity-shortcut block.
        self.res_conv = ConvBNRelu(ch, 1, strides) if residual else None

    @tf.function
    def call(self, input, training=None):
        x = self.conv1(input, training=training)
        x = self.conv2(x, training=training)
        if self.residual:
            x += self.res_conv(input, training=training)
        else:
            # Identity shortcut: shapes must already match (see __init__).
            x += input
        return x


# 4. Main ResNet network (class), initial width 16 channels
# (1) stem convolution, then the body is assembled into a Sequential
# (2) residual groups built in a loop; channel count doubles after each group
# (3) global average pooling + dense head for classification, wired up in call
class ResNet(keras.Model):
    """ResNet classifier assembled from groups of ResNetBlock."""

    def __init__(self, blocks_spec_list, init_ch, n_cls, **kwargs):
        """
        Args:
            blocks_spec_list: number of blocks per group, e.g. [3, 4, 6, 3].
            init_ch: channel count of the first group (doubled per group).
            n_cls: number of output classes (the head emits raw logits).
        """
        super().__init__(**kwargs)
        self.block_spec_list = blocks_spec_list
        self.init_ch = init_ch
        self.output_ch = init_ch
        self.n_cls = n_cls

        # Stem: 3x3 conv at stride 1.
        self.conv1 = ConvBNRelu(init_ch, 3, 1)

        # Body: each group starts with a stride-2 (downsampling) block.
        # Every block of the first group, and the first block of each later
        # group, gets a projected shortcut; the rest use the identity shortcut.
        self.blocks = keras.Sequential()
        for group_id, group_size in enumerate(blocks_spec_list):
            for pos in range(group_size):
                stride = 2 if pos == 0 else 1
                use_projection = group_id == 0 or pos == 0
                self.blocks.add(ResNetBlock(self.output_ch, stride, use_projection))
            self.output_ch *= 2

        # Head: average over spatial dims, then a linear layer to logits.
        self.avg_pool = layers.GlobalAvgPool2D()
        self.fc = layers.Dense(n_cls)

    @tf.function
    def call(self, input, training=None):
        x = self.conv1(input, training=training)
        x = self.blocks(x, training=training)
        x = self.avg_pool(x)
        return self.fc(x)


# 5. Create and compile the model.
model = ResNet([3, 4, 6, 3], 16, 10)
model.build(input_shape=(None, 28, 28, 1))  # MNIST: 28x28 single-channel images
optimizer = optimizers.Adam(learning_rate=alpha)
# The head emits raw logits (no softmax), so the loss must apply it internally.
loss_fn = losses.SparseCategoricalCrossentropy(from_logits=True)
model.compile(optimizer=optimizer,
              loss=loss_fn,
              metrics=[metrics.sparse_categorical_accuracy])
model.summary()


class MyEarlyStopping(callbacks.EarlyStopping):
    """EarlyStopping that stays dormant until the monitored metric has
    exceeded `my_monitor_min_val`.

    This avoids stopping during early epochs where the monitored value
    (here: validation accuracy) is still below a useful level.
    """

    def __init__(self, my_monitor_min_val, **kwargs):
        """
        Args:
            my_monitor_min_val: floor for the monitored value; the parent
                early-stopping logic only runs once the metric exceeds it.
            **kwargs: forwarded to keras.callbacks.EarlyStopping.
        """
        super().__init__(**kwargs)
        self.my_monitor_min_val = my_monitor_min_val

    def on_epoch_end(self, epoch, logs=None):
        current = self.get_monitor_value(logs)
        # get_monitor_value returns None when the monitored key is missing
        # from logs (e.g. a typo in `monitor`, or an epoch without validation);
        # comparing None with a float would raise TypeError on Python 3.
        if current is not None and current > self.my_monitor_min_val:
            super().on_epoch_end(epoch, logs)


# Training: TensorBoard logging plus threshold-gated early stopping.
logdir = os.path.join('_log', filename, ver)
tb_callback = callbacks.TensorBoard(log_dir=logdir, update_freq='batch', profile_batch=0)
early_stopping = MyEarlyStopping(
    my_monitor_min_val=0.75,
    monitor='val_sparse_categorical_accuracy',
    min_delta=1e-2,
    patience=2,
    verbose=1,
    restore_best_weights=True,
)
model.fit(
    x_train, y_train,
    batch_size=batch_size,
    epochs=n_epochs,
    verbose=1,
    callbacks=[tb_callback, early_stopping],
    validation_split=0.1,  # hold out the last 10% of training data for validation
)

# Final held-out evaluation on the untouched test split.
model.evaluate(x_test, y_test, verbose=1)
