import tensorflow as tf
import tensorflow.keras as keras
from tensorflow.keras import layers, activations, losses, optimizers, metrics, callbacks
from python_ai.common.xcommon import *
import os
import numpy as np


class ConvCell(keras.Model):
    """A Conv2D layer immediately followed by batch normalization.

    Used as the basic building unit of the residual blocks below; no
    activation is applied here so callers can place ReLU where they need it.
    """

    def __init__(self, filters, ksize, strides=(1, 1), padding='same', **kwargs):
        super().__init__(**kwargs)
        # Sub-layers are assigned as attributes so Keras tracks their weights.
        self.conv = layers.Conv2D(filters, ksize, strides, padding)
        self.bn = layers.BatchNormalization()

    def call(self, inputs, training=None, mask=None):
        # Forward the `training` flag so BatchNormalization switches between
        # batch statistics (training) and its moving averages (inference).
        return self.bn(self.conv(inputs, training=training), training=training)


class ResNetBlock(keras.Model):
    """Basic two-convolution residual block (post-activation style).

    When ``strides != (1, 1)`` the main path downsamples, so a 1x1 projection
    ConvCell is applied to the shortcut to match shapes before the addition.
    """

    def __init__(self, filters, strides=(1, 1), **kwargs):
        super().__init__(**kwargs)
        self.strides = strides
        self.conv1 = ConvCell(filters, (3, 3), strides)
        self.act1 = layers.ReLU()
        self.conv2 = ConvCell(filters, (3, 3), (1, 1))
        if strides != (1, 1):
            # Projection shortcut: match spatial size and channel count.
            self.residual = ConvCell(filters, (1, 1), strides)
        # Fix: build the Add layer once here. The original called
        # `layers.add([...])` inside `call`, which constructs a fresh Add
        # layer object on every forward pass instead of reusing one.
        self.add = layers.Add()
        self.act2 = layers.ReLU()

    def call(self, inputs, training=None, mask=None):
        x = self.conv1(inputs, training=training)
        x = self.act1(x)
        x = self.conv2(x, training=training)
        if self.strides != (1, 1):
            shortcut = self.residual(inputs, training=training)
        else:
            shortcut = inputs
        x = self.add([x, shortcut])
        return self.act2(x)


class ResNet(keras.Model):
    """Small ResNet: stem conv, stacked residual stages, global pool, dense head.

    Each of the ``n_layers`` stages holds two ResNetBlocks; every stage after
    the first downsamples by 2 in its first block, and the channel count
    doubles from one stage to the next starting at ``init_ch``.
    """

    def __init__(self, init_ch, n_layers, n_cls, act=None, **kwargs):
        super().__init__(**kwargs)
        self.conv1 = ConvCell(init_ch, (3, 3), (1, 1))
        self.blocks = keras.Sequential()
        filters = init_ch
        for layer_id in range(n_layers):
            for block_id in range(2):
                # Downsample in the first block of every stage but the first.
                downsample = block_id == 0 and layer_id != 0
                strides = (2, 2) if downsample else (1, 1)
                self.blocks.add(ResNetBlock(filters, strides))
            filters *= 2
        self.globalPool = layers.GlobalAvgPool2D()
        self.fc = layers.Dense(n_cls, activation=act)

    def call(self, inputs, training=None, mask=None):
        x = self.conv1(inputs, training=training)
        x = self.blocks(x, training=training)
        x = self.globalPool(x, training=training)
        return self.fc(x, training=training)


if '__main__' == __name__:

    # Fix seeds for reproducible shuffling and weight initialization.
    np.random.seed(1)
    tf.random.set_seed(1)

    # CIFAR-10 images scaled to [0, 1] floats; labels stay as integer ids.
    (x_train, y_train), (x_test, y_test) = keras.datasets.cifar10.load_data()
    x_train = x_train.astype(np.float32) / 255.
    x_test = x_test.astype(np.float32) / 255.

    VER = 'v1.1'
    BATCH_SIZE = 64
    EPOCHS = 8
    FILE_NAME = os.path.basename(__file__)
    LOG_DIR = os.path.join('_log', FILE_NAME, VER)
    SAVE_PATH = os.path.join('_save', FILE_NAME, VER, 'model.dat')
    # Renamed from `dir`, which shadowed the builtin of the same name.
    save_dir = os.path.split(SAVE_PATH)[0]
    os.makedirs(save_dir, exist_ok=True)

    if os.path.exists(SAVE_PATH):
        # Reuse a previously trained model instead of retraining.
        model = keras.models.load_model(SAVE_PATH)
    else:
        model = ResNet(64, 4, 10, act=activations.softmax)

        # Build with the CIFAR-10 input shape so summaries can be printed
        # before any data flows through the model.
        model.build(input_shape=(None, 32, 32, 3))

        SUMMARY_LEN = 128
        sep('ResNet')
        model.summary(line_length=SUMMARY_LEN)
        sep('ResNet blocks Sequential')
        model.blocks.summary(line_length=SUMMARY_LEN)
        for i, block in enumerate(model.blocks.layers):
            sep(f'#{i + 1}')
            block.summary(line_length=SUMMARY_LEN)

        model.compile(
            loss=losses.sparse_categorical_crossentropy,
            optimizer=optimizers.Adam(learning_rate=0.001),
            # Fix: `metrics` expects a list of metrics, not a bare callable.
            metrics=[metrics.sparse_categorical_accuracy]
        )

        # Shuffle once so validation_split carves off a random (not
        # dataset-ordered) validation slice.
        perm = np.random.permutation(len(x_train))
        x_train = x_train[perm]
        y_train = y_train[perm]

        model.fit(x_train, y_train,
                  batch_size=BATCH_SIZE, epochs=EPOCHS,
                  validation_split=0.05,
                  # Fix: `callbacks` expects a list of Callback instances.
                  callbacks=[callbacks.TensorBoard(log_dir=LOG_DIR,
                                                   update_freq='batch',
                                                   profile_batch=0)])

        model.save(SAVE_PATH)

    model.evaluate(x_test, y_test, batch_size=BATCH_SIZE)
