import datetime
import os

import numpy as np
from tensorflow import keras
from tensorflow.keras.callbacks import TensorBoard, ModelCheckpoint, LearningRateScheduler, ReduceLROnPlateau
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Dropout, Flatten, Dense, BatchNormalization, Activation, \
    Input, AveragePooling2D
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.regularizers import l2
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.optimizers import Adam

import data

# base dir for all artifacts (TensorBoard logs, model weights)
prefix = './'

# training hyper-parameters
batch_size = 32
epochs = 200

# load data from the project-local `data` module
# (assumes CIFAR-style arrays: flattened 32x32 RGB images — TODO confirm against data.py)
train_data, train_label = data.train_data()
test_data, test_label = data.test_data()

# reshape to NHWC (N, 32, 32, 3) and scale pixel values into [0, 1]
train_data = train_data.reshape(-1, 32, 32, 3) / 255.0
test_data = test_data.reshape(-1, 32, 32, 3) / 255.0

# load meta_data: the list of class names; its length is the number of output classes
categories = data.categories()
categories_len = len(categories)

# one-hot encode the integer labels for categorical_crossentropy
train_label_one_hot = to_categorical(train_label, categories_len)
test_label_one_hot = to_categorical(test_label, categories_len)

# train log: one timestamped TensorBoard log directory per run
log_dir = prefix + "logs/fit/" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
tensorboard_callback = TensorBoard(log_dir=log_dir, histogram_freq=1)

# model dir / final-weights path
_model_dir = prefix + 'model'
_model_path = prefix + 'model/weights_with_data_enhancement.h5'

# Create the weights directory up front: ModelCheckpoint writes into it during
# training and does not create missing directories, so the original ordering
# (makedirs only after fit) made every checkpoint save fail on a fresh clone.
os.makedirs(_model_dir, exist_ok=True)

# checkpoint: save best-validation-accuracy weights each improving epoch.
# The deprecated `period=1` argument is dropped — it equals the default
# save_freq='epoch' behavior.
model_checkpoint_callback = ModelCheckpoint(prefix + 'model/weights_best_{val_accuracy:06f}.h5', save_best_only=True,
                                            save_weights_only=True)


def resnet_layer(inputs, num_filters=16, kernel_size=3, strides=1, activation='relu', batch_normalization=True,
                 conv_first=True):
    """Conv2D / batch-norm / activation building block.

    With ``conv_first=True`` the ordering is conv -> BN -> activation
    (ResNet v1); otherwise BN -> activation -> conv (ResNet v2 ordering).
    ``batch_normalization`` and ``activation`` are individually optional.
    Returns the output tensor of the stack.
    """
    conv = Conv2D(num_filters, kernel_size=kernel_size, strides=strides, padding='same',
                  kernel_initializer='he_normal', kernel_regularizer=l2(1e-4))

    def _bn_act(tensor):
        # Shared BN + activation tail, each applied only when requested.
        if batch_normalization:
            tensor = BatchNormalization()(tensor)
        if activation is not None:
            tensor = Activation(activation)(tensor)
        return tensor

    if conv_first:
        return _bn_act(conv(inputs))
    return conv(_bn_act(inputs))


def lr_schedule(epoch):
    """Piecewise-constant learning-rate schedule.

    Starts at 1e-3 and shrinks the rate at epochs 80, 120, 160 and 180.
    Prints the selected rate and returns it, so it can serve both as a
    ``LearningRateScheduler`` callback and for the initial optimizer rate.
    """
    base = 1e-3
    # (threshold, multiplier) pairs, checked from the largest threshold down;
    # the first match wins, mirroring the original if/elif ladder.
    for threshold, factor in ((180, 0.5e-3), (160, 1e-3), (120, 1e-2), (80, 1e-1)):
        if epoch > threshold:
            lr = base * factor
            break
    else:
        lr = base
    print('Learning rate: ', lr)
    return lr


# 0.902300
# 0.902300
def resnet_v1(input_shape, depth, num_classes=10):
    """Build and compile a ResNet v1 classifier.

    :param input_shape: image tensor shape, e.g. (32, 32, 3)
    :param depth: network depth; must be 6n+2 (20, 32, 44, ...)
    :param num_classes: number of softmax outputs
    :return: a compiled ``Model`` (Adam optimizer, categorical cross-entropy)
    :raises ValueError: if ``depth`` is not of the form 6n+2
    """
    if (depth - 2) % 6 != 0:
        raise ValueError('depth should be 6n+2 (eg 20, 32, 44 in [a])')
    num_filters = 16
    num_res_blocks = int((depth - 2) / 6)
    inputs = Input(shape=input_shape)
    x = resnet_layer(inputs=inputs)
    # Three stacks of residual blocks; the first block of stacks 2 and 3
    # halves the spatial size (strides=2) while the filter count doubles.
    for stack in range(3):
        for res_block in range(num_res_blocks):
            strides = 1
            if stack > 0 and res_block == 0:
                strides = 2
            y = resnet_layer(inputs=x, num_filters=num_filters, strides=strides)
            y = resnet_layer(inputs=y, num_filters=num_filters, activation=None)
            if stack > 0 and res_block == 0:
                # 1x1 linear projection so the shortcut matches the downsampled shape.
                x = resnet_layer(inputs=x, num_filters=num_filters, kernel_size=1, strides=strides, activation=None,
                                 batch_normalization=False)
            x = keras.layers.add([x, y])
            x = Activation('relu')(x)
        num_filters *= 2

    x = AveragePooling2D(pool_size=8)(x)
    y = Flatten()(x)
    outputs = Dense(num_classes,
                    activation='softmax',
                    kernel_initializer='he_normal')(y)

    model = Model(inputs=inputs, outputs=outputs)
    model.summary()
    # `learning_rate` replaces the deprecated `lr` keyword (removed in newer Keras).
    model.compile(loss='categorical_crossentropy', optimizer=Adam(learning_rate=lr_schedule(0)),
                  metrics=['accuracy'])
    return model


# 0.88
def init_model():
    """Define and compile the plain CNN baseline.

    Three conv/conv/BN/pool/dropout stages (32 -> 64 -> 128 filters)
    followed by a 512-unit dense layer, softmax over all categories.

    :return: a compiled ``Sequential`` model
    """
    _model = Sequential()
    _model.add(Conv2D(32, (3, 3), padding='same', input_shape=train_data.shape[1:], activation='relu'))
    _model.add(Conv2D(32, (3, 3), activation='relu'))
    _model.add(BatchNormalization())
    _model.add(MaxPooling2D(pool_size=(2, 2)))
    _model.add(Dropout(0.25))

    _model.add(Conv2D(64, (3, 3), padding='same', activation='relu'))
    _model.add(Conv2D(64, (3, 3), activation='relu'))
    _model.add(BatchNormalization())
    _model.add(MaxPooling2D(pool_size=(2, 2)))
    _model.add(Dropout(0.25))

    _model.add(Conv2D(128, (3, 3), padding='same', activation='relu'))
    _model.add(Conv2D(128, (3, 3), activation='relu'))
    _model.add(BatchNormalization())
    _model.add(MaxPooling2D(pool_size=(2, 2)))
    _model.add(Dropout(0.25))

    _model.add(Flatten())
    _model.add(Dense(512, activation='relu'))
    _model.add(Dropout(0.5))

    _model.add(Dense(categories_len, activation='softmax'))

    _model.summary()

    # `learning_rate` replaces the deprecated `lr` keyword.
    # NOTE(review): `decay` (per-step lr decay) is kept for behavior parity but
    # is removed from non-legacy optimizers in TF >= 2.11 — verify against the
    # pinned TensorFlow version.
    _model.compile(loss='categorical_crossentropy', optimizer=keras.optimizers.RMSprop(learning_rate=0.0001, decay=1e-6),
                   metrics=['accuracy'])
    return _model


def train_with_augmentation():
    """Train the ResNet-20 with on-the-fly data augmentation.

    Resumes from previously saved weights when present, trains with random
    shifts and horizontal flips, saves the final weights, and returns the
    test-set evaluation (``model.evaluate`` result).
    """
    datagen = ImageDataGenerator(
        width_shift_range=0.1,
        height_shift_range=0.1,
        shear_range=0.,
        zoom_range=0.,
        fill_mode='nearest',
        horizontal_flip=True,
        vertical_flip=False,
        rescale=None,
        validation_split=0.0
    )
    datagen.fit(train_data)

    _model = resnet_v1(input_shape=train_data.shape[1:], depth=20)
    # Warm-start from the last saved weights if a previous run finished.
    if os.path.exists(_model_path):
        _model.load_weights(filepath=_model_path)

    lr_scheduler = LearningRateScheduler(lr_schedule)

    lr_reducer = ReduceLROnPlateau(factor=np.sqrt(0.1),
                                   cooldown=0,
                                   patience=5,
                                   min_lr=0.5e-6)

    # Create the output directory BEFORE training: ModelCheckpoint writes into
    # it after every improving epoch and fails if it is missing. (Previously
    # this ran only after fit, too late for the checkpoint callback.)
    os.makedirs(_model_dir, exist_ok=True)

    # Model.fit accepts generators directly; fit_generator is deprecated and
    # removed in recent TensorFlow releases.
    _model.fit(datagen.flow(train_data, train_label_one_hot, batch_size=batch_size), epochs=epochs,
               validation_data=(test_data, test_label_one_hot),
               callbacks=[tensorboard_callback, model_checkpoint_callback, lr_reducer, lr_scheduler],
               workers=4)

    _model.save_weights(_model_path)
    return _model.evaluate(test_data, test_label_one_hot, verbose=0)


def load_model_weight(model_path=_model_path):
    """Rebuild the ResNet-20 architecture and load saved weights.

    :param model_path: path to an .h5 weights file (defaults to the
        final weights written by ``train_with_augmentation``)
    :return: the model with weights loaded
    """
    net = resnet_v1(input_shape=train_data.shape[1:], depth=20)
    net.load_weights(model_path)
    return net


if __name__ == '__main__':
    # Train, then report the final [loss, accuracy] on the test set.
    print(train_with_augmentation())
