from keras.callbacks import ModelCheckpoint
from tensorflow.keras.callbacks import TensorBoard
from tensorflow.keras.layers import *
from tensorflow.keras.models import *
from tensorflow.keras.optimizers import *
from tensorflow.keras.callbacks import EarlyStopping, CSVLogger, ModelCheckpoint
from tensorflow.keras.optimizers import *
from tensorflow.keras import regularizers, activations, losses

import os

from utils.coding import swap_axis

import data_pre.datasets as datasets
from data_pre.data import create_validate_code
from data_pre.datasets import generate_img


def get_model():
    """Build and compile the 4-character captcha CNN.

    Input: grayscale images of shape (30, 120, 1).
    Output: four independent 52-way softmax heads named 'c1'..'c4',
    one per character position (52 classes — presumably a-z + A-Z;
    confirm against the label encoding in data_pre).

    Returns:
        A compiled tf.keras Model.
    """
    data_input = Input(shape=(30, 120, 1))

    x = BatchNormalization()(data_input)

    # Three conv stages of two conv layers each; spatial size is halved
    # after every stage by max-pooling.
    for n_cnn in (2, 2, 2):
        for _ in range(n_cnn):
            # No activation on the conv itself: use the conventional
            # conv -> BN -> ReLU ordering. (The original passed
            # activation='relu' to Conv2D AND applied Activation('relu')
            # after BatchNormalization, i.e. ReLU twice per layer.)
            x = Conv2D(32, kernel_size=3, padding='same')(x)
            x = BatchNormalization()(x)
            x = Activation('relu')(x)
        x = MaxPooling2D(pool_size=2, padding='same')(x)

    content = Flatten()(x)

    def _head(name):
        # One classifier head per character position; all four heads
        # share the flattened conv features.
        h = Dense(64, activation='relu',
                  kernel_regularizer=regularizers.l2(0.01))(content)
        h = Dropout(0.2)(h)
        return Dense(52, activation='softmax', name=name)(h)

    out = [_head(name) for name in ('c1', 'c2', 'c3', 'c4')]

    model = Model(inputs=data_input, outputs=out)

    model.compile(loss=losses.categorical_crossentropy,
                  optimizer=Adam(0.01, amsgrad=True),
                  metrics=['accuracy'])

    model.summary()
    return model


def solve():
    """Train the captcha model, logging metrics to CSV and checkpointing
    the best weights (by validation loss) to 'cnn_best_bak.h5'."""
    x_train, y_train, x_test, y_test = datasets.read_data()

    callbacks = [
        CSVLogger('cnn.csv'),
        # save_best_only keeps just the epoch with the lowest val_loss.
        ModelCheckpoint('cnn_best_bak.h5', save_best_only=True, verbose=1),
    ]

    model = get_model()
    # swap_axis presumably splits the per-sample label array into one
    # target array per output head (c1..c4) — confirm against
    # utils.coding.swap_axis. list(...) replaces the redundant
    # [item for item in ...] copy loop.
    model.fit(x_train, list(swap_axis(y_train)),
              batch_size=256, shuffle=True,
              validation_data=(x_test, list(swap_axis(y_test))),
              epochs=200,
              callbacks=callbacks)


def _main():
    """Script entry point: build the HDF5 dataset, then train."""
    # generate_img(20000)  # uncomment to regenerate raw captcha images first
    datasets.make_h5()
    solve()


if __name__ == '__main__':
    _main()

