from keras.models import Sequential
from keras.layers import Conv3D, BatchNormalization, Activation, MaxPooling3D, SpatialDropout3D, Flatten, Dense, Dropout
from keras.regularizers import l2
from keras.optimizers import Adadelta
from keras.callbacks import Callback, ModelCheckpoint
import numpy as np
from train_config import get_config
import logging
import os
import time
import datetime
from easy_io import write_pkl_file
import global_config


def _add_conv_unit(model, nb_filters, weight_decay, input_shape=None):
    """Append one Conv3D(3x3x3, same-padding) -> BatchNorm -> ReLU unit.

    ``input_shape`` is forwarded to the Conv3D layer only for the very first
    layer of the network (Keras 1.x Sequential requirement).
    """
    conv_kwargs = dict(init='he_normal', border_mode='same',
                       W_regularizer=l2(weight_decay))
    if input_shape is not None:
        conv_kwargs['input_shape'] = input_shape
    model.add(Conv3D(nb_filters, 3, 3, 3, **conv_kwargs))
    model.add(BatchNormalization())
    model.add(Activation('relu'))


def _add_fc_unit(model, nb_units, weight_decay):
    """Append one Dropout(0.5) -> Dense -> BatchNorm -> ReLU unit."""
    model.add(Dropout(0.5))
    model.add(Dense(nb_units, init='he_normal', W_regularizer=l2(weight_decay)))
    model.add(BatchNormalization())
    model.add(Activation('relu'))


def build_model(input_shape, base_nb_filters, weight_decay):
    """Build a VGG-style 3D CNN for binary (softmax) classification.

    Architecture: four conv stages (2, 2, 3, 3 conv units with
    base_nb_filters * {1, 2, 4, 8} filters), each followed by 2x2x2 max
    pooling; stages after the first are preceded by SpatialDropout3D(0.25).
    The head is Flatten -> two FC units of base_nb_filters*16 -> Dropout ->
    Dense(2) -> softmax.  All weight layers use He-normal init and l2
    weight decay.

    :param input_shape: shape of one input volume (passed to the first Conv3D).
    :param base_nb_filters: filter count of the first stage; later stages scale it.
    :param weight_decay: l2 regularization factor for all conv/dense weights.
    :return: an uncompiled keras ``Sequential`` model.
    """
    model = Sequential()

    # Stage 1: no spatial dropout in front (it would drop raw input channels).
    _add_conv_unit(model, base_nb_filters, weight_decay, input_shape=input_shape)
    _add_conv_unit(model, base_nb_filters, weight_decay)
    model.add(MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2)))
    print(model.output_shape)  # debug trace of the shrinking volume

    # Stages 2-4: (filter multiplier, number of conv units) per stage.
    for multiplier, nb_conv_units in ((2, 2), (4, 3), (8, 3)):
        model.add(SpatialDropout3D(0.25))
        for _ in range(nb_conv_units):
            _add_conv_unit(model, base_nb_filters * multiplier, weight_decay)
        model.add(MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2)))
        print(model.output_shape)  # debug trace of the shrinking volume

    # Classifier head.
    model.add(Flatten())
    _add_fc_unit(model, base_nb_filters * 16, weight_decay)
    _add_fc_unit(model, base_nb_filters * 16, weight_decay)
    model.add(Dropout(0.5))
    model.add(Dense(2, init='he_normal', W_regularizer=l2(weight_decay)))
    model.add(Activation('softmax'))

    return model


class Statistician(Callback):
    """Keras callback that evaluates the model on a validation generator at
    the end of every epoch, pickles the predicted probabilities, and logs a
    formatted statistics line (including the epoch's wall-clock duration).
    """

    def __init__(self, valid_generator, nb_valid_samples, statistical_function,
                 result_saveto, logging_str, logger=None):
        """
        :param valid_generator: generator yielding validation batches for
            ``predict_generator``.
        :param nb_valid_samples: number of samples to draw from the generator.
        :param statistical_function: callable mapping the predicted
            probabilities to a dict of statistics (used as format kwargs).
        :param result_saveto: path template with an ``{epoch}`` placeholder
            for the pickled probabilities.
        :param logging_str: ``str.format`` template; receives ``epoch``,
            ``now``, ``time`` plus the statistics and Keras ``logs`` entries.
        :param logger: optional logger; falls back to ``print`` when absent.
        """
        self._valid_generator = valid_generator
        self._nb_valid_samples = nb_valid_samples
        self._statistical_function = statistical_function
        self._result_saveto = result_saveto
        self._logging_str = logging_str
        if logger:
            self._print = logger.info
        else:
            self._print = print
        self._start_time = None  # set in on_epoch_begin
        super().__init__()

    def on_epoch_begin(self, epoch, logs=None):
        # Remember when the epoch started so on_epoch_end can report duration.
        self._start_time = time.time()

    def on_epoch_end(self, epoch, logs=None):
        # Fix: Keras may pass logs=None; **None would raise TypeError below.
        logs = logs or {}
        probs = self.model.predict_generator(self._valid_generator, self._nb_valid_samples)
        write_pkl_file(self._result_saveto.format(epoch=epoch), probs)
        statistics = self._statistical_function(probs)
        self._print(self._logging_str.format(epoch=epoch, now=datetime.datetime.today(),
                                             time=int(time.time()-self._start_time), **statistics, **logs))


class WarmPrinter(Callback):
    """Reassurance callback: announces that training has started and reports
    the first batch that completes, so slow startups are visibly alive.
    Only a single batch notice is ever printed per callback instance.
    """

    def __init__(self):
        super().__init__()
        self._pending_batch_notice = True  # consumed by the first on_batch_end

    def on_train_begin(self, logs=None):
        print('Process begin...')

    def on_batch_end(self, batch, logs=None):
        if not self._pending_batch_notice:
            return
        self._pending_batch_notice = False
        print('Batch {} finished...'.format(batch))


def train_model(name, valid_folds, batchsize, crop_shape, shift_range, lr, nb_epoches, seed, **kwargs):
    """Configure logging, build/compile the 3D CNN and train it.

    Wires up console + per-run file logging, pulls data generators from
    ``get_config``, builds the model from ``kwargs`` (plus the config's
    ``input_shape``), and runs ``fit_generator`` with per-epoch model
    checkpoints and validation statistics.  A ``KeyboardInterrupt`` during
    training stops it gracefully instead of propagating.

    :param name: run name; used for the log file and the model/result folders.
    :param valid_folds: folds held out for validation (forwarded to get_config).
    :param batchsize: training batch size (forwarded to get_config).
    :param crop_shape: input crop shape (forwarded to get_config).
    :param shift_range: augmentation shift range (forwarded to get_config).
    :param lr: Adadelta learning rate.
    :param nb_epoches: number of training epochs.
    :param seed: numpy RNG seed.
    :param kwargs: remaining ``build_model`` arguments
        (``base_nb_filters``, ``weight_decay``).
    """
    # Snapshot all arguments for logging, flattening the extra model kwargs.
    args = locals()
    args.update(args.pop('kwargs'))

    logger = logging.getLogger('')
    logger.setLevel(level=logging.DEBUG)
    console = logging.StreamHandler()
    console.setLevel(level=logging.INFO)
    os.makedirs(global_config.log_folder, exist_ok=True)
    file_handler = logging.FileHandler(os.path.join(global_config.log_folder, name + '.log'))
    file_handler.setLevel(level=logging.INFO)

    formatter = logging.Formatter(fmt='%(message)s')
    console.setFormatter(formatter)
    file_handler.setFormatter(formatter)

    logger.addHandler(console)
    logger.addHandler(file_handler)
    try:
        logger.info('')
        logger.info('*' * 20)
        for k in sorted(args.keys()):
            logger.info('{k}: {v}'.format(k=k, v=args[k]))
        logger.info('*' * 20)
        logger.info('')

        config_dict = get_config(valid_folds, batchsize, crop_shape, shift_range, seed, logger)
        train_generator = config_dict['train_generator']
        nb_train_samples = config_dict['nb_train_samples']
        valid_generator = config_dict['valid_generator']
        nb_valid_samples = config_dict['nb_valid_samples']
        statistical_function = config_dict['statistical_function']
        logging_str = config_dict['logging_str']

        # NOTE(review): the seed is applied only after get_config has already
        # built the generators — any randomness inside get_config is not
        # covered by it.  Kept as-is to preserve existing behavior; confirm
        # whether get_config seeds itself via its ``seed`` argument.
        np.random.seed(seed)

        kwargs['input_shape'] = config_dict['input_shape']
        model = build_model(**kwargs)
        model.compile(
            optimizer=Adadelta(lr=lr),
            loss='categorical_crossentropy',
            metrics=['accuracy']
        )

        os.makedirs(os.path.join(global_config.model_folder, name), exist_ok=True)
        model_saveto = os.path.join(global_config.model_folder, name, 'epoch-{epoch:04d}.hdf5')
        os.makedirs(os.path.join(global_config.result_folder, name), exist_ok=True)
        result_saveto = os.path.join(global_config.result_folder, name, 'epoch-{epoch:04d}.pkl')
        try:
            model.fit_generator(
                generator=train_generator,
                samples_per_epoch=nb_train_samples,
                nb_epoch=nb_epoches,
                verbose=1,
                callbacks=[
                    ModelCheckpoint(model_saveto),
                    Statistician(valid_generator, nb_valid_samples, statistical_function,
                                 result_saveto, logging_str, logger),
                    WarmPrinter(),
                ],
            )
        except KeyboardInterrupt:
            # Allow a manual stop without a traceback; checkpoints up to the
            # last finished epoch are already on disk.
            pass
    finally:
        # Fix: detach the handlers added by this call so repeated invocations
        # in one process do not duplicate every log line, and close the file
        # handler to release its open log file.
        logger.removeHandler(console)
        logger.removeHandler(file_handler)
        file_handler.close()
