from __future__ import absolute_import
from __future__ import print_function

import importlib
import numpy as np
import os
import shutil
import sys
from distutils import dir_util
from sklearn.cross_validation import KFold

from dataset import load
from utils import data_utils, submission_utils
import utils.plotly_utils as plot
import utils.s3_utils as cloud

# --- File names that make up an experiment directory ---
INIT_PY = '__init__.py'  # makes the experiment importable as a package
MODEL_PY = 'model.py'  # must expose get_model()
PREPROCESS_PY = 'preprocess.py'  # must expose pre_train() and per_epoch()
POSTPROCESS_PY = 'postprocess.py'  # must expose to_cdf()
CUSTOM_PY = 'custom.py'
TRAINING_PY = 'training.py'  # must expose get_training_metadata()
README_MD = 'README.md'
# --- Artifacts written into the experiment's metadata directory ---
MODEL_JSON = 'model.json'  # serialized model architecture
TRAINING_JSON = 'training.json'  # training configuration + performance log
MODEL_SYS_W = 'weights_systole.hdf5'
MODEL_DIAS_W = 'weights_diastole.hdf5'
SUBMISSION_CSV = 'submission.csv'
META_DIR = 'metadata'
EXPERIMENTS_DIR = 'experiments'

# When True, loss/CRPS graphs are built and updated during training.
visualize = True


def create(experiment_name):
    '''Creates a fresh experiment directory from the template
        Attributes:
        experiment_name: name of the new experiment under EXPERIMENTS_DIR

        Creates the directory, an empty __init__.py (so it is importable),
        copies the template source files, and adds an empty metadata dir.
        Raises OSError if the experiment directory already exists.
    '''
    experiment_path = os.path.join(EXPERIMENTS_DIR, experiment_name).replace('\\', '/')
    os.mkdir(experiment_path)
    py_files_create = [INIT_PY]
    py_files_copy = [MODEL_PY, PREPROCESS_PY, POSTPROCESS_PY, CUSTOM_PY, TRAINING_PY, README_MD]
    for py_file in py_files_create:
        # 'with' guarantees the handle is closed even on error;
        # opening for write creates the empty file.
        with open(experiment_path + '/' + py_file, 'wb'):
            pass
    for py_file in py_files_copy:
        shutil.copyfile('experiment_template/' + py_file, experiment_path + '/' + py_file)
    os.mkdir(experiment_path + '/' + META_DIR)
    print('Experiment \'{0}\' created.'.format(experiment_name))

def learning_rate_changer(per_epoch_number, current_epoch, learning_rate, learning_multiplier):
    '''Changes the learning rate during training (step decay)
        Attributes:
        per_epoch_number: Number of epochs between changes
        current_epoch: Index of the current epoch (0-based)
        learning_rate: The current learning rate
        learning_multiplier: The parameter with which we decrease the learning rate e.g. learning_multiplier=0.1 then
        new_learning_rate=old_learning_rate*0.1

        Returns the (possibly decayed) learning rate for this epoch.
    '''
    # Never decay at the very first epoch.
    if current_epoch == 0:
        return learning_rate
    # BUG FIX: the decay interval was hard-coded to 75, silently ignoring
    # the per_epoch_number parameter.
    if current_epoch % per_epoch_number == 0:
        return learning_rate * learning_multiplier

    return learning_rate

def run(experiment_name, cont=False, submission_checkpoint=True, learning_rate_decay=True):
    '''Trains the systole and diastole models of an experiment
        Attributes:
        experiment_name: name of an experiment directory under EXPERIMENTS_DIR
        cont: when True, resume from previously saved weights and do not
        re-save model.json / training.json
        submission_checkpoint: when True, write a submission whenever a new
        best validation loss is reached (Graph models only, from iteration 20)
        learning_rate_decay: when True, decay the learning rate via
        learning_rate_changer() on every iteration
    '''
    print('Running experiment \'{0}\''.format(experiment_name))

    if visualize:
        print("Building graphs")
        plot.build_loss_val_graph(experiment_name + "/loss_graph")
        plot.build_crps_graph(experiment_name + "/crps_graph")

    experiment_path = os.path.join(EXPERIMENTS_DIR, experiment_name).replace('\\', '/')
    # load model: two identical architectures, trained separately per target
    print('Loading models...')
    model_module = import_module(experiment_path, MODEL_PY)
    model_systole = model_module.get_model()
    model_diastole = model_module.get_model()
    # save model architecture to JSON (fresh runs only)
    if not cont:
        model_json = model_systole.to_json(indent=4)
        # NOTE: 'wb' with a str payload works under Python 2 (this file's era);
        # Python 3 would require encoding the string first.
        f = open(experiment_path + '/' + META_DIR + '/' + MODEL_JSON, 'wb')
        f.write(model_json)
        f.close()
        print('Saved model to model.json')
    # get training metadata (optimizer, loss, batch size, epoch counts, ...)
    training_module = import_module(experiment_path, TRAINING_PY)
    training_metadata = training_module.get_training_metadata(cont)
    if not cont:
        training_metadata.save_to_json(experiment_path + '/' + META_DIR + '/' + TRAINING_JSON)
        print('Saved training metadata to training.json')
    # compile models
    # NOTE(review): both models are compiled with the same
    # training_metadata.optimizer object — presumably intentional so the
    # decayed learning rate applies to both; confirm.
    print('Compiling models...')
    model_systole.compile(optimizer=training_metadata.optimizer, loss=training_metadata.loss)
    model_diastole.compile(optimizer=training_metadata.optimizer, loss=training_metadata.loss)
    # load weights (if continue experiment)
    if cont:
        model_systole.load_weights(experiment_path + '/' + META_DIR + '/' + MODEL_SYS_W)
        model_diastole.load_weights(experiment_path + '/' + META_DIR + '/' + MODEL_DIAS_W)
    # import pre-process module
    preprocess_module = import_module(experiment_path, PREPROCESS_PY)
    # import post-process module
    postprocess_module = import_module(experiment_path, POSTPROCESS_PY)

    # load data
    print('Loading data...')
    # X - input
    # y - output (column 0: systole, column 1: diastole — see usage below)
    # m - metadata (for input)
    X, y, m = load.load_train_data(shuffle=True)

    # pre-process data before training (applied once, up front)
    print('Pre-processing data before training...')
    X, y = preprocess_module.pre_train(X, y)

    # split data: 20% held out for validation
    X_train, y_train, X_test, y_test, m_train, m_test = load.split_data(X, y, m, split_ratio=0.2)
    # NOTE(review): kfold_crossvalidation is never used below — dead code?
    # Confirm before removing.
    kfold_crossvalidation = KFold(n=len(X_train), n_folds=5)
    systole_val_loss_min = sys.float_info.max
    diastole_val_loss_min = sys.float_info.max

    print('Model type {0}'.format(type(model_systole).__name__))
    graph = type(model_systole).__name__ == 'Graph'  # True if model is Graph

    # train
    # note: range(nb_iter+1) runs nb_iter+1 iterations (0..nb_iter inclusive)
    print('-' * 50)
    print('Training')
    print('-' * 50)
    for i in range(training_metadata.nb_iter+1):
        print('-' * 50)
        print('Iteration {0}'.format(i))
        print('-' * 50)

        # pre-process data per epoch (e.g. augmentation; validation set untouched)
        print('Pre-processing data per epoch...')
        X_train_e = preprocess_module.per_epoch(X_train)
        if learning_rate_decay:
            # Step decay: every 75 iterations multiply the lr by 0.1.
            # NOTE(review): get_lr() is unusual for a shared variable —
            # presumably it mirrors get_value(); confirm against the optimizer.
            lr = learning_rate_changer(75, i, model_diastole.optimizer.lr.get_lr(), 0.1)
            model_systole.optimizer.lr.set_value(lr)
            model_diastole.optimizer.lr.set_value(lr)
            print("Learning rate: "+str(lr))


        # fit — Graph models take a dict of named inputs/outputs,
        # Sequential-style models take (X, y) positionally
        if graph:
            print('Fitting systole model...')
            hist_systole = model_systole.fit({'conv_input': X_train_e, 'meta_input': m_train, 'output': y_train[:, 0]},
                                             shuffle=training_metadata.shuffle, verbose=1,
                                             validation_data={'conv_input': X_test, 'meta_input': m_test,
                                                              'output': y_test[:, 0]},
                                             batch_size=training_metadata.batch_size,
                                             nb_epoch=training_metadata.nb_epoch)
            print('Fitting diastole model...')
            hist_diastole = model_diastole.fit({'conv_input': X_train_e, 'meta_input': m_train, 'output': y_train[:, 1]},
                                               shuffle=training_metadata.shuffle, verbose=1,
                                               validation_data={'conv_input': X_test, 'meta_input': m_test,
                                                                'output': y_test[:, 1]},
                                               batch_size=training_metadata.batch_size,
                                               nb_epoch=training_metadata.nb_epoch)
        else:
            print('Fitting systole model...')
            hist_systole = model_systole.fit(X_train_e, y_train[:, 0], shuffle=training_metadata.shuffle, verbose=1,
                                             validation_data=(X_test, y_test[:, 0]),
                                             batch_size=training_metadata.batch_size,
                                             nb_epoch=training_metadata.nb_epoch)
            print('Fitting diastole model...')
            hist_diastole = model_diastole.fit(X_train_e, y_train[:, 1], shuffle=training_metadata.shuffle, verbose=1,
                                               validation_data=(X_test, y_test[:, 1]),
                                               batch_size=training_metadata.batch_size,
                                               nb_epoch=training_metadata.nb_epoch)

        # sigmas for predicted data, actually loss function values (RMSE);
        # these are passed to to_cdf() as the spread of the predicted CDFs
        loss_systole = hist_systole.history['loss'][-1]
        loss_diastole = hist_diastole.history['loss'][-1]
        val_loss_systole = hist_systole.history['val_loss'][-1]
        val_loss_diastole = hist_diastole.history['val_loss'][-1]

        print('Evaluating CRPS...')
        X_train_input = X_train_e
        X_test_input = X_test
        if graph:
            X_train_input = {'conv_input': X_train_e, 'meta_input': m_train}
            X_test_input = {'conv_input': X_test, 'meta_input': m_test}

        # evaluate CRPS on both the (augmented) train set and the validation set
        systole_pred = model_systole.predict(X_train_input, batch_size=training_metadata.batch_size, verbose=1)
        diastole_pred = model_diastole.predict(X_train_input, batch_size=training_metadata.batch_size, verbose=1)
        val_systole_pred = model_systole.predict(X_test_input, batch_size=training_metadata.batch_size, verbose=1)
        val_diastole_pred = model_diastole.predict(X_test_input, batch_size=training_metadata.batch_size, verbose=1)

        # ground truth CDFs: systole and diastole targets concatenated,
        # matching the concatenation of predictions below
        y_train_cdf = postprocess_module.to_cdf(np.concatenate((y_train[:, 0], y_train[:, 1])))
        y_test_cdf = postprocess_module.to_cdf(np.concatenate((y_test[:, 0], y_test[:, 1])))

        if graph:
            # Graph.predict returns a dict keyed by output name
            systole_pred = systole_pred['output']
            val_systole_pred = val_systole_pred['output']
            diastole_pred = diastole_pred['output']
            val_diastole_pred = val_diastole_pred['output']

        sys_cdf = postprocess_module.to_cdf(systole_pred, loss_systole)
        sys_val_cdf = postprocess_module.to_cdf(val_systole_pred, val_loss_systole)
        dias_cdf = postprocess_module.to_cdf(diastole_pred, loss_diastole)
        dias_val_cdf = postprocess_module.to_cdf(val_diastole_pred, val_loss_diastole)

        crps = data_utils.crps(y_train_cdf, np.concatenate((sys_cdf, dias_cdf)))
        print('CRPS={0}'.format(crps))
        val_crps = data_utils.crps(y_test_cdf, np.concatenate((sys_val_cdf, dias_val_cdf)))
        print('VAL_CRPS={0}'.format(val_crps))

        # update training performance log and save to training.json
        print('Updating performance log...')
        training_metadata.update_performance(hist_systole.history, hist_diastole.history,
                                             [crps.tolist()], [val_crps.tolist()])
        training_metadata.save_to_json(experiment_path + '/' + META_DIR + '/' + TRAINING_JSON)

        # update charts
        if visualize:
            print("Updating graphs")
            plot.update_loss_val_graph(name=experiment_name + '/loss_graph', X=i,
                                       y_diastole_val_loss=val_loss_diastole, y_systole_val_loss=val_loss_systole,
                                       y_diastole_train_loss=loss_diastole, y_systole_train_loss=loss_systole)
            plot.update_crps_graph(name=experiment_name + '/crps_graph', X=i, y_crps=crps, y_val_crps=val_crps)

        # always save the latest weights; the 'best_' copies below are only
        # overwritten when the validation loss improves
        print('Saving weights...')
        model_systole.save_weights(experiment_path + '/' + META_DIR + '/' + MODEL_SYS_W, overwrite=True)
        model_diastole.save_weights(experiment_path + '/' + META_DIR + '/' + MODEL_DIAS_W, overwrite=True)
        # guards against creating two submissions in the same iteration when
        # both models improve at once (reset every iteration by design)
        submitted = False
        # update best val_loss weights
        if hist_systole.history['val_loss'][-1] < systole_val_loss_min:
            systole_val_loss_min = hist_systole.history['val_loss'][-1]
            model_systole.save_weights(experiment_path + '/' + META_DIR + '/best_' + MODEL_SYS_W, overwrite=True)
            if graph and i >= 20 and submission_checkpoint and not submitted:
                # temporary way of getting submissions without loading
                submission(experiment_name, model_diastole, model_systole)
                submitted = True

        if hist_diastole.history['val_loss'][-1] < diastole_val_loss_min:
            diastole_val_loss_min = hist_diastole.history['val_loss'][-1]
            model_diastole.save_weights(experiment_path + '/' + META_DIR + '/best_' + MODEL_DIAS_W, overwrite=True)
            if graph and i >= 20 and submission_checkpoint and not submitted:
                # temporary way of getting submissions without loading
                submission(experiment_name, model_diastole, model_systole)
                submitted = True

        # push the experiment directory (weights, logs, submissions) to S3
        cloud.sync_experiments_upload()


def submission(experiment_name, model_diastole=None, model_systole=None):
    '''Creates the submission CSV for an experiment
        Attributes:
        experiment_name: experiment directory name under EXPERIMENTS_DIR
        model_diastole, model_systole: optionally pass already-trained models;
        when either is None, the best saved weights are loaded from disk
    '''
    print('Creating submission for experiment \'{0}\''.format(experiment_name))
    experiment_path = os.path.join(EXPERIMENTS_DIR, experiment_name).replace('\\', '/')

    print('Loading models...')
    model_module = import_module(experiment_path, MODEL_PY)
    training_module = import_module(experiment_path, TRAINING_PY)
    training_metadata = training_module.get_training_metadata(True)

    # Rebuild and restore the models unless the caller supplied trained ones.
    if model_diastole is None or model_systole is None:
        model_systole = model_module.get_model()
        model_diastole = model_module.get_model()
        print('Loading weights...')
        meta_path = experiment_path + '/' + META_DIR
        model_systole.load_weights(meta_path + '/best_' + MODEL_SYS_W)
        model_diastole.load_weights(meta_path + '/best_' + MODEL_DIAS_W)
        print('Compiling models...')
        model_systole.compile(optimizer=training_metadata.optimizer, loss=training_metadata.loss)
        model_diastole.compile(optimizer=training_metadata.optimizer, loss=training_metadata.loss)

    # pre- and post-processing hooks live in the experiment package
    preprocess_module = import_module(experiment_path, PREPROCESS_PY)
    postprocess_module = import_module(experiment_path, POSTPROCESS_PY)

    is_graph = type(model_systole).__name__ == 'Graph'  # Graph models take dict inputs

    ids, X, m = load.load_validation_data()

    print('Pre-processing data...')
    # single-argument call: no labels exist for validation data, y is unused
    X, y = preprocess_module.pre_train(X)

    model_input = {'conv_input': X, 'meta_input': m} if is_graph else X

    print('Predicting data...')
    pred_systole = model_systole.predict(model_input, batch_size=training_metadata.batch_size, verbose=1)
    pred_diastole = model_diastole.predict(model_input, batch_size=training_metadata.batch_size, verbose=1)

    if is_graph:
        # Graph.predict returns a dict keyed by output name
        pred_systole = pred_systole['output']
        pred_diastole = pred_diastole['output']

    # best validation losses act as the sigmas of the predicted CDFs
    best_systole_loss = np.min(training_metadata.performance['systole']['val_loss'])
    best_diastole_loss = np.min(training_metadata.performance['diastole']['val_loss'])

    cdf_systole = postprocess_module.to_cdf(pred_systole, best_systole_loss)
    cdf_diastole = postprocess_module.to_cdf(pred_diastole, best_diastole_loss)

    sub_systole = submission_utils.accumulate_study_results(ids, cdf_systole)
    sub_diastole = submission_utils.accumulate_study_results(ids, cdf_diastole)

    # write to submission file
    print('Writing to file {0}...'.format(SUBMISSION_CSV))
    submission_utils.create_submission(sub_systole, sub_diastole,
                                       experiment_path + '/' + META_DIR + '/' + SUBMISSION_CSV)
    print('Done')


def copy(existing_experiment_name, new_experiment_name):
    '''Clones an existing experiment into a new one
        Attributes:
        existing_experiment_name: name of the source experiment
        new_experiment_name: name of the clone to create

        Compiled bytecode is dropped and the metadata directory is emptied so
        the clone starts without any training history.
    '''
    print('Copying existing experiment \'{0}\' to \'{1}\''.format(existing_experiment_name, new_experiment_name))
    source_path = os.path.join(EXPERIMENTS_DIR, existing_experiment_name).replace('\\', '/')
    target_path = os.path.join(EXPERIMENTS_DIR, new_experiment_name).replace('\\', '/')

    dir_util.copy_tree(source_path, target_path)

    # remove compiled bytecode carried over from the source experiment
    stale_bytecode = [name for name in os.listdir(target_path) if name.endswith('.pyc')]
    for name in stale_bytecode:
        os.remove(os.path.join(target_path, name))

    # empty the metadata directory of the clone
    target_meta = os.path.join(target_path, META_DIR)
    for name in os.listdir(target_meta):
        os.remove(os.path.join(target_meta, name))


def import_module(experiment_name, file_name):
    '''Imports a python module that lives inside an experiment package
        Attributes:
        experiment_name: slash-separated experiment path (e.g. 'experiments/foo')
        file_name: module file name including the .py extension
    '''
    package_name = experiment_name.replace('/', '.')
    module_name = file_name.replace('.py', '')
    return importlib.import_module('{0}.{1}'.format(package_name, module_name))

if __name__ == '__main__':

    # Command-line dispatch: <command> <experiment_name> [<new_name>]
    if len(sys.argv) > 1:
        # every command requires at least an experiment name
        assert len(sys.argv) >= 3
        command = sys.argv[1]
        if command == 'create':
            create(sys.argv[2])
        elif command == 'run':
            run(sys.argv[2])
        elif command == 'continue':
            run(sys.argv[2], cont=True)
        elif command == 'submission':
            submission(sys.argv[2])
        elif command == 'copy':
            # BUG FIX: a missing second name previously surfaced as an
            # opaque IndexError instead of a clear message.
            assert len(sys.argv) >= 4, 'copy requires <existing_name> <new_name>'
            copy(sys.argv[2], sys.argv[3])
        else:
            # previously unknown commands were silently ignored
            print('Unknown command \'{0}\''.format(command))
