import uuid
from datetime import datetime
from os.path import join

import numpy as np
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.models import Model, save_model
from tensorflow.keras.optimizers import Adam, Adagrad, RMSprop, SGD

from GCNREC_model import build_gcnrec_model
from callbacks import UpdateInnerGraph, CustomEarlyStop
from citeu_data import CiteuData
from database_utils import auto_insert_database
from metrics import metrics_function
from other_utils import function_timer, now_time, send_email, print_dict, mkdir


def experiment(**kwargs):
    """Run one end-to-end GCNREC experiment.

    Pipeline: load the CiteULike data, build and compile the model, train
    with early stopping and periodic inner-graph updates, evaluate ranking
    metrics on the test split, persist the trained model and the experiment
    record to the database, then e-mail a summary report.

    All hyper-parameters arrive as keyword arguments; see the ``exp_param``
    dict in ``__main__`` for the full expected key set. The whole kwargs
    dict is also stored verbatim in the experiment record.
    """
    whole_begin_time = datetime.now()

    # hyper-parameters (all required keys — a missing one raises KeyError early)
    sim_percentile = kwargs['sim_percentile']
    construct_graph_with_feature = kwargs['construct_graph_with_feature']
    emb_dim = kwargs['emb_dim']
    graph_dim = kwargs['graph_dim']
    bpr_loss_lambda = kwargs['bpr_loss_lambda']
    learner = kwargs['learner']
    learning_rate = kwargs['learning_rate']
    epochs = kwargs['epochs']
    update_patience = kwargs['update_patience']
    update_sim_percentile = kwargs['update_sim_percentile']
    early_stop_metrics = kwargs['early_stop_metrics']
    early_stop_patience = kwargs['early_stop_patience']
    early_stop_min_delta = kwargs['early_stop_min_delta']
    early_stop_loss_min_delta = kwargs['early_stop_loss_min_delta']
    exp_data = {}
    exp_data.update(kwargs)  # record every hyper-parameter in the DB row

    # load data
    data: CiteuData
    data, load_time = function_timer(CiteuData)(sim_percentile=sim_percentile,
                                                construct_graph_with_feature=construct_graph_with_feature,
                                                graph_in_dense=False)
    print('load data time:', load_time)

    # build model
    def building():
        """Build and compile the GCNREC model; returns the compiled model."""
        _model = build_gcnrec_model(num_users=data.num_user,
                                    num_items=data.num_item,
                                    graph_uu=data.train_graph_uu,
                                    graph_ii=data.train_graph_ii,
                                    graph_ui=data.train_graph_ui,
                                    item_features=data.item_features,
                                    emb_dim=emb_dim, graph_dim=graph_dim, bpr_loss_lambda=bpr_loss_lambda)

        # Instantiate only the requested optimizer (the old code built all
        # four just to pick one). `lr` is a deprecated alias in tf.keras,
        # so pass `learning_rate` explicitly. Unknown learner -> KeyError,
        # same as before.
        optimizer_cls = {'adagrad': Adagrad,
                         'rmsprop': RMSprop,
                         'adam': Adam,
                         'sgd': SGD}[learner.lower()]

        _model.compile(optimizer=optimizer_cls(learning_rate=learning_rate))
        return _model

    model: Model
    model, build_time = function_timer(building)()
    print('build model time:', build_time)

    # training: one Keras "epoch" below is `update_patience` generator steps,
    # so the total number of optimizer steps stays ~`epochs` (integer division).
    step = update_patience
    epochs = epochs // step

    custom_early_stop = CustomEarlyStop(train_mat=data.train_graph_ui,
                                        val_mat=data.val_graph_ui,
                                        metrics=early_stop_metrics,
                                        min_delta=early_stop_min_delta,
                                        loss_min_delta=early_stop_loss_min_delta,
                                        patience=early_stop_patience)

    early_stop = EarlyStopping(monitor='loss')
    update_graph = UpdateInnerGraph(update_time='epoch',
                                    patience=1,
                                    sim_percentile=update_sim_percentile)

    pos_g, neg_g = data.get_pos_neg_sample_generator()
    # Full id ranges shaped (1, n): the model scores every user/item pair in
    # one "batch" per step.
    user = np.arange(0, data.num_user)[np.newaxis, :]
    item = np.arange(0, data.num_item)[np.newaxis, :]

    def generator():
        """Yield ([user_ids, item_ids, pos_sample, neg_sample], None) forever."""
        while True:
            pos = np.array(next(pos_g))[np.newaxis, :]
            neg = np.array(next(neg_g))[np.newaxis, :]
            yield [user, item, pos, neg], None

    # BUGFIX: `custom_early_stop` was built but never registered, so the
    # configured metric-based early stopping never ran; it is now passed to
    # fit(). BUGFIX: `batch_size` must not be given when fitting from a
    # generator (the generator defines the batches), so it was dropped.
    history, train_time = function_timer(model.fit)(generator(),
                                                    steps_per_epoch=step,
                                                    callbacks=[early_stop, custom_early_stop, update_graph],
                                                    epochs=epochs,
                                                    shuffle=False)
    print('training time:', train_time)
    if early_stop.stopped_epoch > 0:
        exp_data['stopped_epoch'] = early_stop.stopped_epoch

    # evaluating: precision/recall/MAP/NDCG at several cutoffs on the test split
    ks = [10, 20, 40, 60]
    metrics = ['precision', 'recall', 'map', 'ndcg']
    metrics_at_k = ['{}@{}'.format(m, k) for m in metrics for k in ks]
    evaluate = metrics_function(train_mat=data.train_graph_ui,
                                test_mat=data.test_graph_ui,
                                metrics=metrics_at_k)

    def evaluating():
        """Score every user/item pair and compute the ranking metrics."""
        rating = model.predict([user, item])
        return evaluate(rating)

    evaluation_result, evaluate_time = function_timer(evaluating)()
    print('evaluate time:', evaluate_time)
    print('evaluation_result:')
    print(print_dict(evaluation_result))

    # report and store: model goes to ./Trained/<name>/<uuid>.h5, the
    # experiment record goes to the `gcnrec_test` database table
    save_model_root = './Trained'
    date_now, date_now_str = now_time()
    model_name = 'gcnrec_[{},{}]_{}'.format(emb_dim, graph_dim, date_now_str)
    # Deterministic uuid5 from the model name, with dashes stripped for the path.
    model_uuid = str(uuid.uuid5(uuid.NAMESPACE_X500, model_name)).replace('-', '')
    model_save_path = join(save_model_root, model_name, model_uuid + '.h5')
    mkdir(join(save_model_root, model_name))
    _, save_model_time = function_timer(save_model)(model, model_save_path, save_format='h5')

    exp_data['model_path'] = model_save_path
    exp_data['model_uuid'] = model_uuid

    exec_time = {'datetime': date_now,
                 'load_time': load_time,
                 'train_time': train_time,
                 'evaluate_time': evaluate_time}

    exp_data.update(evaluation_result)
    exp_data.update(exec_time)
    # Local import keeps credentials out of module import time / test imports.
    from sensitive_info import database_config, email_config
    _, save_data_time = function_timer(auto_insert_database)(database_config, exp_data, 'gcnrec_test')

    save_time = save_model_time + save_data_time
    print('save time:', save_time)

    whole_time = datetime.now() - whole_begin_time

    report = 'whole time: {}'.format(whole_time) + '\n'
    if 'stopped_epoch' in exp_data:
        report += 'early stopped!! stopped epoch: {}'.format(exp_data['stopped_epoch']) + '\n'
    report += '=' * 20 + '\n'
    report += print_dict(exec_time) + '\n' + 'save time : {}'.format(save_time) + '\n'
    report += '=' * 20 + '\n'
    report += print_dict(evaluation_result)
    print(report)

    send_email(receiver='haoran.x@outlook.com',
               title='GCNREC实验结束',
               text=report,
               **email_config)


if __name__ == '__main__':
    import tensorflow as tf

    # Restrict TF to the first GPU (plus all CPUs) and enable on-demand GPU
    # memory growth so the process does not grab the whole card up front.
    gpu_list = tf.config.list_physical_devices(device_type='GPU')
    cpu_list = tf.config.list_physical_devices(device_type='CPU')
    # BUGFIX: previously `gpu_list[0]` raised IndexError on CPU-only machines;
    # skip GPU configuration entirely when no GPU is present.
    if gpu_list:
        gpu = gpu_list[0]
        tf.config.set_visible_devices([gpu] + cpu_list)
        tf.config.experimental.set_memory_growth(gpu, enable=True)

    exp_param = {'sim_percentile': 99,
                 'construct_graph_with_feature': False,
                 'emb_dim': 64,
                 'graph_dim': 32,
                 'bpr_loss_lambda': 1e-4,
                 'learner': 'adam',
                 'learning_rate': 0.001,
                 'epochs': 10,
                 'update_patience': 5,
                 'update_sim_percentile': 99,
                 'early_stop_metrics': ['pre@20', 're@20', 'map@20', 'ndcg@20'],
                 'early_stop_min_delta': 0.0,
                 'early_stop_loss_min_delta': 0.01,
                 'early_stop_patience': 100}
    experiment(**exp_param)
