'''
Created on Mar 1, 2020
Pytorch Implementation of LightGCN in
Xiangnan He et al. LightGCN: Simplifying and Powering Graph Convolution Network for Recommendation

@author: Jianbai Ye (gusye@mail.ustc.edu.cn)
'''
import torch
from torch import nn, optim
import numpy as np
from torch import log
# from dataloader import BasicDataset
from time import time
# from models import LightGCN
# from models import PairWiseModel
from sklearn.metrics import roc_auc_score
import random
import os
# from Config import config, FILE_PATH, sampling
from Config import config

def cprint(words: str):
    """Print *words* highlighted (black text on a yellow background) via ANSI escapes."""
    print("\033[0;30;43m" + str(words) + "\033[0m")


def set_seed(seed):
    """Seed every random number generator the project relies on.

    Seeds numpy, the Python stdlib ``random`` module, and torch
    (CPU and, when available, all CUDA devices) so runs are reproducible.

    Args:
        seed: integer seed applied to all generators.
    """
    # The stdlib `random` module is imported at module level but was never
    # seeded here, leaving any random.* based sampling non-reproducible.
    random.seed(seed)
    np.random.seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed(seed)
        torch.cuda.manual_seed_all(seed)
    torch.manual_seed(seed)



def shuffle(*arrays, **kwargs):
    """Shuffle one or more equal-length arrays with a single shared permutation.

    Keyword Args:
        indices (bool): when True, also return the permutation used.

    Returns:
        A shuffled array (single input) or a tuple of shuffled arrays,
        optionally paired with the permutation indices.

    Raises:
        ValueError: if the inputs do not all have the same length.
    """
    require_indices = kwargs.get('indices', False)

    # All inputs must share one length for a common permutation to apply.
    if len({len(arr) for arr in arrays}) != 1:
        raise ValueError('All inputs to shuffle must have '
                         'the same length.')

    perm = np.arange(len(arrays[0]))
    np.random.shuffle(perm)

    if len(arrays) == 1:
        shuffled = arrays[0][perm]
    else:
        shuffled = tuple(arr[perm] for arr in arrays)

    return (shuffled, perm) if require_indices else shuffled


class timer:
    """
    Wall-clock time context manager for code blocks.

    Anonymous usage (durations are pushed onto the shared ``TAPE`` stack):
        with timer():
            do_something()
        elapsed = timer.get()

    Named usage (durations accumulate per name in ``NAMED_TAPE``):
        with timer(name="train"):
            do_something()
        print(timer.dict())
    """
    from time import time  # bound as a class attribute: called as timer.time()
    TAPE = [-1]  # global time record; the leading -1 marks the empty stack
    NAMED_TAPE = {}  # name -> total accumulated seconds across all uses

    @staticmethod
    def get():
        # Pop the most recent anonymous duration; -1 when none was recorded
        # (the sentinel element is never popped).
        if len(timer.TAPE) > 1:
            return timer.TAPE.pop()
        else:
            return -1

    @staticmethod
    def dict(select_keys=None):
        # Format accumulated named timings as "|name:secs|..."; when
        # select_keys is given, only those names are reported (KeyError if absent).
        hint = "|"
        if select_keys is None:
            for key, value in timer.NAMED_TAPE.items():
                hint = hint + f"{key}:{value:.2f}s|"
        else:
            for key in select_keys:
                value = timer.NAMED_TAPE[key]
                hint = hint + f"{key}:{value:.2f}s|"
        return hint

    @staticmethod
    def zero(select_keys=None):
        # Reset accumulated named timings (all names, or only select_keys).
        if select_keys is None:
            for key, value in timer.NAMED_TAPE.items():
                timer.NAMED_TAPE[key] = 0
        else:
            for key in select_keys:
                timer.NAMED_TAPE[key] = 0

    def __init__(self, tape=None, **kwargs):
        if kwargs.get('name'):
            # NOTE(review): truthiness check — an accumulated value of exactly
            # 0. is falsy and gets re-set to 0., which is harmless here.
            timer.NAMED_TAPE[kwargs['name']] = timer.NAMED_TAPE[
                kwargs['name']] if timer.NAMED_TAPE.get(kwargs['name']) else 0.
            self.named = kwargs['name']
            if kwargs.get("group"):
                #TODO: add group function
                pass
        else:
            # Anonymous timer: durations go to the given tape or the shared TAPE.
            self.named = False
            self.tape = tape or timer.TAPE

    def __enter__(self):
        self.start = timer.time()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Named timers accumulate; anonymous timers push onto their tape.
        if self.named:
            timer.NAMED_TAPE[self.named] += timer.time() - self.start
        else:
            self.tape.append(timer.time() - self.start)




def getLabel(ground_truth_list, pred_data):
    """Mark each top-K prediction as a hit (1.0) or miss (0.0).

    Args:
        ground_truth_list: per-user lists of relevant item ids.
        pred_data: per-user lists of the K predicted item ids (same length
            as ``ground_truth_list``; each inner list has the same K).

    Returns:
        float ndarray of shape (num_users, K) with 1.0 where the predicted
        item appears in that user's ground truth.
    """
    r = []
    for ground_truth, predict_topk in zip(ground_truth_list, pred_data):
        # Set membership is O(1) vs O(n) scans of the ground-truth list.
        truth = set(ground_truth)
        pred = np.array([item in truth for item in predict_topk]).astype("float")
        r.append(pred)
    return np.array(r).astype('float')

# ====================end Metrics=============================
# =========================================================

def getFileName(config):
    """Build the checkpoint file path encoding model, dataset and key hyperparameters.

    Ensures ``config["checkpoints_path"]`` exists, then returns the full
    path of the ``.pth.tar`` checkpoint for the configured model.
    """
    ckpt_dir = config["checkpoints_path"]
    # exist_ok=True already tolerates a pre-existing directory.
    os.makedirs(ckpt_dir, exist_ok=True)

    sample_way = "random" if config["sample_way"] == "all_pos_random" else "decare"
    loss = config["loss"]
    if loss == "sauc_for_sample":
        loss = "sauc"

    model = config["model"]
    if model == 'mf':
        file_name = f"mf-{config['dataset']}-{config['latent_dim_rec']}.pth.tar"
    elif model in ('lgn', 'lgn_hash'):
        # lgn and lgn_hash share the same naming scheme, prefixed by the model name.
        file_name = f"{model}-{config['dataset']}-{sample_way}-{loss}-{config['lightGCN_n_layers']}-{config['latent_dim_rec']}.pth.tar"
    else:
        file_name = f"othermodel-{config['dataset']}.pth.tar"
    return os.path.join(ckpt_dir, file_name)

class EarlyStopping:
    '''
    Stop training early when the validation metric (higher is better)
    does not improve for `patience` consecutive calls.
    from: https://github.com/shaheerzaman/Earlystopping_Pytorch/blob/master/pytool.py

    Example:
        # initialize the early_stopping object
        early_stopping = EarlyStopping(patience=5, verbose=True)
        for epoch in xxx:
            xxx
            # early_stopping needs the validation metric to check if it has
            # improved, and if it has, it will make a checkpoint of the model
            early_stopping(valid_metric, model)

            if early_stopping.early_stop:
                print("Early stopping")
                break
    '''

    def __init__(self, patience=7, verbose=False,
                 delta=0, path='checkpoint.pt'):
        # patience: calls to wait after the last improvement before stopping
        # verbose:  kept for interface compatibility (currently no extra output)
        # delta:    minimum increase over best_score to count as an improvement
        # path:     file the best model's state_dict is saved to
        self.patience = patience
        self.verbose = verbose
        self.counter = 0
        self.best_score = None
        self.early_stop = False
        # np.Inf was removed in NumPy 2.0; np.inf is the supported spelling.
        self.val_loss_min = np.inf
        self.delta = delta
        self.path = path

    def __call__(self, metric, model):
        '''
        Record one epoch's validation metric.

        Saves a checkpoint on improvement; otherwise increments the patience
        counter and sets `early_stop` once it reaches `patience`.
        '''
        score = metric

        if self.best_score is None:
            # First call: establish the baseline and checkpoint immediately.
            self.best_score = score
            self.save_checkpoint(model)
        elif score < self.best_score + self.delta:
            # No sufficient improvement.
            self.counter += 1
            print(f'EarlyStopping Counter: {self.counter} out of {self.patience}')
            if self.counter >= self.patience:
                self.early_stop = True
        else:
            # Improvement: reset patience and checkpoint the better model.
            self.best_score = score
            self.save_checkpoint(model)
            self.counter = 0

    def save_checkpoint(self, model):
        '''
        Save the model's parameters (state_dict) to self.path.
        '''
        torch.save(model.state_dict(), self.path)
