from abc import ABC, abstractmethod
import json
import os
import time

from torch import Tensor
from torch.utils.tensorboard import SummaryWriter

from ..runner import round_dict_value


class SubLogger(ABC):
    """Abstract logger interface used by the training manager.

    Concrete subclasses receive loss/metric dictionaries from the training
    and validation loops and persist them to some backend (TensorBoard,
    text file, ...).
    """

    def __init__(self, log_dir):
        # Directory every concrete logger writes its output under.
        self.log_dir = log_dir

    @abstractmethod
    def log_train(self, loss: dict, idx_batch: int):
        """Record a training step.

        :param loss
            loss dict: key is the loss name (str); value may be
            None, float, or Tensor
        :param idx_batch
            batch index
        """
        raise NotImplementedError

    @abstractmethod
    def log_val(self, loss: dict, metrics: dict, idx_epoch: int):
        """Record a validation pass, which yields both losses and metrics.

        :param loss
            loss dict: key is the loss name (str); value may be
            None, float, or Tensor
        :param metrics
            metric dict: key is the metric name (str); value is the
            metric value
        :param idx_epoch
            epoch index
        """
        raise NotImplementedError

    @abstractmethod
    def log_info(self, info: dict, idx_epoch: int):
        """Record auxiliary run info (e.g. learning rate).

        :param info
            dict of learning rate, sparsity rate, etc.
        :param idx_epoch
            epoch index
        """
        raise NotImplementedError


class TBLogger(SubLogger):
    """TensorBoard logger.

    Writes scalars through a ``SummaryWriter``; inspect with
    ``tensorboard --logdir=... --port=... --bind_all``.
    """

    # Tag prefixes that group scalars into panels in the TensorBoard UI.
    KW_INFO = "info/"
    KW_TRAIN_LOSS = "train/loss/"
    KW_VAL_LOSS = "val/loss/"
    KW_VAL_METRIC = "val/metric/"

    def __init__(self, log_dir):
        super().__init__(log_dir)
        self.summary_writer = SummaryWriter(log_dir)

    def log(self, metrics, global_step, name_space):
        """Write every non-None value in *metrics* as a scalar.

        :param metrics
            dict of name -> value; value may be None (skipped), float,
            or Tensor. The whole call is a no-op when *metrics* is None.
        :param global_step
            x-axis position (batch or epoch index)
        :param name_space
            tag prefix prepended to every key
        """
        if metrics is None:
            return
        for name, value in metrics.items():
            if value is None:
                # Unset entries are silently skipped.
                continue
            if isinstance(value, Tensor):
                # Detach from the autograd graph and move to CPU before
                # extracting the Python scalar.
                value = value.detach().cpu().item()
            self.summary_writer.add_scalar(name_space + name, value, global_step)

    def log_train(self, loss, idx_batch):
        self.log(loss, idx_batch, self.KW_TRAIN_LOSS)

    def log_val(self, loss, metrics, idx_epoch):
        self.log(metrics, idx_epoch, self.KW_VAL_METRIC)
        self.log(loss, idx_epoch, self.KW_VAL_LOSS)

    def log_info(self, info, idx_epoch):
        self.log(info, idx_epoch, self.KW_INFO)

    def __del__(self):
        # __init__ may have raised before summary_writer was assigned;
        # guard so interpreter shutdown does not emit an AttributeError.
        writer = getattr(self, "summary_writer", None)
        if writer is not None:
            writer.close()


class TXTLogger(SubLogger):
    """Plain-text logger.

    Appends timestamped JSON records to a ``*.log`` file under *log_dir*;
    inspect with ``tail`` or by downloading the file.
    """

    # Key under which the step index is stored in each record.
    KW_BATCH = "batch"
    KW_EPOCH = "epoch"

    def __init__(self, log_dir, period=50, n_digit=4):
        """
        :param log_dir
            directory for the log file (created if absent)
        :param period
            emit one training record every *period* batches
        :param n_digit
            digits kept when rounding logged values
        """
        super().__init__(log_dir)
        os.makedirs(log_dir, exist_ok=True)
        # File named after the integral part of the current UNIX timestamp.
        self.log_file = open(os.path.join(log_dir, str(int(time.time())) + ".log"), "w")
        self.date_fmt = "-----%y/%m/%d %H:%M:%S-----"
        self.countdown = self.period = period
        self.n_digit = n_digit

    def set_period(self, period):
        """Change the training-log period and restart the countdown."""
        self.countdown = self.period = period

    def log(self, info, global_step, name_space, apply_round=True):
        """Append one timestamped JSON record to the log file.

        :param info
            dict of values to record
        :param global_step
            step index, stored in the record under key *name_space*
        :param name_space
            key name for the step index (e.g. "batch" or "epoch")
        :param apply_round
            round numeric values to ``self.n_digit`` digits
        """
        # Work on a copy: the original mutated the caller's dict by
        # injecting the step index into it, which leaked the "batch"/
        # "epoch" key back into the training loop's loss dict.
        record = dict(info)
        record[name_space] = global_step
        if apply_round:
            record = round_dict_value(record, self.n_digit)
        print(
            time.strftime(self.date_fmt, time.localtime()),
            json.dumps(record, indent=4),
            file=self.log_file,
            flush=True,
        )

    def log_train(self, loss, idx_batch):
        # Only every `period`-th call actually writes a record.
        self.countdown -= 1
        if loss is None or self.countdown > 0:
            return
        self.log(loss, idx_batch, self.KW_BATCH)
        self.countdown = self.period

    def log_val(self, loss, metrics, idx_epoch):
        self.log(dict(loss=loss, metrics=metrics), idx_epoch, self.KW_EPOCH)

    def log_info(self, info, idx_epoch):
        self.log(info, idx_epoch, self.KW_EPOCH, apply_round=False)

    def __del__(self):
        # __init__ may have raised before log_file was assigned; guard so
        # interpreter shutdown does not emit an AttributeError.
        f = getattr(self, "log_file", None)
        if f is not None:
            f.close()
