import os
import shutil
from datetime import datetime
from glob import glob
from torch.utils.tensorboard import SummaryWriter


class TensorboardManager:
    """Manage a TensorBoard run directory and its SummaryWriter.

    Creates a timestamped log directory under ``base_dir``, prunes the
    oldest ``exp_*`` runs so at most ``max_runs`` remain, and provides
    small helpers for logging training scalars and text markers.
    """

    def __init__(self, base_dir: str = ".run_tensorboard", max_runs: int = 20):
        self.base_dir = base_dir
        self.max_runs = max_runs
        self.writer = None   # lazily created by get_writer()
        self.log_dir = None  # set by _create_log_dir()

        self._create_log_dir()
        self._cleanup_old_logs()

        # Eagerly create the writer so the write_* helpers work immediately.
        self.writer = self.get_writer()

    def _create_log_dir(self):
        """Create a per-run directory like .run_tensorboard/exp_2025_06_18_17_30."""
        # NOTE(review): minute resolution — two runs started within the same
        # minute share a directory (exist_ok=True) and mix their event files.
        timestamp = datetime.now().strftime("%Y_%m_%d_%H_%M")
        self.log_dir = os.path.join(self.base_dir, f"exp_{timestamp}")
        os.makedirs(self.log_dir, exist_ok=True)
        print(f"[TensorBoard] current log directory: {self.log_dir}")

    def _cleanup_old_logs(self):
        """Delete the oldest exp_* run directories, keeping at most max_runs."""
        # Sort by modification time: oldest runs come first.
        all_dirs = sorted(
            glob(os.path.join(self.base_dir, "exp_*")),
            key=os.path.getmtime,
        )
        # max(0, ...) keeps the slice empty when we are under the limit.
        excess = max(0, len(all_dirs) - self.max_runs)
        for d in all_dirs[:excess]:
            print(f"[TensorBoard] auto delete old directory: {d}")
            # Remove the directory and all of its contents.
            shutil.rmtree(d)

    def get_writer(self) -> SummaryWriter:
        """Return the SummaryWriter, creating it on first use."""
        if self.writer is None:
            self.writer = SummaryWriter(log_dir=self.log_dir)
        return self.writer

    def write_epoch(self, epoch: int):
        """Log a text marker for the current epoch."""
        if self.writer is not None:
            # Use the epoch as global_step so markers accumulate; the original
            # always wrote at step 0, overwriting the previous record.
            self.writer.add_text(
                tag="epoch info",
                text_string=f"Current Epoch: {epoch} >>>>>>>>>>>>>>>>>>>>>>>>>>> ",
                global_step=epoch,
            )
        else:
            print("Warning: self.writer(epoch) is None.")

    def write_batch(self, batch: int):
        """Log a text marker for the current batch."""
        if self.writer is not None:
            # Same fix as write_epoch: step by batch index, not a constant 0.
            self.writer.add_text(
                tag="batch info",
                text_string=f"Current Batch: {batch} >>>>>>>>>>>>>>>>>>>>>>>>>>> ",
                global_step=batch,
            )
        else:
            print("Warning: self.writer(batch) is None.")

    def write_train_loss(self, i: int, loss: float):
        """Log the training loss at step ``i``."""
        if self.writer is not None:
            self.writer.add_scalar("TrainLoss", loss, i)
        else:
            print("Warning: self.writer(train loss) is None.")

    def write_eval_loss(self, i: int, loss: float):
        """Log the evaluation loss at step ``i``."""
        if self.writer is not None:
            self.writer.add_scalar("EvalLoss", loss, i)
        else:
            print("Warning: self.writer(eval loss) is None.")

    def write_lr(self, i: int, lr: float):
        """Log the learning rate at step ``i``."""
        if self.writer is not None:
            self.writer.add_scalar("LearningRate", lr, i)
        else:
            print("Warning: self.writer(lr) is None.")

    def save(self):
        """Flush pending events to disk without closing the writer."""
        if self.writer is not None:
            self.writer.flush()

    def close(self):
        """Flush and close the writer; safe to call when no writer exists."""
        if self.writer is not None:
            self.writer.flush()
            self.writer.close()

    def launch_tensorboard(self, port: int = 6006):
        """Spawn a `tensorboard` server process serving ``base_dir``.

        The Popen handle is kept on ``self._tb_process`` so callers can
        terminate the server (the original discarded it, orphaning the
        child process).
        """
        import subprocess
        self._tb_process = subprocess.Popen(
            ["tensorboard", "--logdir", self.base_dir, "--port", str(port)]
        )
        print(f"[TensorBoard] start -> http://localhost:{port}")


if __name__ == "__main__":
    # Standalone launcher: serve an existing log folder with TensorBoard.
    import argparse
    import subprocess
    import sys

    parser = argparse.ArgumentParser(description="Launch TensorBoard to view saved logs.")
    parser.add_argument("-f", "--folder", type=str, required=True,
                        help="Path to the log directory (e.g., .run_tensorboard)")

    args = parser.parse_args()

    if not os.path.exists(args.folder):
        print(f"[Error] The folder '{args.folder}' does not exist.")
        sys.exit(1)

    print(f"[TensorBoard] Launching TensorBoard for log folder: {args.folder}")
    proc = subprocess.Popen(["tensorboard", "--logdir", args.folder, "--port", "6006"])
    print(f"[TensorBoard] Running at http://localhost:6006")

    # Block until TensorBoard exits. proc.wait() sleeps in the kernel,
    # unlike the original `while True: pass`, which busy-spun a CPU core.
    try:
        proc.wait()
    except KeyboardInterrupt:
        print("\n[TensorBoard] KeyboardInterrupt received. Exiting.")
        # Shut the child down instead of orphaning it.
        proc.terminate()
