from pathlib import Path

import torch
from lightning.pytorch.cli import LightningCLI, SaveConfigCallback, LightningArgumentParser
from lightning import Trainer, LightningModule
from lightning.pytorch.callbacks import EarlyStopping, ModelCheckpoint, LearningRateMonitor, LearningRateFinder
from lightning.pytorch.tuner import Tuner
from lightning.pytorch.profilers import AdvancedProfiler

from model.model_interface import MInterface
from data.data_interface import DInterface


class MyLightningCLI(LightningCLI):
    """LightningCLI subclass that registers extra project-specific flags.

    The added options land in ``cli.config`` under their option names and are
    consumed by :func:`cli_main` (``plots``, ``ckpt_path``, ``find_lr``).
    """

    def add_arguments_to_parser(self, parser):
        """Add custom command-line arguments to the CLI parser.

        Args:
            parser: the ``LightningArgumentParser`` (jsonargparse-based) to extend.
        """
        # NOTE: ``dest`` defaults to the long option name, so no explicit
        # ``dest=`` is needed for --plots.
        parser.add_argument("--plots", action="store_true", help="Enable plotting during training")
        parser.add_argument("--ckpt_path", type=str, default=None, help="Path to the checkpoint file")
        parser.add_argument("--find_lr", action="store_true", help="Run learning rate finder before training.")
        # presumably read by the model/scheduler elsewhere in the project — not
        # referenced in this file; TODO confirm the consumer.
        parser.add_argument("--warmup_epochs", type=int, help="Set warmup epochs for the model.")


def cli_main():
    """Build the CLI, optionally run the LR finder, then launch training.

    Custom flags (registered by ``MyLightningCLI``):
      --plots         save training plots under ``<log_dir>/plots``
      --find_lr       run the Tuner's learning-rate finder and adopt its suggestion
      --ckpt_path     checkpoint to resume training from

    If the LR finder is disabled or yields no suggestion, the learning rate is
    taken from ``optimizer.init_args.lr`` in the config, when present.
    """
    # Trade strict reproducibility for GPU throughput.
    torch.backends.cudnn.benchmark = True        # autotune conv algorithms for fixed shapes
    torch.backends.cudnn.deterministic = False   # allow non-deterministic kernels for speed
    torch.backends.cuda.matmul.allow_tf32 = True  # enable TF32 matmuls on Ampere+

    cli = MyLightningCLI(model_class=MInterface,
                         datamodule_class=DInterface,
                         save_config_kwargs={"overwrite": True, "config_filename": "experiment_config.yaml"},
                         trainer_class=Trainer,
                         trainer_defaults=dict(devices="auto",
                                               accelerator="auto",
                                               ),
                         run=False,  # instantiate only; fit() is called explicitly below
                         )

    if cli.config.plots:  # hack: save figures produced during training
        cli.model.save_plots_dir = Path(cli.trainer.log_dir) / "plots"
        cli.model.save_plots_dir.mkdir(parents=True, exist_ok=True)

    # Learning Rate Finder. Cache the suggestion once instead of calling
    # ``suggestion()`` twice (the original re-invoked it in the fallback check,
    # and referenced ``lr_finder_result`` outside the branch that defines it).
    suggested_lr = None
    if cli.config.get("find_lr", False):
        print("--- Running Learning Rate Finder ---")
        tuner = Tuner(cli.trainer)
        # cli.model and cli.datamodule are already instantiated by LightningCLI.
        lr_finder_result = tuner.lr_find(cli.model, datamodule=cli.datamodule)

        suggested_lr = lr_finder_result.suggestion()
        if suggested_lr is not None:
            print(f"--- LR Finder suggested learning rate: {suggested_lr} ---")
            cli.model.learning_rate = suggested_lr  # Update learning rate in the model
            print(f"--- Model learning_rate updated to: {cli.model.learning_rate} ---")
        else:
            print("--- LR Finder could not suggest a learning rate. Using configured LR. ---")

    if suggested_lr is None:
        # Fall back to the learning rate from the config file
        # (optimizer.init_args.lr) when it is defined.
        try:
            cli.model.learning_rate = cli.config.optimizer.init_args.lr
        except (AttributeError, KeyError):
            # The config may legitimately lack an optimizer section; keep the
            # model's current learning rate in that case.
            pass

    cli.trainer.fit(cli.model, datamodule=cli.datamodule, ckpt_path=cli.config.ckpt_path)


# Script entry point: run the CLI driver when executed directly.
if __name__ == "__main__":
    cli_main()
