import argparse

from lightning.pytorch import Trainer, seed_everything
from lightning.pytorch.loggers import TensorBoardLogger
from lightning.pytorch.callbacks import LearningRateMonitor, ModelCheckpoint

from src.utils import MetPolArgs, get_dataloader_and_statics
from src.modules import MetPolModule


def main(args):
    """Train a MetPolModule on the 'train' split, validating on 'val'.

    Args:
        args: MetPolArgs configuration object carrying the seed and the
            data/model settings read by the loaders and module downstream.
    """
    # Seed Python/NumPy/torch RNGs (and dataloader workers) for reproducibility.
    seed_everything(args.seed, workers=True)

    train_loader, statics = get_dataloader_and_statics('train', args)
    val_loader, _ = get_dataloader_and_statics('val', args)

    model = MetPolModule(args, statics)

    logger = TensorBoardLogger(save_dir="tb_logs", name="Train")

    # Log the current learning rate at every optimizer step.
    lr_monitor = LearningRateMonitor(logging_interval="step")

    # Keep the 10 best checkpoints by validation loss, plus the latest one.
    checkpoint_cb = ModelCheckpoint(
        monitor="val_loss",
        save_top_k=10,
        mode='min',
        filename="{epoch:03d}-{val_loss:.4f}",
        save_last=True
    )

    trainer = Trainer(
        # 'auto' picks the GPU when one is available and falls back to
        # CPU/MPS otherwise; the previous hard-coded 'gpu' crashed on
        # machines without CUDA.
        accelerator='auto',
        log_every_n_steps=10,
        logger=logger,
        callbacks=[lr_monitor, checkpoint_cb]
    )

    trainer.fit(model, train_loader, val_loader)


if __name__ == "__main__":
    # Start from project defaults, then let CLI flags override them.
    args = MetPolArgs()

    parser = argparse.ArgumentParser(description="Training script with conditional update")
    parser.add_argument("--lead_time", type=int, default=None, help="set lead_time")
    parsed_args = parser.parse_args()

    # Only overwrite a default when the user actually supplied the flag
    # (argparse leaves unspecified options at None).
    for key, value in vars(parsed_args).items():
        if value is not None:
            setattr(args, key, value)

    # Labeled message instead of the previous bare debug print.
    print(f"lead_time = {args.lead_time}")
    main(args)
