import argparse
import gc
import os

import MinkowskiEngine as ME
import pytorch_lightning as pl
from pytorch_lightning.callbacks import ModelCheckpoint
from pytorch_lightning.strategies import DDPStrategy

from downstream.dataloader_kitti import make_data_loader as make_data_loader_kitti
from downstream.dataloader_nuscenes import make_data_loader as make_data_loader_nuscenes
from downstream.evaluate import evaluate
from downstream.lightning_datamodule import DownstreamDataModule
from downstream.lightning_trainer import LightningDownstream
from downstream.model_builder import make_model
from utils.read_config import generate_config


def main():
    """
    Launch the downstream training, then evaluate the trained model.

    Parses command-line arguments, merges them into the YAML config read by
    ``generate_config``, trains with PyTorch Lightning under DDP, and finally
    runs a standalone evaluation pass on the validation split.
    """
    parser = argparse.ArgumentParser(description="arg parser")
    parser.add_argument(
        "--cfg_file",
        type=str,
        default="config/semseg_nuscenes.yaml",
        help="specify the config for training",
    )
    parser.add_argument(
        "--resume_path",
        type=str,
        default=None,
        help="provide a path to resume an incomplete training",
    )
    parser.add_argument(
        "--pretraining_path",
        type=str,
        default=None,
        help="provide a path to pre-trained weights",
    )
    parser.add_argument(
        "--dataset_skip_step",
        type=int,
        # FIX: help text was copy-pasted from --pretraining_path; this flag
        # actually controls dataset subsampling (see DownstreamDataModule).
        default=100,
        help="keep only one out of every N training frames",
    )
    args = parser.parse_args()
    config = generate_config(args.cfg_file)
    if args.resume_path:
        config["resume_path"] = args.resume_path
    if args.pretraining_path:
        config["pretraining_path"] = args.pretraining_path

    # FIX: removed leftover debug overrides that hard-coded
    # config["pretraining_path"] to a local checkpoint and forced
    # config["dataset_skip_step"] = 1 — both silently ignored the
    # corresponding CLI arguments.
    config["dataset_skip_step"] = args.dataset_skip_step

    # Environment variables are strings: the original `== 0` comparison was
    # False whenever LOCAL_RANK was explicitly set (e.g. "0" under a DDP
    # launcher), so rank 0 never printed the config. Compare as int instead.
    if int(os.environ.get("LOCAL_RANK", 0)) == 0:
        print(
            "\n" + "\n".join(f"{key:20}: {value}" for key, value in config.items())
        )

    dm = DownstreamDataModule(config)
    model_points, model_classifier = make_model(config, config["pretraining_path"])
    if config["num_gpus"] > 1:
        # Plain BatchNorm statistics would diverge across DDP replicas.
        model_points = ME.MinkowskiSyncBatchNorm.convert_sync_batchnorm(model_points)
    module = LightningDownstream(model_points, model_classifier, config)
    # Separate output directories per skip step so runs don't overwrite.
    path = os.path.join(config["working_dir"], str(config["dataset_skip_step"]))
    trainer = pl.Trainer(
        devices=config["num_gpus"],
        accelerator="gpu",
        default_root_dir=path,
        enable_checkpointing=True,
        max_epochs=config["num_epochs"],
        strategy=DDPStrategy(find_unused_parameters=False),
        num_sanity_val_steps=2,
        check_val_every_n_epoch=2,
        callbacks=[
            # Keep the two best checkpoints by validation mIoU, plus the last.
            ModelCheckpoint(
                monitor="m_IoU",
                save_last=True,
                save_top_k=2,
                mode="max",
                filename="downstream-{epoch: d}-{m_IoU: .4f}",
            )
        ],
    )
    print("Starting the training")
    trainer.fit(module, dm, ckpt_path=config["resume_path"])

    print("Training finished, now evaluating the results")
    # Free the Lightning objects before building the evaluation dataloader
    # to keep peak memory down.
    del trainer
    del dm
    del module
    gc.collect()
    if config["dataset"].lower() == "nuscenes":
        # The "parametrizing" training mode evaluates on a dedicated
        # "verifying" split instead of the regular validation split.
        phase = (
            "verifying"
            if config["training"] in ("parametrize", "parametrizing")
            else "val"
        )
        val_dataloader = make_data_loader_nuscenes(
            config, phase, num_threads=config["num_threads"]
        )
    elif config["dataset"].lower() == "kitti":
        val_dataloader = make_data_loader_kitti(
            config, "val", num_threads=config["num_threads"]
        )
    evaluate(model_points.to(0), model_classifier.to(0), val_dataloader, config)


# Script entry point: only run training when executed directly, not on import.
if __name__ == "__main__":
    main()
