import argparse

import torch

from downstream.dataloader_kitti import make_data_loader as make_data_loader_kitti
from downstream.dataloader_nuscenes import make_data_loader as make_data_loader_nuscenes
from downstream.evaluate import evaluate
from downstream.model_builder import make_model
from utils.read_config import generate_config


def main():
    """
    Launch the downstream evaluation.

    Parses command-line arguments, loads the YAML config, builds the
    validation dataloader for the selected dataset (nuScenes or KITTI),
    constructs the point model / classifier / domain-adaptation heads,
    and runs the evaluation loop.

    Raises:
        Exception: if ``config["dataset"]`` is neither "nuscenes" nor "kitti".
    """
    parser = argparse.ArgumentParser(description="arg parser")
    parser.add_argument(
        "--cfg_file",
        type=str,
        default="config/distil.yaml",
        help="specify the config for training",
    )
    parser.add_argument(
        "--resume_path",
        type=str,
        default=None,
        help="provide a path to resume an incomplete training",
    )
    parser.add_argument(
        "--dataset", type=str, default=None, help="Choose between nuScenes and KITTI"
    )
    # Backward-compatible addition: lets the user pick a checkpoint without
    # editing the config file. Previously a machine-specific absolute path
    # was hard-coded here, silently overriding whatever the config said.
    parser.add_argument(
        "--pretraining_path",
        type=str,
        default=None,
        help="override the pretrained checkpoint path from the config",
    )
    args = parser.parse_args()

    config = generate_config(args.cfg_file)
    if args.resume_path:
        config["resume_path"] = args.resume_path
    if args.pretraining_path:
        config["pretraining_path"] = args.pretraining_path

    # Dump the effective configuration for reproducibility.
    print("\n" + "\n".join(f"{key:20}: {value}" for key, value in config.items()))
    print("Creating the loaders")
    dataset = config["dataset"].lower()
    if dataset == "nuscenes":
        # The "parametrize"/"parametrizing" training modes evaluate on a
        # dedicated "verifying" split instead of the regular "val" split.
        phase = (
            "verifying"
            if config["training"] in ("parametrize", "parametrizing")
            else "val"
        )
        val_dataloader = make_data_loader_nuscenes(
            config, phase, num_threads=config["num_threads"]
        )
    elif dataset == "kitti":
        val_dataloader = make_data_loader_kitti(
            config, "val", num_threads=config["num_threads"]
        )
    else:
        # Report the value actually checked (from the config), not the
        # possibly-unset --dataset CLI argument.
        raise Exception(f"Dataset not recognized: {config['dataset']}")

    print("Creating the model")
    # assumes the config (or --pretraining_path) provides "pretraining_path"
    # — TODO confirm against the YAML configs used with this script.
    model_points, model_classifier, model_da = make_model(
        config, config["pretraining_path"]
    )
    # NOTE(review): device 0 is hard-coded; this assumes a CUDA-capable GPU
    # is present — confirm before running on CPU-only machines.
    evaluate(
        model_points.to(0),
        model_classifier.to(0),
        model_da.to(0),
        val_dataloader,
        config,
    )


# Script entry point: run the evaluation only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main()
