import argparse
import torch.multiprocessing as mp
from pretrain_yoco import model_provider


def _build_arg_parser():
    """Construct the base CLI parser for the checkpoint conversion utility.

    Loader/saver modules add their own arguments to this parser later,
    so ``conflict_handler="resolve"`` lets them override shared options.
    """
    parser = argparse.ArgumentParser(
        description="Megatron Checkpoint Utility Arguments",
        allow_abbrev=False,
        conflict_handler="resolve",
    )
    parser.add_argument(
        "--model-type",
        type=str,
        required=True,
        choices=["GPT", "BERT"],
        help="Type of the model",
    )
    parser.add_argument(
        "--load-dir",
        type=str,
        required=True,
        help="Directory to load model checkpoint from",
    )
    parser.add_argument(
        "--save-dir",
        type=str,
        required=True,
        help="Directory to save model checkpoint to",
    )
    parser.add_argument(
        "--max-queue-size",
        type=int,
        default=50,
        help="Maximum number of tensors in the queue",
    )
    # store_false on dest="checking": passing --no-checking sets
    # args.checking to False; by default checking is enabled (True).
    parser.add_argument(
        "--no-checking",
        action="store_false",
        help="Do not perform checking on the name and ordering of weights",
        dest="checking",
    )
    parser.add_argument(
        "--model-type-hf",
        type=str,
        default="yoco-moe",
        choices=[
            "baichuan",
            "baichuan2",
            "llama2",
            "mixtral",
            "chatglm3",
            "gemma",
            "gemma2",
            "bloom",
            "qwen",
            "internlm2",
            "deepseek2",
            "minicpm",
            "minicpm-moe",
            "deepseek2-lite",
            "qwen2-moe",
        ],
        help="model type of huggingface",
    )
    parser.add_argument(
        "--self-attn-layers",
        type=int,
        default=13,
    )
    return parser


def main():
    """Convert a HuggingFace checkpoint via a loader->queue->saver pipeline.

    The loader runs in the current process and pushes tensors onto a
    bounded multiprocessing queue; the saver consumes them in a child
    process. Raises RuntimeError if the saver process exits non-zero.
    """
    parser = _build_arg_parser()

    # Imported lazily so the converter modules register their extra CLI
    # arguments on the shared parser before parse_args() runs.
    import mindspeed.convert_yoco.loader_hf as loader
    import mindspeed.convert_yoco.saver as saver

    loader.add_arguments(parser)
    saver.add_arguments(parser)
    args = parser.parse_args()

    # Bounded queue: the loader blocks once max_queue_size tensors are
    # pending, which caps peak host memory during conversion.
    queue = mp.Queue(maxsize=args.max_queue_size)

    print("Starting saver...")
    saver_proc = mp.Process(
        target=saver.save_model_checkpoint, args=(model_provider, queue, args)
    )
    saver_proc.start()

    print("Starting loader...")
    loader.load_checkpoint(model_provider, queue, args)

    print("Waiting for saver to complete...")
    saver_proc.join()
    # Surface saver failures: previously a crashed saver still let this
    # script exit with status 0, silently producing a broken checkpoint.
    if saver_proc.exitcode != 0:
        raise RuntimeError(
            f"Saver process exited with code {saver_proc.exitcode}"
        )


# Script entry point: run the conversion pipeline when executed directly.
if __name__ == "__main__":
    main()
