import os
import argparse
import numpy as np

import mindspore as ms
from mindspore.train import Model
from mindspore import load_checkpoint, load_param_into_net

from mindformers import MindFormerConfig, TransformerOpParallelConfig, LlamaTokenizer, LlamaForCausalLM, AutoModel
from mindformers import init_context, ContextConfig, ParallelContextConfig
from mindformers.trainer.utils import get_last_checkpoint
from mindformers.tools.utils import str2bool

def main(args):
    """Run batched text generation with a Llama model.

    Builds the model/tokenizer from the YAML config at ``args.config_path``,
    restores weights (per-rank sharded checkpoints in parallel mode, a single
    checkpoint otherwise), generates continuations for a fixed batch of
    prompts and prints the decoded outputs.

    Args:
        args: parsed CLI namespace (see the argparse setup in ``__main__``).
    """
    # Batched prompt inputs; batch_size below is derived from this list.
    inputs = ["I love Beijing, because",
              "LLaMA is a",
              "Huawei is a company that"]

    # set model config
    config = MindFormerConfig(args.config_path)

    # Initialize the execution context (device target / parallel mode).
    init_context(use_parallel=config.use_parallel,
                 context_config=config.context,
                 parallel_config=config.parallel)

    # Override config-file values with the CLI arguments.
    model_config = config.model.model_config
    model_config.parallel_config = TransformerOpParallelConfig(**config.parallel_config)
    model_config.batch_size = len(inputs)
    model_config.use_past = args.use_past
    model_config.do_sample = args.do_sample
    model_config.top_k = args.top_k
    model_config.seq_length = args.seq_length
    config.model.model_config = model_config

    # Single-card mode: let the model load the full checkpoint by path.
    if args.transformed_checkpoint_path and not config.use_parallel:
        model_config.checkpoint_name_or_path = args.transformed_checkpoint_path
    print(f"config is: {config}")

    # build tokenizer
    tokenizer = LlamaTokenizer(args.tokenizer_path)
    # build model from config
    model = AutoModel.from_config(config, download_checkpoint=False)

    # if use parallel, load distributed checkpoints
    if config.use_parallel:
        # find the sharded ckpt path for this rank
        ckpt_path = os.path.join(args.transformed_checkpoint_path, "rank_{}".format(os.getenv("RANK_ID", "0")))
        ckpt_path = get_last_checkpoint(ckpt_path)
        # Shard the model by running layout inference on a dummy input.
        # BUGFIX: the dummy batch dimension was 0 (an empty tensor); layout
        # inference needs a representative full-shape input, so use the real
        # batch size that generation will run with.
        warm_up_model = Model(model)
        warm_up_model.infer_predict_layout(
            ms.Tensor(np.ones(shape=(model_config.batch_size, model_config.seq_length)), ms.int32))
    else:
        ckpt_path = args.ckpt_path
    print(f"ckpt path: {ckpt_path}")
    checkpoint_dict = load_checkpoint(ckpt_path)
    not_load_network_params = load_param_into_net(model, checkpoint_dict)
    print(f"Network parameters are not loaded: {not_load_network_params}")

    # Pad every prompt to seq_length so the batch shares a single shape.
    inputs_ids = tokenizer(inputs, max_length=model_config.seq_length, padding="max_length")["input_ids"]
    outputs = model.generate(inputs_ids,
                             max_length=model_config.max_decode_length,
                             do_sample=model_config.do_sample,
                             top_k=model_config.top_k,
                             top_p=model_config.top_p)
    for output in outputs:
        print(tokenizer.decode(output))


if __name__ == "__main__":
    # CLI entry point: collect generation/runtime options and hand off to main().
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--tokenizer_path', default='', type=str, required=True, help='set tokenizer path.')
    # NOTE(review): --device_id is parsed but not read by main() in this file — confirm whether it is still needed.
    arg_parser.add_argument('--device_id', default=0, type=int, help='set device id.')
    arg_parser.add_argument('--transformed_checkpoint_path', default='', type=str, help='set transformed checkpoint path.')
    arg_parser.add_argument('--ckpt_path', default='', type=str, help='set checkpoint path.')
    arg_parser.add_argument('--use_past', default=True, type=str2bool, help='whether use past.')
    arg_parser.add_argument('--config_path', default="", type=str, help='config file path')
    arg_parser.add_argument('--seq_length', default=512, type=int, help='predict max length')
    arg_parser.add_argument('--do_sample', default=True, type=str2bool, help='whether enable sample')
    arg_parser.add_argument('--top_k', default=3, type=int, help='top_k parameter')

    main(arg_parser.parse_args())