#!/bin/bash
# Convert a Megatron checkpoint (passed as $1) back to Hugging Face format, writing it to $1/hf.
megatron_path=/apdcephfs_qy3/share_976139/users/adrenzhou/nlp_workdir/Megatron-LM
#load_path=hunyuan-7b-mega-ds-T2P2
#load_path=/apdcephfs_qy3/share_976139/users/adrenzhou/nlp_workdir/Megatron-DeepSpeed/hunyuan-7b-mega-ds-T8P1-16PROC-bsz1_seq4096
load_path=$1
save_path=$1/hf
#save_path=hunyuan-7b-back-hf
orig_hf_path=/apdcephfs_qy3/share_976139/users/adrenzhou/nlp_workdir/pretrained_models/tencent/Hunyuan-7b-32k-llama2-chat-hf
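
# Optional guard: the rest of the script assumes $1 is an existing
# Megatron/DeepSpeed checkpoint directory, so fail early if it is not.
if [ ! -d "$load_path" ]; then
    echo "Usage: bash $0 <megatron_checkpoint_dir>" >&2
    exit 1
fi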

# Seed the output directory with the original HF files (config, tokenizer, etc.),
# then drop the original weight shards so only the converted weights remain.
mkdir -p "$save_path"
cp -r "$orig_hf_path"/* "$save_path"/
rm -f "$save_path"/pytorch*
set -e
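# Run the Megatron -> Transformers conversion; the converted fp16 weights are
# written into $save_path next to the config/tokenizer files copied above.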
python tools/llama_checkpoint_conversion.py \
--convert_checkpoint_from_megatron_to_transformers \
--load_path "$load_path" \
--save_path "$save_path" \
--target_params_dtype "fp16" \
--make_vocab_size_divisible_by 1 \
--print-checkpoint-structure \
--megatron-path "$megatron_path"
# To also hand the original HF tokenizer to the converter, re-add a trailing '\'
# to the line above and uncomment:
# --tokenizer-type HFTokenizer \
# --tokenizer-model $orig_hf_path

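# Optional sanity check (assumption: the converted model is LLaMA-architecture and
# loadable directly with transformers' AutoModelForCausalLM); uncomment to try it:
#python -c "from transformers import AutoModelForCausalLM; m = AutoModelForCausalLM.from_pretrained('$save_path'); print(m.config)"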

#python comp_ckp.py 
