#!/usr/bin/env bash
#
# Convert a Megatron-LM checkpoint back to Hugging Face (transformers) format.
#
# Usage:   ./this_script.sh <megatron_checkpoint_dir>
# Output:  <megatron_checkpoint_dir>/hf  — converted weights plus the
#          tokenizer/config files copied from the reference HF model dir.
set -euo pipefail

megatron_path=/apdcephfs_qy3/share_976139/users/adrenzhou/nlp_workdir/Megatron-LM

# Fail with a usage message if the checkpoint dir argument is missing/empty.
load_path=${1:?usage: $0 <megatron_checkpoint_dir>}
save_path=$load_path/hf
#save_path=hunyuan-7b-back-hf
#orig_hf_path=/apdcephfs_qy3/share_976139/users/adrenzhou/nlp_workdir/pretrained_models/thudm/chatglm3-6b-128k
orig_hf_path=/apdcephfs_qy3/share_976139/users/adrenzhou/nlp_workdir/pretrained_models/Qwen/Qwen2.5-7B

mkdir -p "$save_path"
# Copy everything except the model weight shards (model*) from the reference
# HF model dir (tokenizer, config, generation config, ...) so the converted
# checkpoint directory is directly loadable with transformers.
find "$orig_hf_path" -type f ! -name 'model*' -exec cp {} "$save_path/" \;

python tools/qwen_checkpoint_conversion.py \
  --convert_checkpoint_from_megatron_to_transformers \
  --load_path "$load_path" \
  --save_path "$save_path" \
  --target_params_dtype "bf16" \
  --make_vocab_size_divisible_by 1 \
  --print-checkpoint-structure \
  --untie-word-embeddings \
  --megatron-path "$megatron_path"
# Optional tokenizer flags, kept for reference:
#   --tokenizer-type HFTokenizer --tokenizer-model "$orig_hf_path"


#python comp_ckp.py