#!/usr/bin/env bash
# Convert a Megatron-LM checkpoint to HuggingFace format.
#
# Usage: $0 <megatron_checkpoint_dir>
#   The dir must contain latest_checkpointed_iteration.txt (written by
#   Megatron training). Output goes to <dir>/hf_<iteration>.
#
# NOTE(review): set -e moved to the top so a missing iteration file or a
# failed mkdir/find aborts the script instead of producing a bogus hf_ dir.
set -euo pipefail

megatron_path=/apdcephfs/share_976139/users/hongfeixue/workspace/megatron-lm-meeting
# Example input:
# /teaspeech_ceph/share_976139/users/hongfeixue/checkpoint/mlcslm/megatron/IdealLLM_noctc_nolid_yiwei_TP4_PP1_CP1_MBZ8_GBSZ32_seq1024

# Require the checkpoint dir argument; fail with a usage message otherwise.
load_path=${1:?usage: $0 <megatron_checkpoint_dir>}
latest_num=$(cat "$load_path/latest_checkpointed_iteration.txt")
save_path="$load_path/hf_${latest_num}"
#save_path=hunyuan-7b-back-hf

# Reference HF model: non-weight files (config, tokenizer, ...) are copied
# from here into the converted checkpoint directory.
# orig_hf_path="/apdcephfs/share_976139/users/adrenzhou/nlp_workdir/pretrained_models/Qwen/Qwen2-Audio-7B-Instruct"
orig_hf_path="/apdcephfs/share_976139/users/hongfeixue/model/IdealLLM-qwen3-nolora"
#orig_hf_path="Qwen-audio-whisper-tiny-qwen-0.5B"
# /apdcephfs/private_hongfeixue/checkpoint/mlcslm/megatron/IdealLLM_v2_qwen3_TP1_PP1_CP1_MBZ8_GBSZ64_seq512
# /apdcephfs/share_976139/users/hongfeixue/model/Megatron/IdealLLM-qwen3-8b-megatron-TP1-PP1-TE/iter_0000001

mkdir -p "$save_path"
# Copy everything from the reference HF model except the weight shards
# (files whose names start with "model").
find "$orig_hf_path" -type f ! -name 'model*' -exec cp -- {} "$save_path"/ \;

# No trailing backslash on the last option: the original continued the
# command line into a commented-out line, which breaks if that comment is
# edited or removed.
python tools/IdealLLM_checkpoint_conversion_v2.py \
  --convert_checkpoint_from_megatron_to_transformers \
  --load_path "$load_path" \
  --save_path "$save_path" \
  --target_params_dtype "bf16" \
  --make_vocab_size_divisible_by 1 \
  --print-checkpoint-structure \
  --megatron-path "$megatron_path"
  # Optional tokenizer flags (append to the command above if needed):
  # --tokenizer-type HFTokenizer \
  # --tokenizer-model "$orig_hf_path"


#python comp_ckp.py