#!/usr/bin/env bash
# Convert a Megatron-LM Qwen2.5-Omni checkpoint back to HuggingFace format.
# This section sets the paths and stages the original HF config/tokenizer
# files (everything except the weight shards) into the output directory.
#
# Strict mode from the very first command: without it, a failed mkdir/rsync
# below would be silently ignored (the original only enabled -e later).
set -euo pipefail

megatron_path=/apdcephfs/private_kaixunhuang/data/pretrained_models/megatron
load_path=/apdcephfs/private_kaixunhuang/workspace/metagron-lm-meeting/exp/qwen2_5omni_thinker_train_v0.31_TP4_PP1_CP1_MBZ8_GBSZ2880_seq1024
save_path=$load_path/hf_model

orig_hf_path="/apdcephfs/private_kaixunhuang/data/pretrained_models/Qwen/Qwen2.5-Omni-7B"
#orig_hf_path="Qwen-audio-whisper-tiny-qwen-0.5B"

mkdir -p "$save_path"
# Copy config/tokenizer/etc. from the original HF checkpoint, but skip the
# weight files ('model*') — those are produced by the conversion step below.
rsync -a --exclude='model*' "$orig_hf_path"/ "$save_path"/

set -e
# Run the Megatron -> Transformers conversion; writes HF weight shards into
# $save_path next to the config files staged by the rsync above.
python tools/qwen2_5omni_checkpoint_conversion.py \
  --convert_checkpoint_from_megatron_to_transformers \
  --load_path "$load_path" \
  --save_path "$save_path" \
  --target_params_dtype "bf16" \
  --make_vocab_size_divisible_by 1 \
  --megatron-path "$megatron_path"
# NOTE: keep the last real flag WITHOUT a trailing '\'. A backslash before a
# commented line splices that line onto the command — it only works by
# accident because the '#' still starts a comment after the join.
# Optional flags, kept for reference:
# --tokenizer-type HFTokenizer \
# --tokenizer-model $orig_hf_path
# --print-checkpoint-structure \

#python comp_ckp.py 
