#!/bin/bash
test_set=
gpu_id=

# Parse command-line options.
#   --gpu_id N       GPU index to decode on (exported later as CUDA_VISIBLE_DEVICES).
#   --test_set NAME  may be repeated; values accumulate into a space-separated list.
# Unknown options are silently dropped.
while [[ $# -gt 0 ]]; do
  case $1 in
    --gpu_id)
      gpu_id="$2"
      shift; shift # consume option name and its value
      ;;
    --test_set)
      # Append to the list; no leading space when the list is still empty.
      test_set="${test_set:+${test_set} }$2"
      shift; shift # consume option name and its value
      ;;
    *) # unrecognized option
      shift # discard it
      ;;
  esac
done

# Echo the parsed options so a mistyped flag is easy to spot in the log.
echo "GPU ID: ${gpu_id}"
echo "Test sets: ${test_set}"

. ./path.sh || exit 1;

stage=6 # start from 0 if you need to start from data_list preparation
stop_stage=6

# Experiment directory. The assignments below override each other; only the
# LAST value is effective (earlier lines are kept as a record of past runs).
dir=/home/work_nfs11/hfxue/checkpoint/wenet_MLS2T_LLM/MLlibrispeech_whispermms_v4_fix_1/stage1
dir=/home/work_nfs11/hfxue/checkpoint/wenet_MLS2T_LLM/MLlibrispeech_whispermms_L_v10/stage2
dir=/home/work_nfs11/hfxue/checkpoint/wenet_MLS2T_LLM/Fleurs_whispermms_v11/stage1

data_type=shard # raw or shard
num_workers=8  # number of data-loading worker processes
prefetch=200

average_checkpoint=false
decode_checkpoint=$dir/epoch_6.pt # epoch_0.pt
decode_checkpoint_name="epoch6pt"
average_num=10

decode_modes="salmonn_decode"


HOST_NODE_ADDR="localhost:0"

#cmvn=false   # cmvn settings were moved into the configs
#do_delta=false

deepspeed_config=conf/ds_stage2.json
deepspeed_save_states="model_only"

# . tools/parse_options.sh || exit 1;

echo "开始打印主要变量，这些变量有命令行传入"
echo "dir=$dir"
# train_config is never assigned in this script; use a default expansion so
# this echo stays safe even under 'set -u' (enabled just below) should the
# strict-mode block ever be moved above this point.
echo "train_config=${train_config:-}"
echo "decode_checkpoint=$decode_checkpoint"
echo "decode_checkpoint_name=$decode_checkpoint_name"


# Strict mode: abort on errors, unset variables, and pipeline failures.
set -e
set -u
set -o pipefail

 # "mls_english" "mls_french" "mls_german")
# "mls_dutch" "mls_french" "mls_german") # "mls_italian" "mls_polish" "mls_portuguese" "mls_spanish"
# NOTE(review): the two lines above look like leftovers of a removed test-set
# array — presumably safe to delete; confirm before cleaning up.

# When using the LLM's tokenizer there is no need to pass in dict and bpemodel

export CUDA_VISIBLE_DEVICES=$gpu_id
# Stage 5: run recognition on the selected test set.
if [ ${stage} -le 5 ] && [ ${stop_stage} -ge 5 ]; then
  decoding_chunk_size=   # NOTE(review): set but never passed to recognize.py — confirm whether it should be
  ctc_weight=0.5
  # Polling GPU id begin with index 0
  echo "test this dataset: $test_set"
  test_dir=$dir/test_${decode_checkpoint_name}/${test_set}

  mkdir -p "$test_dir"
  # decode_modes is intentionally unquoted: it may expand to several modes.
  # Fix: the original ended the command with a stray trailing backslash that
  # only worked because a blank line happened to follow it.
  python wenet/bin/recognize.py --gpu "$gpu_id" \
    --modes $decode_modes \
    --config "$dir/train.yaml" \
    --data_type "$data_type" \
    --test_data "test_data/$test_set/data.list" \
    --checkpoint "$decode_checkpoint" \
    --beam_size 10 \
    --batch_size 1 \
    --penalty 0.0 \
    --result_dir "$test_dir" \
    --ctc_weight "$ctc_weight"

  #python tools/compute-wer.py --char=1 --v=1 \
  #  test_data/$test_set/text $test_dir/text > $test_dir/wer
  echo "$test_set has been decoded!"
fi

# Stage 6: score the decoded hypotheses against the reference transcripts.
if [ ${stage} -le 6 ] && [ ${stop_stage} -ge 6 ]; then
  test_dir=$dir/test_${decode_checkpoint_name}/${test_set} # NOTE(review): unused in this stage
  checkpoint_dir=$dir/test_${decode_checkpoint_name}
  # $checkpoint_dir/text must already exist; it used to be aggregated by the
  # commented-out cat below — re-enable it if the file is missing.
  # cat $checkpoint_dir/fleurs*/text >> $checkpoint_dir/text
  python tools/compute-wer.py --char=1 --v=1 \
    "test_data/$test_set/text" "$checkpoint_dir/text" > "$checkpoint_dir/cer"
fi
