#!/usr/bin/env bash
# Run math_eval.py over a fixed list of seeds for one prompt type / model.
#
# Usage: <script> PROMPT_TYPE MODEL_NAME_OR_PATH
#   PROMPT_TYPE        passed through to math_eval.py --prompt_type
#   MODEL_NAME_OR_PATH model checkpoint dir; results land in <path>/math_eval
#
# -e: abort on error  -x: trace commands  -u: error on unset vars
# pipefail: a pipeline fails if any stage fails
set -exuo pipefail

cd eval

# Required positional arguments; ${n:?} aborts with a usage message if missing
# (previously an absent arg silently expanded to "" under plain `set -ex`).
PROMPT_TYPE=${1:?usage: $0 PROMPT_TYPE MODEL_NAME_OR_PATH}
MODEL_NAME_OR_PATH=${2:?usage: $0 PROMPT_TYPE MODEL_NAME_OR_PATH}

# Evaluation outputs are written alongside the model checkpoint.
OUTPUT_DIR="${MODEL_NAME_OR_PATH}/math_eval"
SPLIT="test"
NUM_TEST_SAMPLE=-1   # -1 = evaluate on the full test split

# DATA_NAME="gsm8k,math-oai,svamp,asdiv,mawps,carp_en,tabmwp,minerva_math"
DATA_NAME="math-oai"
# Must be exported — a plain assignment is never seen by the child python3
# process, so the original line had no effect on the tokenizers library.
export TOKENIZERS_PARALLELISM=false

# One evaluation run per seed; all runs share ${OUTPUT_DIR}.
# All expansions are quoted so a model path containing spaces or glob
# characters is passed to python3 as a single argument (SC2086).
for seed in 0 222 1024 114514 1919810 19260817 998244353 2147483647; do
python3 -u math_eval.py \
    --model_name_or_path "${MODEL_NAME_OR_PATH}" \
    --data_name "${DATA_NAME}" \
    --output_dir "${OUTPUT_DIR}" \
    --split "${SPLIT}" \
    --prompt_type "${PROMPT_TYPE}" \
    --num_test_sample "${NUM_TEST_SAMPLE}" \
    --max_tokens_per_call 4096 \
    --seed "${seed}" \
    --temperature 0.7 \
    --n_sampling 1 \
    --top_p 0.7 \
    --start 0 \
    --end -1 \
    --use_vllm \
    --save_outputs \
    --overwrite
done

# DATA_NAME="math"
# TOKENIZERS_PARALLELISM=false

# for seed in 0 222 1024 114514 1919810 19260817 998244353 2147483647; do
# python3 -u math_eval.py \
#     --model_name_or_path ${MODEL_NAME_OR_PATH} \
#     --data_name ${DATA_NAME} \
#     --output_dir ${OUTPUT_DIR} \
#     --split ${SPLIT} \
#     --prompt_type ${PROMPT_TYPE} \
#     --num_test_sample ${NUM_TEST_SAMPLE} \
#     --max_tokens_per_call 4096 \
#     --seed $seed \
#     --temperature 0.7 \
#     --n_sampling 1 \
#     --top_p 0.7 \
#     --start 0 \
#     --end -1 \
#     --use_vllm \
#     --save_outputs \
#     --overwrite 
# done

# DATA_NAME="aqua,sat_math,mmlu_stem"
# DATA_NAME="MMLU-Pro"
# TOKENIZERS_PARALLELISM=false

# for seed in 0 222 1024 114514 1919810 19260817 998244353 2147483647; do
# python3 -u math_eval.py \
#     --model_name_or_path ${MODEL_NAME_OR_PATH} \
#     --data_name ${DATA_NAME} \
#     --output_dir ${OUTPUT_DIR} \
#     --split ${SPLIT} \
#     --prompt_type ${PROMPT_TYPE} \
#     --num_test_sample ${NUM_TEST_SAMPLE} \
#     --max_tokens_per_call 4096 \
#     --seed $seed \
#     --temperature 0.7 \
#     --n_sampling 1 \
#     --top_p 0.7 \
#     --start 0 \
#     --end -1 \
#     --use_vllm \
#     --save_outputs \
#     --overwrite \
#     --num_shots 5
# done