#!/usr/bin/env bash
# Generate unigram-watermarked text with Llama-2-7b on the realnews dataset,
# then score the watermarked text against the human-written reference.
# Requires: watermark_generate.py, dataset/realnews/human.json, CUDA GPU.
set -euo pipefail   # abort if generation fails so detection never runs on stale output

readonly foundation_model=/data2/pretrain/meta-llama/Llama-2-7b-hf
readonly bsz=64              # batch size; 64 fits on a 48G GPU
readonly dataset=realnews
# Watermark hyperparameters — shared by generation and detection;
# keep them identical or the detector will mis-score the text.
readonly gamma=0.5           # green-list fraction
readonly delta=2.0           # logit bias added to green-list tokens
readonly wm_path=output_text.json   # watermarked generations (step 1 output, step 2 input)

# Step 1: generate watermarked continuations.
python3 watermark_generate.py \
    --output_path "${wm_path}" \
    --model_name "${foundation_model}" \
    --dataset "${dataset}" \
    --baseline unigram \
    --gamma "${gamma}" \
    --delta "${delta}" \
    --do_generate \
    --batch_size "${bsz}"

# Step 2: score watermarked vs. human text with the same watermark settings.
python3 watermark_generate.py \
    --wm_path "${wm_path}" \
    --unwm_path dataset/realnews/human.json \
    --output_path output_score.json \
    --model_name "${foundation_model}" \
    --dataset "${dataset}" \
    --baseline unigram \
    --gamma "${gamma}" \
    --delta "${delta}" \
    --do_detect \
    --batch_size "${bsz}"