@echo off
rem Keep all 'set' assignments below local to this script so they do not
rem leak into the caller's cmd session after the script exits.
setlocal

rem Activate the Conda environment; abort with a clear message on failure.
rem 'call' is required so control returns here after the conda batch script.
call conda activate Ada_NLU  || (
    echo Failed to activate Conda environment.
    echo Please check environment name and ensure Conda is initialized.
    pause
    exit /b 1
)

rem Run Weights & Biases in offline mode (no network sync during training).
set WANDB_MODE=offline

rem Experiment configuration parameters: GLUE task name plus the
rem hyper-parameter grids swept by the nested loops below.
set task_name=cola
set seed_list=1 2 3
set lora_r_list=2 8
set use_sr_list=True False

echo Running with task: %task_name%

rem Full grid sweep: every (seed, lora_r, use_sr) combination.
for %%s in (%seed_list%) do (
    for %%r in (%lora_r_list%) do (
        for %%u in (%use_sr_list%) do (
            echo.
            echo ========================================
            echo Running %task_name% with seed: %%s, lora_r: %%r, use_sr: %%u
            echo ========================================
            rem Must match the --root_output_dir argument passed below.
            echo root_output_dir is ./_results/%task_name%_seed_%%s_rank_%%r_sr_%%u

            python ../examples/text-classification/run_glue.py ^
            --model_name_or_path ../microsoft/deberta-v3-base ^
            --task_name %task_name% ^
            --apply_lora ^
            --lora_type svd ^
            --RS_flag True ^
            --mode noclampAda ^
            --target_rank 4   ^
            --lora_r %%r  ^
            --reg_orth_coef 0.1 ^
            --init_warmup 18 --final_warmup 35 ^
            --mask_interval 10 ^
            --beta1 0.85 --beta2 0.85 ^
            --lora_module query,key,value,intermediate,layer.output,attention.output ^
            --lora_alpha 32 ^
            --do_train ^
            --do_eval ^
            --max_seq_length 64 ^
            --per_device_train_batch_size 32 ^
            --learning_rate 8e-4 ^
            --num_train_epochs 25 ^
            --warmup_steps 100 ^
            --cls_dropout 0.10 ^
            --weight_decay 0.00 ^
            --evaluation_strategy steps ^
            --eval_steps 100 ^
            --save_strategy steps ^
            --save_steps 10000 ^
            --logging_steps 10 ^
            --tb_writter_loginterval 100 ^
            --report_to tensorboard ^
            --seed %%s ^
            --root_output_dir ./_results/%task_name%_seed_%%s_rank_%%r_sr_%%u ^
            --overwrite_output_dir ^
            --use_sr_rank_allocation %%u ^
            --calib_batch_size 16
            rem 'if errorlevel 1' reads the live exit code at run time, so it
            rem works inside this parenthesized loop without delayed expansion.
            if errorlevel 1 (
                echo.
                echo WARNING: python exited with an error for seed %%s, lora_r %%r, use_sr %%u
            ) else (
                echo.
                echo Completed seed %%s, lora_r %%r, use_sr %%u successfully
            )
        )
    )
)

rem Final summary banner; 'pause' keeps the console window open so the
rem results are visible when the script is launched by double-click.
echo.
echo ========================================
echo All %task_name% experiments completed!
echo Total seeds processed: %seed_list%
echo Total lora_r values: %lora_r_list%
echo Total use_sr_rank_allocation values: %use_sr_list%
echo ========================================
echo.
pause