@echo off
REM Launcher for AdaLoRA ablation runs on GLUE/CoLA (DeBERTa-v3-base).
REM Requires a Conda environment named "NLU" with the project dependencies installed.
REM setlocal keeps WANDB_MODE and conda's PATH changes scoped to this script;
REM child python processes still inherit them, but the caller's session is untouched.
setlocal
call conda activate NLU  || (
    echo Failed to activate Conda environment.
    echo Please check environment name and ensure Conda is initialized.
    pause
    exit /b 1
)

REM Run Weights and Biases logging in offline mode for all runs below.
set WANDB_MODE=offline

::echo Running with task: cola, seed: 20, rank: 2, mode: noclampAda, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode noclampAda --target_rank 2   --lora_r 4  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --seed 20 --root_output_dir ../output/Ablation/Ada-none-RsTrue-noclamp/cola-seed20-rank2to4 --overwrite_output_dir

REM Active run: AdaLoRA without clamping (mode noclampAda), CoLA, seed 20,
REM target rank 4 pruned from initial lora_r 6; RS_flag enabled.
echo Running with task: cola, seed: 20, rank: 4, mode: noclampAda, Iidea: none, RS_flag: True
python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode noclampAda --target_rank 4   --lora_r 6  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --seed 20 --root_output_dir ../output/Ablation/Ada-none-RsTrue-noclamp/cola-seed20-rank4to6 --overwrite_output_dir
REM Surface a failed run instead of silently continuing to the next one.
if errorlevel 1 echo WARNING: run cola-seed20-rank4to6 (noclampAda) exited with code %errorlevel%.

::echo Running with task: cola, seed: 20, rank: 8, mode: noclampAda, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode noclampAda --target_rank 8   --lora_r 12  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64  --per_device_train_batch_size 32 --learning_rate 8e-4   --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --seed 20 --root_output_dir ../output/Ablation/Ada-none-RsTrue-noclamp/cola-seed20-rank8to12 --overwrite_output_dir

::echo Running with task: cola, seed: 20, rank: 2, mode: Ada, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode Ada --target_rank 2   --lora_r 4  --seed 20 --root_output_dir ../output/Ablation/Ada-none-RsTrue/cola-seed20-rank2to4  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

REM Active run: standard AdaLoRA (mode Ada), CoLA, seed 20,
REM target rank 4 pruned from initial lora_r 6; RS_flag enabled.
echo Running with task: cola, seed: 20, rank: 4, mode: Ada, Iidea: none, RS_flag: True
python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode Ada --target_rank 4   --lora_r 6  --seed 20 --root_output_dir ../output/Ablation/Ada-none-RsTrue/cola-seed20-rank4to6  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir
REM Surface a failed run instead of silently continuing to the next one.
if errorlevel 1 echo WARNING: run cola-seed20-rank4to6 (Ada) exited with code %errorlevel%.

::echo Running with task: cola, seed: 20, rank: 8, mode: Ada, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode Ada --target_rank 8   --lora_r 12 --seed 20 --root_output_dir ../output/Ablation/Ada-none-RsTrue/cola-seed20-rank8to12 --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64  --per_device_train_batch_size 32 --learning_rate 8e-4   --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 20, rank: 2, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 2   --lora_r 4  --seed 20 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed20-rank2to4  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 20, rank: 4, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 4   --lora_r 6  --seed 20 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed20-rank4to6  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 20, rank: 8, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 8   --lora_r 12 --seed 20 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed20-rank8to12 --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64  --per_device_train_batch_size 32 --learning_rate 8e-4   --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir


::echo Running with task: cola, seed: 25, rank: 2, mode: Ada, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode Ada --target_rank 2   --lora_r 4  --seed 25 --root_output_dir ../output/Ablation/Ada-none-RsTrue/cola-seed25-rank2to4  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 25, rank: 4, mode: Ada, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode Ada --target_rank 4   --lora_r 6  --seed 25 --root_output_dir ../output/Ablation/Ada-none-RsTrue/cola-seed25-rank4to6  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 25, rank: 8, mode: Ada, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode Ada --target_rank 8   --lora_r 12 --seed 25 --root_output_dir ../output/Ablation/Ada-none-RsTrue/cola-seed25-rank8to12 --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64  --per_device_train_batch_size 32 --learning_rate 8e-4   --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 25, rank: 2, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 2   --lora_r 4  --seed 25 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed25-rank2to4  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 25, rank: 4, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 4   --lora_r 6  --seed 25 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed25-rank4to6  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 25, rank: 8, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 8   --lora_r 12 --seed 25 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed25-rank8to12 --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64  --per_device_train_batch_size 32 --learning_rate 8e-4   --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir


::echo Running with task: cola, seed: 25, rank: 2, mode: noclampAda, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode noclampAda --target_rank 2   --lora_r 4  --seed 25 --root_output_dir ../output/Ablation/Ada-none-RsTrue-noclamp/cola-seed25-rank2to4  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 25, rank: 4, mode: noclampAda, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode noclampAda --target_rank 4   --lora_r 6  --seed 25 --root_output_dir ../output/Ablation/Ada-none-RsTrue-noclamp/cola-seed25-rank4to6  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 25, rank: 8, mode: noclampAda, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode noclampAda --target_rank 8   --lora_r 12 --seed 25 --root_output_dir ../output/Ablation/Ada-none-RsTrue-noclamp/cola-seed25-rank8to12 --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64  --per_device_train_batch_size 32 --learning_rate 8e-4   --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir


::echo Running with task: cola, seed: 30, rank: 2, mode: Ada, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode Ada --target_rank 2   --lora_r 4  --seed 30 --root_output_dir ../output/Ablation/Ada-none-RsTrue/cola-seed30-rank2to4  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 30, rank: 4, mode: Ada, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode Ada --target_rank 4   --lora_r 6  --seed 30 --root_output_dir ../output/Ablation/Ada-none-RsTrue/cola-seed30-rank4to6  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 30, rank: 8, mode: Ada, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode Ada --target_rank 8   --lora_r 12 --seed 30 --root_output_dir ../output/Ablation/Ada-none-RsTrue/cola-seed30-rank8to12 --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64  --per_device_train_batch_size 32 --learning_rate 8e-4   --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 30, rank: 2, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 2   --lora_r 4  --seed 30 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed30-rank2to4  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 30, rank: 4, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 4   --lora_r 6  --seed 30 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed30-rank4to6  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 30, rank: 8, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 8   --lora_r 12 --seed 30 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed30-rank8to12 --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64  --per_device_train_batch_size 32 --learning_rate 8e-4   --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 30, rank: 2, mode: noclampAda, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode noclampAda --target_rank 2   --lora_r 4  --seed 30 --root_output_dir ../output/Ablation/Ada-none-RsTrue-noclamp/cola-seed30-rank2to4  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 30, rank: 4, mode: noclampAda, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode noclampAda --target_rank 4   --lora_r 6  --seed 30 --root_output_dir ../output/Ablation/Ada-none-RsTrue-noclamp/cola-seed30-rank4to6  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 30, rank: 8, mode: noclampAda, Iidea: none, RS_flag: True
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag True --mode noclampAda --target_rank 8   --lora_r 12 --seed 30 --root_output_dir ../output/Ablation/Ada-none-RsTrue-noclamp/cola-seed30-rank8to12 --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64  --per_device_train_batch_size 32 --learning_rate 8e-4   --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir



::echo Running with task: cola, seed: 5, rank: 2, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 2   --lora_r 4  --seed 5 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed5-rank2to4  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 5, rank: 4, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 4   --lora_r 6  --seed 5 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed5-rank4to6  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 5, rank: 8, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 8   --lora_r 12 --seed 5 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed5-rank8to12 --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64  --per_device_train_batch_size 32 --learning_rate 8e-4   --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 10, rank: 2, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 2   --lora_r 4  --seed 10 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed10-rank2to4  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 10, rank: 4, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 4   --lora_r 6  --seed 10 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed10-rank4to6  --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64 --per_device_train_batch_size 32 --learning_rate 8e-4 --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

::echo Running with task: cola, seed: 10, rank: 8, mode: Ada, Iidea: none, RS_flag: False
::python ../examples/text-classification/run_glue.py --model_name_or_path ../microsoft/deberta-v3-base --task_name cola --apply_lora --apply_adalora --lora_type svd --RS_flag False --mode Ada --target_rank 8   --lora_r 12 --seed 10 --root_output_dir ../output/Ablation/Ada-none-RsFalse/cola-seed10-rank8to12 --reg_orth_coef 0.1 --init_warmup 800 --final_warmup 3500 --mask_interval 10 --beta1 0.85 --beta2 0.85 --lora_module query,key,value,intermediate,layer.output,attention.output --lora_alpha 32 --do_train --do_eval --max_seq_length 64  --per_device_train_batch_size 32 --learning_rate 8e-4   --num_train_epochs 25 --warmup_steps 100 --cls_dropout 0.10 --weight_decay 0.00 --evaluation_strategy steps --eval_steps 100 --save_strategy steps --save_steps 10000 --logging_steps 10 --tb_writter_loginterval 100 --report_to tensorboard --overwrite_output_dir

REM pause keeps the console window open so output remains visible
REM when the script is launched by double-click.
echo All scripts have been executed.
pause
