#!/usr/bin/env bash
#
# Hyperparameter grid search for the BiLSTMAttn model.
# Runs main.py (in the parent directory) once per combination of
# batch size x dropout x learning rate, saving each trained model.
#
# Sweep (in original run order):
#   BATCH_SIZE: 8, 16, 32
#   DP (dropout): 0.5, 0.4, 0.6
#   LR (learning rate): 0.25, 0.3, 0.5, 0.6, 0.7
#
# Requires: python on PATH, main.py in the parent directory.

set -u  # error on unset variables; deliberately NOT -e so one failed
        # run does not abort the rest of the sweep (matches original
        # behavior, where each invocation was independent).

cd .. || { echo "error: cannot cd to parent directory" >&2; exit 1; }

export CUDA_VISIBLE_DEVICES=0
export PYTHONUNBUFFERED=1

# Fixed hyperparameters for every run.
MODEL=BiLSTMAttn
GRAM=1
WE=False        # word_Embedding flag (was re-set identically before each sub-sweep)

for BATCH_SIZE in 8 16 32; do
  for DP in 0.5 0.4 0.6; do
    for LR in 0.25 0.3 0.5 0.6 0.7; do
      echo "=== batch_size=${BATCH_SIZE} dropout=${DP} lr=${LR} ===" >&2
      python main.py \
        --model "$MODEL" \
        --gram "$GRAM" \
        --batch_size "$BATCH_SIZE" \
        --learning_rate "$LR" \
        --dropout "$DP" \
        --word_Embedding "$WE" \
        --save True \
        || echo "warn: run failed (batch_size=${BATCH_SIZE} dropout=${DP} lr=${LR})" >&2
    done
  done
done