pere committed on
Commit
d79df78
1 Parent(s): 9976013
batch_lrtest_bs32_decay10e.sh ADDED
@@ -0,0 +1,81 @@
+ #!/bin/bash
+ PROJECT_DIR=${HOME}"/models/t5-nynorsk-norbench"
+ export PYTHONPATH=${PROJECT_DIR}
+ echo "PROJECT_DIR is set to: ${PROJECT_DIR}"
+
+ FINETUNE_STEPS=3120
+ EVAL_PREFIX="norbench/eval_lr_translate_ltgstyle_decay10e"
+ MODEL_BUCKET_DIR="gs://pere-north-t5x/finetuned/"
+
+ # One entry per experiment; all six runs start from the same checkpoint.
+ CHECKPOINT_LIST=(
+   "pere-north-t5x/pretrained_models/base/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
+   "pere-north-t5x/pretrained_models/base/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
+   "pere-north-t5x/pretrained_models/base/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
+   "pere-north-t5x/pretrained_models/base/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
+   "pere-north-t5x/pretrained_models/base/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
+   "pere-north-t5x/pretrained_models/base/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
+ )
+
+ NAME_LIST=(
+   "north_t5_base_NCC_LR_0_00001"
+   "north_t5_base_NCC_LR_0_0001"
+   "north_t5_base_NCC_LR_0_0005"
+   "north_t5_base_NCC_LR_0_00075"
+   "north_t5_base_NCC_LR_0_001"
+   "north_t5_base_NCC_LR_0_002"
+ )
+
+ TASK_LIST=("translate_mt5" "translate_mt5" "translate_mt5" "translate_mt5" "translate_mt5" "translate_mt5")
+ LR_LIST=(0.00001 0.0001 0.0005 0.00075 0.001 0.002)
+
+ # All six runs use the same gin file.
+ GIN_LIST=(
+   "finetune_translate_base_mt5_lr_ltgstyle_decay10e.gin"
+   "finetune_translate_base_mt5_lr_ltgstyle_decay10e.gin"
+   "finetune_translate_base_mt5_lr_ltgstyle_decay10e.gin"
+   "finetune_translate_base_mt5_lr_ltgstyle_decay10e.gin"
+   "finetune_translate_base_mt5_lr_ltgstyle_decay10e.gin"
+   "finetune_translate_base_mt5_lr_ltgstyle_decay10e.gin"
+ )
+
+ START_LIST=(1500000 1500000 1500000 1500000 1500000 1500000)
+ EXP_LIST=(141 142 143 144 145 146)
+
+ VERSION_LIST=("1")
+
+ index=$(($1 + 0))
+
+ if [ $# -ne 1 ] || [ $index -lt 1 ] || [ $index -gt ${#CHECKPOINT_LIST[@]} ]; then
+   echo "Error: You need to provide the number of one of the checkpoints below as a parameter."
+   for i in "${!CHECKPOINT_LIST[@]}"; do
+     echo "$((i+1)). ${CHECKPOINT_LIST[i]}"
+   done
+   exit 1
+ fi
+
+ for v in "${VERSION_LIST[@]}"; do
+   i=$((index - 1))
+   INITIAL_CHECKPOINT_PATH="gs://${CHECKPOINT_LIST[i]}"
+
+   TRAIN_STEPS=$((START_LIST[i] + FINETUNE_STEPS))
+   GIN_FILE=${GIN_LIST[i]}
+   MIXTURE_OR_TASK_NAME=${TASK_LIST[i]}
+   LR=${LR_LIST[i]}
+   MODEL_DIR="${MODEL_BUCKET_DIR}${EVAL_PREFIX}_exp${EXP_LIST[i]}_${NAME_LIST[i]}_v${v}"
+
+   command="python3 ../../t5x/t5x/train.py --gin_search_paths=\"./\" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin.LR=${LR} --gin_file=${GIN_FILE} --gin.INITIAL_CHECKPOINT_PATH=\\\"${INITIAL_CHECKPOINT_PATH}\\\" --gin.MIXTURE_OR_TASK_NAME=\\\"${MIXTURE_OR_TASK_NAME}\\\" --gin.MODEL_DIR=\\\"${MODEL_DIR}\\\""
+   # Log the full command, then run it.
+   echo "${command}"
+   eval "${command}"
+ done
+
+ # Earlier manual runs, kept for reference:
+ #python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_translate_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"translate_long\" --gin.MODEL_DIR=\"gs://nb-t5x-us-central2/finetuned/nynorsk_NCC_base_v1\" &&
+ #python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_translate_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"translate_long\" --gin.MODEL_DIR=\"gs://nb-t5x-us-central2/finetuned/nynorsk_NCC_base_v2\" &&
+ #python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_translate_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"translate_long\" --gin.MODEL_DIR=\"gs://nb-t5x-us-central2/finetuned/nynorsk_NCC_base_v3\" &&
+ #python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_translate_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"translate_long\" --gin.MODEL_DIR=\"gs://nb-t5x-us-central2/finetuned/nynorsk_NCC_base_v4\" &&
+ #python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_translate_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"translate_long\" --gin.MODEL_DIR=\"gs://nb-t5x-us-central2/finetuned/nynorsk_NCC_base_v5\"
+
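For reference, a typical invocation of the sweep script above. The single argument selects the experiment index (1 through 6); running it without arguments lists the available indices:

    bash batch_lrtest_bs32_decay10e.sh 3   # experiment 143: LR = 0.0005
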
finetune_translate_base_mt5_lr_decay.gin CHANGED
@@ -31,7 +31,6 @@ utils.SaveCheckpointConfig:
 
 utils.create_learning_rate_scheduler:
   factors = 'constant * rsqrt_decay'
-  factors = 'constant'
  base_learning_rate = %LR
  warmup_steps = 1000
 
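In gin, when the same parameter is bound twice, the later binding takes effect, so this file previously ran the scheduler with factors = 'constant' (no decay); removing the stray line restores the intended rsqrt decay.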
finetune_translate_base_mt5_lr_ltgstyle_decay10e.gin ADDED
@@ -0,0 +1,51 @@
+ from __gin__ import dynamic_registration
+ import tasks
+ import seqio
+ import optax
+
+ import __main__ as train_script
+ from t5.data import mixtures
+ from t5x import models
+ from t5x import partitioning
+ from t5x import utils
+
+ include 't5x/examples/t5/mt5/base.gin'
+ include 't5x/configs/runs/finetune.gin'
+
+ MIXTURE_OR_TASK_NAME = %gin.REQUIRED
+ TASK_FEATURE_LENGTHS = {"inputs": 256, "targets": 256}
+ INITIAL_CHECKPOINT_PATH = %gin.REQUIRED
+ LR = %gin.REQUIRED
+ TRAIN_STEPS = %gin.REQUIRED  # Pre-trained steps plus fine-tuning steps; set by the batch script.
+ USE_CACHED_TASKS = False
+ DROPOUT_RATE = 0.1
+ RANDOM_SEED = 0
+ BATCH_SIZE = 32
+
+ # Fix a small error: infer_eval must use the same feature lengths as training.
+ infer_eval/utils.DatasetConfig:
+   task_feature_lengths = %TASK_FEATURE_LENGTHS
+
+ # Save a checkpoint every 300 steps; keep only the most recent one.
+ utils.SaveCheckpointConfig:
+   period = 300
+   keep = 1  # Number of checkpoints to keep.
+
+ # Earlier AdamW experiments, kept for reference:
+ # import t5x.optimizers
+ # OPTIMIZER = @optax.adamw
+ # optax.adamw.learning_rate = %LR
+ # optax.adamw.weight_decay = 0.1
+
+ utils.create_learning_rate_scheduler:
+   factors = 'constant * rsqrt_decay'
+   base_learning_rate = %LR
+   warmup_steps = 187
+
+ # Might have to be changed based on the architecture:
+ # partitioning.PjitPartitioner.num_partitions = 1
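A note on the schedule above: assuming T5X's standard 'rsqrt_decay' factor, the effective learning rate is base_lr / sqrt(max(step, warmup_steps)) evaluated at the global step, so fine-tuning that resumes at step 1 500 000 scales %LR down by a factor of roughly 1225 before it reaches the optimizer. A minimal sketch under that assumption (the helper name is hypothetical):

    # Sketch only: effective LR under 'constant * rsqrt_decay',
    # i.e. base_lr / sqrt(max(step, warmup_steps)).
    rsqrt_decay_lr() {
      local base_lr=$1 warmup_steps=$2 step=$3
      awk -v lr="$base_lr" -v w="$warmup_steps" -v s="$step" \
          'BEGIN { m = (s > w) ? s : w; printf "%.10f\n", lr / sqrt(m) }'
    }

    rsqrt_decay_lr 0.001 187 1500000   # ~0.0000008165 at the first fine-tuning step
    rsqrt_decay_lr 0.001 187 1503120   # marginally lower at the final step

This scaling may be why the swept %LR values in the batch script look unusually large for fine-tuning.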