pere committed
Commit 4daa20a
1 Parent(s): 4f68aa6

training scripts

batch_norcola.sentiment.sh ADDED
@@ -0,0 +1,80 @@
#!/bin/bash
PROJECT_DIR="${HOME}/models/t5-nynorsk-norbench"
export PYTHONPATH=${PROJECT_DIR}
echo "PROJECT_DIR is set to: ${PROJECT_DIR}"


FINETUNE_STEPS=30000
EVAL_PREFIX="norbench/eval_norcola_sentiment_"
MODEL_BUCKET_DIR="gs://pere-north-t5x/finetuned/"

# The arrays below are parallel: entry N of each array together defines
# experiment N (checkpoint, run name, task, learning rate, gin file,
# starting step, and experiment id).
CHECKPOINT_LIST=(
"pere-north-t5x/pretrained_models/base/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
"pere-north-t5x/pretrained_models/base/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
"pere-north-t5x/pretrained_models/base/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
"pere-north-t5x/pretrained_models/base/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
"pere-north-t5x/pretrained_models/base/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
"pere-north-t5x/pretrained_models/base/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
)

NAME_LIST=(
"north_t5_base_NCC_LR_0_00001"
"north_t5_base_NCC_LR_0_0001"
"north_t5_base_NCC_LR_0_0005"
"north_t5_base_NCC_LR_0_00075"
"north_t5_base_NCC_LR_0_001"
"north_t5_base_NCC_LR_0_002"
)

TASK_LIST=("norcola" "norcola" "norcola" "norcola" "norcola" "norcola")
LR_LIST=(0.00001 0.0001 0.0005 0.00075 0.001 0.002)

GIN_LIST=(
"finetune_mt5_norcola_128.gin"
"finetune_mt5_norcola_128.gin"
"finetune_mt5_norcola_128.gin"
"finetune_mt5_norcola_128.gin"
"finetune_mt5_norcola_128.gin"
"finetune_mt5_norcola_128.gin"
)

START_LIST=(1500000 1500000 1500000 1500000 1500000 1500000)
EXP_LIST=(1191 1192 1193 1194 1195 1196)

VERSION_LIST=("1")

# Default to 0 when no argument is given, so the validation below reports
# the error instead of the arithmetic expansion failing.
index=$((${1:-0} + 0))

if [ $# -ne 1 ] || [ $index -lt 1 ] || [ $index -gt ${#CHECKPOINT_LIST[@]} ]; then
  echo "Error: you must pass the number of one of the checkpoints below as the single parameter."
  for i in "${!CHECKPOINT_LIST[@]}"; do
    echo "$((i+1)). ${CHECKPOINT_LIST[i]}"
  done
  exit 1
fi

for v in "${VERSION_LIST[@]}"; do
  i=$((index - 1))
  INITIAL_CHECKPOINT_PATH="gs://${CHECKPOINT_LIST[i]}"

  TRAIN_STEPS=$((START_LIST[i] + FINETUNE_STEPS))
  GIN_FILE=${GIN_LIST[i]}
  MIXTURE_OR_TASK_NAME=${TASK_LIST[i]}
  LR=${LR_LIST[i]}
  MODEL_DIR="${MODEL_BUCKET_DIR}${EVAL_PREFIX}_exp${EXP_LIST[i]}_${NAME_LIST[i]}_v${v}"

  command="python3 ../../t5x/t5x/train.py --gin_search_paths=\"./\" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin.LR=${LR} --gin_file=${GIN_FILE} --gin.INITIAL_CHECKPOINT_PATH=\\\"${INITIAL_CHECKPOINT_PATH}\\\" --gin.MIXTURE_OR_TASK_NAME=\\\"${MIXTURE_OR_TASK_NAME}\\\" --gin.MODEL_DIR=\\\"${MODEL_DIR}\\\""
  # Print the assembled command for logging, then run it.
  echo "${command}"
  eval "${command}"
done

# Earlier translation fine-tuning commands, kept commented out for reference:
#python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_translate_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"translate_long\" --gin.MODEL_DIR=\"gs://nb-t5x-us-central2/finetuned/nynorsk_NCC_base_v1\" &&
#python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_translate_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"translate_long\" --gin.MODEL_DIR=\"gs://nb-t5x-us-central2/finetuned/nynorsk_NCC_base_v2\" &&
#python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_translate_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"translate_long\" --gin.MODEL_DIR=\"gs://nb-t5x-us-central2/finetuned/nynorsk_NCC_base_v3\" &&
#python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_translate_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"translate_long\" --gin.MODEL_DIR=\"gs://nb-t5x-us-central2/finetuned/nynorsk_NCC_base_v4\" &&
#python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_translate_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"translate_long\" --gin.MODEL_DIR=\"gs://nb-t5x-us-central2/finetuned/nynorsk_NCC_base_v5\"
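Usage sketch (an illustration, assuming the script is run from its own directory on a machine with access to the gs:// buckets): the single argument picks one of the six experiments above.

bash batch_norcola.sentiment.sh 3   # runs exp 1193, north_t5_base_NCC_LR_0_0005

Because the assembled t5x command is echoed before it is executed, commenting out the eval line turns the script into a dry run that only prints the command.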
finetune_mt5_norcola_128.gin ADDED
@@ -0,0 +1,52 @@
from __gin__ import dynamic_registration
import tasks
import seqio
import optax

import __main__ as train_script
from t5.data import mixtures
from t5x import models
from t5x import partitioning
from t5x import utils

include 't5x/examples/t5/mt5/base.gin'
include 't5x/configs/runs/finetune.gin'

MIXTURE_OR_TASK_NAME = %gin.REQUIRED
TASK_FEATURE_LENGTHS = {"inputs": 256, "targets": 256}
INITIAL_CHECKPOINT_PATH = %gin.REQUIRED
LR = %gin.REQUIRED
TRAIN_STEPS = %gin.REQUIRED  # 1500000 pre-training steps + 30000 fine-tuning steps, set by the batch script.
USE_CACHED_TASKS = False
DROPOUT_RATE = 0.1
RANDOM_SEED = 0
BATCH_SIZE = 128
EVAL_PERIOD = 907

# Fix: make infer_eval use the same feature lengths as training.
infer_eval/utils.DatasetConfig:
  task_feature_lengths = %TASK_FEATURE_LENGTHS

# Save a checkpoint every 907 steps, keeping only the most recent one.
utils.SaveCheckpointConfig:
  period = 907
  keep = 1  # number of checkpoints to keep

# Alternative AdamW optimizer, kept commented out for reference:
#import t5x.optimizers
#OPTIMIZER = @optax.adamw
#optax.adamw.learning_rate = %LR
#optax.adamw.weight_decay = 0.1

utils.create_learning_rate_scheduler:
  factors = 'constant'
  base_learning_rate = %LR
  warmup_steps = 1000

# Might have to be changed based on the architecture:
# partitioning.PjitPartitioner.num_partitions = 1
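A rough cross-check of the shared eval/checkpoint period (my own arithmetic; the one-epoch reading is an assumption, not stated in the config): 907 steps at BATCH_SIZE = 128 covers 907 * 128 = 116096 examples, presumably about one pass over the fine-tuning split.

echo $((907 * 128))   # -> 116096 examples per eval/checkpoint period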