pere committed
Commit: 59d3c8f
Parent: 6c2ab04
finetune_byt5_sentencefix.gin CHANGED
@@ -12,7 +12,7 @@ include "t5x/configs/runs/finetune.gin"
 
 MIXTURE_OR_TASK_NAME = "sentencefix"
 TASK_FEATURE_LENGTHS = {"inputs": 256, "targets": 256}
-TRAIN_STEPS = 1_100_000 # 1000000 pre-trained steps + 100000 fine-tuning steps.
+TRAIN_STEPS = 1_200_000 # 1000000 pre-trained steps + 200000 fine-tuning steps.
 USE_CACHED_TASKS = False
 DROPOUT_RATE = 0.0
 RANDOM_SEED = 0
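Note: in t5x, TRAIN_STEPS is the absolute final step count, so fine-tuning continues from the checkpoint's step number and the fine-tuning budget is the difference. A minimal sketch of the arithmetic behind the new value (Python; the variable names are illustrative, not t5x API):

    # TRAIN_STEPS counts total steps, pre-training included.
    PRETRAINED_STEPS = 1_000_000  # step of the ByT5 checkpoint (model.ckpt-1000000)
    FINETUNE_STEPS = 200_000      # fine-tuning budget after this commit
    TRAIN_STEPS = PRETRAINED_STEPS + FINETUNE_STEPS
    assert TRAIN_STEPS == 1_200_000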
finetune_large_byt5_sentencefix.gin ADDED
@@ -0,0 +1,41 @@
+from __gin__ import dynamic_registration
+import tasks
+
+import __main__ as train_script
+from t5.data import mixtures
+from t5x import models
+from t5x import partitioning
+from t5x import utils
+
+include "t5x/examples/t5/byt5/large.gin"
+include "t5x/configs/runs/finetune.gin"
+
+MIXTURE_OR_TASK_NAME = "sentencefix"
+TASK_FEATURE_LENGTHS = {"inputs": 256, "targets": 256}
+TRAIN_STEPS = 1_200_000 # 1000000 pre-trained steps + 200000 fine-tuning steps.
+USE_CACHED_TASKS = False
+DROPOUT_RATE = 0.0
+RANDOM_SEED = 0
+
+# `LOSS_NORMALIZING_FACTOR`: When fine-tuning a model that was pre-trained
+# using Mesh Tensorflow (e.g. the public T5 / mT5 / ByT5 models), this should be
+# set to `pretraining batch_size` * `target_token_length`. For T5 and T5.1.1:
+# `2048 * 114`. For mT5: `1024 * 229`. For ByT5: `1024 * 189`.
+LOSS_NORMALIZING_FACTOR = 193536
+INITIAL_CHECKPOINT_PATH = "gs://t5-data/pretrained_models/byt5/large/model.ckpt-1000000"
+
+train_script.train:
+  eval_period = 500
+  partitioner = @partitioning.ModelBasedPjitPartitioner()
+
+# `num_decodes` is equivalent to the beam size in beam search decoding.
+models.EncoderDecoderModel.predict_batch_with_aux.num_decodes = 4
+
+partitioning.ModelBasedPjitPartitioner.num_partitions = 2
+
+
+#from t5.models import mesh_transformer
+#import t5.models
+#mesh_transformer.learning_rate_schedules.constant_learning_rate.learning_rate = 0.0005
+#run.learning_rate_schedule = @learning_rate_schedules.constant_learning_rate
+
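Note: the LOSS_NORMALIZING_FACTOR comment gives the rule `pretraining batch_size` * `target_token_length`; the hard-coded 193536 is the ByT5 case. A quick arithmetic check (Python; just verifying the values quoted in the config comment):

    # Verify the products quoted in the LOSS_NORMALIZING_FACTOR comment.
    assert 1024 * 189 == 193536   # ByT5 (the value used above)
    assert 2048 * 114 == 233472   # T5 / T5.1.1
    assert 1024 * 229 == 234496   # mT5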
train.sh CHANGED
@@ -1,6 +1,5 @@
 PROJECT_DIR=${HOME}"/models/multi-sentencefix-byt5"
 T5X_DIR="../../t5x" # directory where t5x is cloned.
-TFDS_DATA_DIR="gs://nb-t5x/corpus_multi_sentencefix_byt5"
 MODEL_DIR="gs://nb-t5x/model_multi_sentencefix_byt5"
 export PYTHONPATH=${PROJECT_DIR}
 
train_large.sh ADDED
@@ -0,0 +1,10 @@
+PROJECT_DIR=${HOME}"/models/multi-sentencefix-byt5"
+MODEL_DIR="gs://nb-t5x/model_murge_lti_sentencefix_byt5"
+export PYTHONPATH=${PROJECT_DIR}
+
+python3 ${T5X_DIR}/t5x/train.py \
+  --gin_search_paths=${PROJECT_DIR} \
+  --gin_file="finetune_large_byt5_sentencefix.gin" \
+  --gin.MODEL_DIR="'${MODEL_DIR}'" \
+  --tfds_data_dir=${TFDS_DATA_DIR}
+
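Note: the nested quotes in --gin.MODEL_DIR="'${MODEL_DIR}'" are deliberate: gin parses override values as gin expressions, so a string must still carry its own quotes after the shell strips the outer pair. Also, as committed, train_large.sh reads ${T5X_DIR} and ${TFDS_DATA_DIR} without defining them, so both are assumed to be exported by the caller (train.sh defines T5X_DIR, and this commit removes TFDS_DATA_DIR from it). A minimal sketch of the quoting expansion (Python; the value is the one from the script):

    # What the shell does with --gin.MODEL_DIR="'${MODEL_DIR}'".
    model_dir = "gs://nb-t5x/model_murge_lti_sentencefix_byt5"
    as_written = f"--gin.MODEL_DIR=\"'{model_dir}'\""  # flag as written in train_large.sh
    after_shell = f"--gin.MODEL_DIR='{model_dir}'"     # argv entry train.py actually sees
    print(as_written)
    print(after_shell)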