from __gin__ import dynamic_registration

import tasks
import __main__ as train_script
from t5.data import mixtures
from t5x import models
from t5x import partitioning
from t5x import utils

include "t5x/examples/t5/mt5/large.gin"
include "t5x/configs/runs/finetune.gin"

MIXTURE_OR_TASK_NAME = "sentencefix"
TASK_FEATURE_LENGTHS = {"inputs": 256, "targets": 256}
TRAIN_STEPS = 1_200_000  # 1000000 pre-trained steps + 200000 fine-tuning steps.
USE_CACHED_TASKS = False
DROPOUT_RATE = 0.0
RANDOM_SEED = 0
BATCH_SIZE = 64

# `LOSS_NORMALIZING_FACTOR`: When fine-tuning a model that was pre-trained
# using Mesh Tensorflow (e.g. the public T5 / mT5 / ByT5 models), this should be
# set to `pretraining batch_size` * `target_token_length`. For T5 and T5.1.1:
# `2048 * 114`. For mT5: `1024 * 229`. For ByT5: `1024 * 189`.
#LOSS_NORMALIZING_FACTOR = 234496

INITIAL_CHECKPOINT_PATH = "gs://t5-data/pretrained_models/t5x/mt5_large/checkpoint_1000000"

train_script.train:
  eval_period = 500
  partitioner = @partitioning.ModelBasedPjitPartitioner()

# `num_decodes` is equivalent to beam size in beam search decoding.
models.EncoderDecoderModel.predict_batch_with_aux.num_decodes = 4

partitioning.ModelBasedPjitPartitioner.num_partitions = 2

#from t5.models import mesh_transformer
#import t5.models
#mesh_transformer.learning_rate_schedules.constant_learning_rate.learning_rate = 0.0005
#run.learning_rate_schedule = @learning_rate_schedules.constant_learning_rate
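
# Example launch command (a sketch only; the gin file name, MODEL_DIR bucket,
# and TFDS data dir below are placeholders, and the exact T5X entry point and
# flags may differ across T5X versions). Run it from the directory containing
# tasks.py so that the `import tasks` line above resolves:
#
#   python3 ${T5X_DIR}/t5x/train.py \
#     --gin_file="finetune_mt5_large_sentencefix.gin" \
#     --gin.MODEL_DIR=\"${MODEL_DIR}\" \
#     --tfds_data_dir=${TFDS_DATA_DIR} \
#     --alsologtostderr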