# LongT5 Base model. Config based on T5.1.1 Base model.
# Provides MODEL
from __gin__ import dynamic_registration

import seqio
from t5x import adafactor
from t5x import models
import tasks

ARCHITECTURE = %gin.REQUIRED

include 'flaxformer/t5x/configs/longt5/architectures/longt5_1_1_flaxformer.gin'

include 't5x/configs/runs/pretrain.gin'
#include 'pretrain_cont.gin'

MIXTURE_OR_TASK_NAME = "ncc_scandinavian_span_corruption_stream"
TASK_FEATURE_LENGTHS = {"inputs": 4048, "targets": 910}
# NOTE: the intended batch size is 128; currently set to 32.
BATCH_SIZE = 32
TRAIN_STEPS = 1_000_000
DROPOUT_RATE = 0.0  # Changed from the default since T5-1.1 recommends this.
#INITIAL_CHECKPOINT_PATH = "gs://nb-t5x-us-central2/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
#PjitPartitioner.num_partitions = 1


# Architecture overrides
NUM_HEADS = 12
NUM_ENCODER_LAYERS = 12
NUM_DECODER_LAYERS = 12
HEAD_DIM = 64
EMBED_DIM = 768
MLP_DIM = 2048

# Loss HParam defaults
Z_LOSS = 0.0001
LABEL_SMOOTHING = 0.0
LOSS_NORMALIZING_FACTOR = None

# Vocabulary (shared by encoder and decoder)
VOCABULARY = @seqio.SentencePieceVocabulary()
seqio.SentencePieceVocabulary.sentencepiece_model_file = "gs://t5-data/vocabs/cc_all.32000.100extra/sentencepiece.model"
NUM_EMBEDDINGS = 32128  # vocab size rounded to a multiple of 128 for TPU efficiency

# Optimizer
# `learning_rate` is set by `Trainer.learning_rate_fn`.
OPTIMIZER = @adafactor.Adafactor()
adafactor.Adafactor:
  decay_rate = 0.8
  step_offset = 0

# Model
MODEL = @models.EncoderDecoderModel()
models.EncoderDecoderModel:
  module = %ARCHITECTURE  # Provided by the longt5_1_1_flaxformer.gin include.
  input_vocabulary = %VOCABULARY
  output_vocabulary = %VOCABULARY
  optimizer_def = %OPTIMIZER
  z_loss = %Z_LOSS
  label_smoothing = %LABEL_SMOOTHING
  loss_normalizing_factor = %LOSS_NORMALIZING_FACTOR
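
# Example launch (a sketch, not part of this config): the flags below are
# standard t5x train flags, but MODEL_DIR and the gin file name here are
# placeholders, not values taken from this repo.
#
#   python3 -m t5x.train \
#     --gin_search_paths="flaxformer/t5x/configs" \
#     --gin_file="longt5_1_1_base.gin" \
#     --gin.MODEL_DIR="'gs://your-bucket/longt5_base'"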