pere committed
Commit 2d58b74
1 Parent(s): fc7e00b

Commit from model create scripts

Files changed (45)
  1. .gitattributes +1 -0
  2. config.gin +152 -0
  3. config.json +30 -0
  4. flax_model.msgpack +3 -0
  5. model-info.txt +0 -0
  6. pytorch_model.bin +3 -0
  7. special_tokens_map.json +1 -0
  8. spiece.model +3 -0
  9. tf_model.h5 +3 -0
  10. tokenizer.json +3 -0
  11. tokenizer_config.json +1 -0
  12. train/events.out.tfevents.1650190955.t1v-n-6662dcba-w-0.3144387.0.v2 +3 -0
  13. train/events.out.tfevents.1650215678.t1v-n-6662dcba-w-0.3296289.0.v2 +3 -0
  14. train/events.out.tfevents.1650226182.t1v-n-6662dcba-w-0.3347961.0.v2 +3 -0
  15. train/events.out.tfevents.1650261561.t1v-n-6662dcba-w-0.3552115.0.v2 +3 -0
  16. train/events.out.tfevents.1650288063.t1v-n-9dce3dae-w-0.92778.0.v2 +3 -0
  17. train/events.out.tfevents.1650310356.t1v-n-9dce3dae-w-0.189410.0.v2 +3 -0
  18. train/events.out.tfevents.1650349932.t1v-n-9dce3dae-w-0.371713.0.v2 +3 -0
  19. train/events.out.tfevents.1650404082.t1v-n-9dce3dae-w-0.648263.0.v2 +3 -0
  20. train/events.out.tfevents.1650441615.t1v-n-bbc8ba53-w-1.123916.0.v2 +3 -0
  21. train/events.out.tfevents.1650450190.t1v-n-49630a28-w-1.65684.0.v2 +3 -0
  22. train/events.out.tfevents.1650474819.t1v-n-49630a28-w-1.170547.0.v2 +3 -0
  23. train/events.out.tfevents.1650489796.t1v-n-49645e71-w-0.96081.0.v2 +3 -0
  24. train/events.out.tfevents.1650520896.t1v-n-49645e71-w-0.224011.0.v2 +3 -0
  25. train/events.out.tfevents.1650539135.t1v-n-49645e71-w-0.360467.0.v2 +3 -0
  26. train/events.out.tfevents.1650559718.t1v-n-49645e71-w-0.458243.0.v2 +3 -0
  27. train/events.out.tfevents.1650559992.t1v-n-49645e71-w-0.462178.0.v2 +3 -0
  28. train/events.out.tfevents.1650688722.t1v-n-f3804d66-w-0.184783.0.v2 +3 -0
  29. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650190956.t1v-n-6662dcba-w-0.3144387.1.v2 +3 -0
  30. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650215678.t1v-n-6662dcba-w-0.3296289.1.v2 +3 -0
  31. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650226183.t1v-n-6662dcba-w-0.3347961.1.v2 +3 -0
  32. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650261562.t1v-n-6662dcba-w-0.3552115.1.v2 +3 -0
  33. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650288064.t1v-n-9dce3dae-w-0.92778.1.v2 +3 -0
  34. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650310356.t1v-n-9dce3dae-w-0.189410.1.v2 +3 -0
  35. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650349932.t1v-n-9dce3dae-w-0.371713.1.v2 +3 -0
  36. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650404082.t1v-n-9dce3dae-w-0.648263.1.v2 +3 -0
  37. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650441615.t1v-n-bbc8ba53-w-1.123916.1.v2 +3 -0
  38. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650450190.t1v-n-49630a28-w-1.65684.1.v2 +3 -0
  39. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650474819.t1v-n-49630a28-w-1.170547.1.v2 +3 -0
  40. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650489797.t1v-n-49645e71-w-0.96081.1.v2 +3 -0
  41. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650520896.t1v-n-49645e71-w-0.224011.1.v2 +3 -0
  42. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650539135.t1v-n-49645e71-w-0.360467.1.v2 +3 -0
  43. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650559718.t1v-n-49645e71-w-0.458243.1.v2 +3 -0
  44. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650559993.t1v-n-49645e71-w-0.462178.1.v2 +3 -0
  45. training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650688722.t1v-n-f3804d66-w-0.184783.1.v2 +3 -0
.gitattributes CHANGED
@@ -25,3 +25,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zstandard filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
config.gin ADDED
@@ -0,0 +1,152 @@
+ from __gin__ import dynamic_registration
+ import __main__ as train_script
+ import seqio
+ import t5.data.mixtures
+ from t5x import adafactor
+ from t5x.examples.t5 import network
+ from t5x import gin_utils
+ from t5x import models
+ from t5x import partitioning
+ from t5x import trainer
+ from t5x import utils
+ import tasks
+
+ # Macros:
+ # ==============================================================================
+ BATCH_SIZE = 128
+ DROPOUT_RATE = 0.0
+ INITIAL_CHECKPOINT_PATH = \
+ 'gs://nb-t5x-us-central2/norwegian_NCC_plus_English_t5x_large/checkpoint_1500000'
+ LABEL_SMOOTHING = 0.0
+ LOSS_NORMALIZING_FACTOR = None
+ MIXTURE_OR_TASK_MODULE = None
+ MIXTURE_OR_TASK_NAME = 'balanced_bokmaal_nynorsk_span_corruption_stream'
+ MODEL = @models.EncoderDecoderModel()
+ MODEL_DIR = \
+ 'gs://nb-t5x-us-central2/norwegian_NCC_plus_English_pluss200k_balanced_bokmaal_nynorsk_t5x_large'
+ OPTIMIZER = @adafactor.Adafactor()
+ RANDOM_SEED = None
+ SHUFFLE_TRAIN_EXAMPLES = True
+ TASK_FEATURE_LENGTHS = {'inputs': 512, 'targets': 512}
+ TRAIN_STEPS = 1700000
+ USE_CACHED_TASKS = True
+ USE_HARDWARE_RNG = False
+ VOCABULARY = @seqio.SentencePieceVocabulary()
+ Z_LOSS = 0.0001
+
+ # Parameters for adafactor.Adafactor:
+ # ==============================================================================
+ adafactor.Adafactor.decay_rate = 0.8
+ adafactor.Adafactor.logical_factor_rules = \
+ @adafactor.standard_logical_factor_rules()
+ adafactor.Adafactor.step_offset = 0
+
+ # Parameters for utils.CheckpointConfig:
+ # ==============================================================================
+ utils.CheckpointConfig.restore = @utils.RestoreCheckpointConfig()
+ utils.CheckpointConfig.save = @utils.SaveCheckpointConfig()
+
+ # Parameters for utils.create_learning_rate_scheduler:
+ # ==============================================================================
+ utils.create_learning_rate_scheduler.base_learning_rate = 0.5
+ utils.create_learning_rate_scheduler.factors = 'constant * rsqrt_decay'
+ utils.create_learning_rate_scheduler.warmup_steps = 10000
+
+ # Parameters for train/utils.DatasetConfig:
+ # ==============================================================================
+ train/utils.DatasetConfig.batch_size = %BATCH_SIZE
+ train/utils.DatasetConfig.mixture_or_task_name = %MIXTURE_OR_TASK_NAME
+ train/utils.DatasetConfig.module = %MIXTURE_OR_TASK_MODULE
+ train/utils.DatasetConfig.pack = True
+ train/utils.DatasetConfig.seed = None
+ train/utils.DatasetConfig.shuffle = %SHUFFLE_TRAIN_EXAMPLES
+ train/utils.DatasetConfig.split = 'train'
+ train/utils.DatasetConfig.task_feature_lengths = %TASK_FEATURE_LENGTHS
+ train/utils.DatasetConfig.use_cached = %USE_CACHED_TASKS
+
+ # Parameters for train_eval/utils.DatasetConfig:
+ # ==============================================================================
+ train_eval/utils.DatasetConfig.batch_size = %BATCH_SIZE
+ train_eval/utils.DatasetConfig.mixture_or_task_name = %MIXTURE_OR_TASK_NAME
+ train_eval/utils.DatasetConfig.module = %MIXTURE_OR_TASK_MODULE
+ train_eval/utils.DatasetConfig.pack = True
+ train_eval/utils.DatasetConfig.seed = 42
+ train_eval/utils.DatasetConfig.shuffle = False
+ train_eval/utils.DatasetConfig.split = 'validation'
+ train_eval/utils.DatasetConfig.task_feature_lengths = %TASK_FEATURE_LENGTHS
+ train_eval/utils.DatasetConfig.use_cached = %USE_CACHED_TASKS
+
+ # Parameters for models.EncoderDecoderModel:
+ # ==============================================================================
+ models.EncoderDecoderModel.input_vocabulary = %VOCABULARY
+ models.EncoderDecoderModel.label_smoothing = %LABEL_SMOOTHING
+ models.EncoderDecoderModel.loss_normalizing_factor = %LOSS_NORMALIZING_FACTOR
+ models.EncoderDecoderModel.module = @network.Transformer()
+ models.EncoderDecoderModel.optimizer_def = %OPTIMIZER
+ models.EncoderDecoderModel.output_vocabulary = %VOCABULARY
+ models.EncoderDecoderModel.z_loss = %Z_LOSS
+
+ # Parameters for partitioning.PjitPartitioner:
+ # ==============================================================================
+ partitioning.PjitPartitioner.logical_axis_rules = \
+ @partitioning.standard_logical_axis_rules()
+ partitioning.PjitPartitioner.model_parallel_submesh = None
+ partitioning.PjitPartitioner.num_partitions = 2
+
+ # Parameters for utils.RestoreCheckpointConfig:
+ # ==============================================================================
+ utils.RestoreCheckpointConfig.dtype = 'float32'
+ utils.RestoreCheckpointConfig.mode = 'specific'
+ utils.RestoreCheckpointConfig.path = %INITIAL_CHECKPOINT_PATH
+
+ # Parameters for utils.SaveCheckpointConfig:
+ # ==============================================================================
+ utils.SaveCheckpointConfig.dtype = 'float32'
+ utils.SaveCheckpointConfig.keep = 3
+ utils.SaveCheckpointConfig.period = 5000
+ utils.SaveCheckpointConfig.save_dataset = False
+
+ # Parameters for seqio.SentencePieceVocabulary:
+ # ==============================================================================
+ seqio.SentencePieceVocabulary.sentencepiece_model_file = \
+ 'gs://t5-data/vocabs/mc4.250000.100extra/sentencepiece.model'
+
+ # Parameters for network.T5Config:
+ # ==============================================================================
+ network.T5Config.dropout_rate = %DROPOUT_RATE
+ network.T5Config.dtype = 'bfloat16'
+ network.T5Config.emb_dim = 1024
+ network.T5Config.head_dim = 64
+ network.T5Config.logits_via_embedding = False
+ network.T5Config.mlp_activations = ('gelu', 'linear')
+ network.T5Config.mlp_dim = 2816
+ network.T5Config.num_decoder_layers = 24
+ network.T5Config.num_encoder_layers = 24
+ network.T5Config.num_heads = 16
+ network.T5Config.vocab_size = 250112
+
+ # Parameters for train_script.train:
+ # ==============================================================================
+ train_script.train.checkpoint_cfg = @utils.CheckpointConfig()
+ train_script.train.eval_period = 1000
+ train_script.train.eval_steps = 20
+ train_script.train.infer_eval_dataset_cfg = None
+ train_script.train.model = %MODEL
+ train_script.train.model_dir = %MODEL_DIR
+ train_script.train.partitioner = @partitioning.PjitPartitioner()
+ train_script.train.random_seed = %RANDOM_SEED
+ train_script.train.summarize_config_fn = @gin_utils.summarize_gin_config
+ train_script.train.total_steps = %TRAIN_STEPS
+ train_script.train.train_dataset_cfg = @train/utils.DatasetConfig()
+ train_script.train.train_eval_dataset_cfg = @train_eval/utils.DatasetConfig()
+ train_script.train.trainer_cls = @trainer.Trainer
+ train_script.train.use_hardware_rng = %USE_HARDWARE_RNG
+
+ # Parameters for trainer.Trainer:
+ # ==============================================================================
+ trainer.Trainer.learning_rate_fn = @utils.create_learning_rate_scheduler()
+ trainer.Trainer.num_microbatches = None
+
+ # Parameters for network.Transformer:
+ # ==============================================================================
+ network.Transformer.config = @network.T5Config()
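
Taken together, these macros describe continued pretraining: the run restores the step-1,500,000 checkpoint named in INITIAL_CHECKPOINT_PATH and trains to TRAIN_STEPS = 1,700,000 on the balanced Bokmål/Nynorsk span-corruption mixture, which matches the "pluss200k" suffix in MODEL_DIR. A minimal Python sketch of the implied step and token budget (the token figure is an upper bound, since examples are packed):

    initial_checkpoint_step = 1_500_000   # from INITIAL_CHECKPOINT_PATH (checkpoint_1500000)
    total_train_steps = 1_700_000         # TRAIN_STEPS
    batch_size = 128                      # BATCH_SIZE
    sequence_length = 512                 # TASK_FEATURE_LENGTHS['inputs']

    additional_steps = total_train_steps - initial_checkpoint_step
    sequences_per_run = additional_steps * batch_size
    max_input_tokens = sequences_per_run * sequence_length

    print(additional_steps)    # 200000
    print(sequences_per_run)   # 25600000 packed sequences
    print(max_input_tokens)    # 13107200000 input tokens at most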
config.json ADDED
@@ -0,0 +1,30 @@
+ {
+ "_name_or_path": "/home/patrick/hugging_face/t5/mt5-large",
+ "architectures": [
+ "T5ForConditionalGeneration"
+ ],
+ "d_ff": 2816,
+ "d_kv": 64,
+ "d_model": 1024,
+ "decoder_start_token_id": 0,
+ "dropout_rate": 0.1,
+ "eos_token_id": 1,
+ "feed_forward_proj": "gated-gelu",
+ "initializer_factor": 1.0,
+ "is_encoder_decoder": true,
+ "layer_norm_epsilon": 1e-06,
+ "model_type": "t5",
+ "num_decoder_layers": 24,
+ "num_heads": 16,
+ "num_layers": 24,
+ "output_past": true,
+ "pad_token_id": 0,
+ "relative_attention_max_distance": 128,
+ "relative_attention_num_buckets": 32,
+ "tie_word_embeddings": false,
+ "tokenizer_class": "T5Tokenizer",
+ "torch_dtype": "float32",
+ "transformers_version": "4.19.2",
+ "use_cache": true,
+ "vocab_size": 250112
+ }
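
config.json declares an mT5-Large-sized T5ForConditionalGeneration (24 encoder and 24 decoder layers, d_model 1024, vocabulary 250112), so the exported weights can be loaded with Hugging Face transformers. A minimal sketch, using a placeholder repository id that should be replaced with this model's actual Hub path:

    from transformers import T5ForConditionalGeneration, T5Tokenizer

    repo_id = "your-namespace/your-model"  # placeholder; use this repository's Hub id

    tokenizer = T5Tokenizer.from_pretrained(repo_id)             # loads spiece.model
    model = T5ForConditionalGeneration.from_pretrained(repo_id)  # loads config.json + pytorch_model.bin

    print(model.config.num_layers, model.config.d_model, model.config.vocab_size)  # 24 1024 250112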
flax_model.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a95d2cda656139327d2104c2947e19a13fa5647f45ae56f7ad724749320ad438
+ size 4918349339
model-info.txt ADDED
The diff for this file is too large to render.
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:535939b8d69d5d76429959af37d7f6e36c26195ebf721af01519484cf18229b9
+ size 4918507641
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>"}
spiece.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef78f86560d809067d12bac6c09f19a462cb3af3f54d2b8acbba26e1433125d6
+ size 4309802
tf_model.h5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:13edf9346263c231557abbd4b42fd847571a957f938e4f48b2d23ec2f1c58acc
+ size 9760
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93c3578052e1605d8332eb961bc08d72e246071974e4cc54aa6991826b802aa5
+ size 16330369
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "extra_ids": 0, "additional_special_tokens": null, "special_tokens_map_file": "/home/patrick/.cache/torch/transformers/685ac0ca8568ec593a48b61b0a3c272beee9bc194a3c7241d15dcadb5f875e53.f76030f3ec1b96a8199b2593390c610e76ca8028ef3d24680000619ffb646276", "name_or_path": "/home/perk/models/t5_large_NCC_modern", "sp_model_kwargs": {}, "tokenizer_class": "T5Tokenizer"}
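
The tokenizer is a T5Tokenizer over the shared mT5 SentencePiece vocabulary (mc4.250000.100extra), with extra_ids set to 0 and only </s>, <unk> and <pad> declared as special tokens. A quick sketch for checking those settings once loaded, again with a placeholder repository id:

    from transformers import T5Tokenizer

    tok = T5Tokenizer.from_pretrained("your-namespace/your-model")  # placeholder Hub id

    # Mirrors special_tokens_map.json / tokenizer_config.json above.
    print(tok.eos_token, tok.unk_token, tok.pad_token)  # </s> <unk> <pad>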
train/events.out.tfevents.1650190955.t1v-n-6662dcba-w-0.3144387.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:771244905e0d5fcf65dbffc5b9b6c3376b5cbfd007a35ad9a6113874bbd386d6
+ size 41510
train/events.out.tfevents.1650215678.t1v-n-6662dcba-w-0.3296289.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7af0f773cf35ac89654ca82f9bbc63bb59843a811a032f9bf73a20df65b33ca6
+ size 12250
train/events.out.tfevents.1650226182.t1v-n-6662dcba-w-0.3347961.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a77df9c1e33837441dc08db54e4440b05ae9991f0bacd8455afffb151b2c6fd
+ size 48825
train/events.out.tfevents.1650261561.t1v-n-6662dcba-w-0.3552115.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e6c134656d544a21ea585c019979495930a3c78cb1113265071ff026bf9c9484
+ size 23017
train/events.out.tfevents.1650288063.t1v-n-9dce3dae-w-0.92778.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b7eab38bfa1e8d2d95290364e3a198979d4d7ad2c3ce2ed44ee2cb0130fba99e
+ size 35661
train/events.out.tfevents.1650310356.t1v-n-9dce3dae-w-0.189410.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:07493d433cb0f0fff775f77fd16384cfb1e71c652ed60ac016ac962350b532a5
+ size 64926
train/events.out.tfevents.1650349932.t1v-n-9dce3dae-w-0.371713.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b8ee46dfc6f4f128ea7a16d9afab53bb862ca1b866b68885bd74225f42de4459
+ size 64926
train/events.out.tfevents.1650404082.t1v-n-9dce3dae-w-0.648263.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd39bc9e6ce64f4579099d5014c68f58882800f3fb6ae1184ad4c9a7ae132414
+ size 6481
train/events.out.tfevents.1650441615.t1v-n-bbc8ba53-w-1.123916.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:abfa796342094b70c8e5bcd3dbc66fb556d2be1eb7851f4eab1e8e331fde19a9
+ size 10858
train/events.out.tfevents.1650450190.t1v-n-49630a28-w-1.65684.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:61550229b66ebf832bc50c087c8dd820979b59ee30dd461dbc09e704bd953eee
+ size 35661
train/events.out.tfevents.1650474819.t1v-n-49630a28-w-1.170547.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:949242d8d2a1dd7478f6d9fe8e52ea6bb620838b30e3ba2e9a2fb9ef670ddd22
+ size 8879
train/events.out.tfevents.1650489796.t1v-n-49645e71-w-0.96081.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4570e26fb5a7870a9fbbdaf285cfd7e60e6b9507e313025c39154f183e21543e
+ size 21155
train/events.out.tfevents.1650520896.t1v-n-49645e71-w-0.224011.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5f7952e5b8855ed58548f2d6dd10e18c6d29faf65b59190fcbaaa53423930952
+ size 35915
train/events.out.tfevents.1650539135.t1v-n-49645e71-w-0.360467.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:24baa5ffb0b5a0cf8832744c899a09ef8daf050fe00bf1d6b6fbeea085a1ccba
+ size 35915
train/events.out.tfevents.1650559718.t1v-n-49645e71-w-0.458243.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4e85e3b544cead7f8fe70b1ff87ba62d4d37ccd286d0086351492ce1fe1f5834
+ size 6394
train/events.out.tfevents.1650559992.t1v-n-49645e71-w-0.462178.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:32faae73c050030ce817a922513cef1696da708aab7f288147d643920bdfce1d
+ size 111344
train/events.out.tfevents.1650688722.t1v-n-f3804d66-w-0.184783.0.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a4613735e1c556f4f671366d9f065b29f88e502bb7398dd0b04ff57c4c67bb46
+ size 6480
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650190956.t1v-n-6662dcba-w-0.3144387.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:79b4caa550d0a8505a1b910925bf12f6c3bc0d951b8e1f76d79e40550225a510
+ size 30141
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650215678.t1v-n-6662dcba-w-0.3296289.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d14d7719938e671cafdcbdc0bd8a5d94d1fff6f8f50f0ec83158aed586494737
+ size 4041
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650226183.t1v-n-6662dcba-w-0.3347961.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ecae28bad4d351c74067eb37eb1ba2e10953f4a3f63dafc010e7b940b391bc3d
+ size 36666
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650261562.t1v-n-6662dcba-w-0.3552115.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b03a75209136f7d86c9ddbd9df1db5a439e6310c0e973803497b248518e68483
+ size 15786
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650288064.t1v-n-9dce3dae-w-0.92778.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8612a3595eb2d6b2c67b05a5c90fc858118c88568e3de5574b8995d2a8f7f341
+ size 24921
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650310356.t1v-n-9dce3dae-w-0.189410.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d902afd33b8fbaa289568b60d8a4d43e700bcdc53765f7663a2b82c231fdee3e
+ size 51021
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650349932.t1v-n-9dce3dae-w-0.371713.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:97e1c9e6729e38a83ac5a72acc77f8b24737dce36c7bed66fface0bc562e2cf0
+ size 51021
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650404082.t1v-n-9dce3dae-w-0.648263.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d86a7f028378e4383638379208d92ce31ec607f12a6c702f32c83ec3d4b6c58d
+ size 40
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650441615.t1v-n-bbc8ba53-w-1.123916.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4f7a413e75e845b12f66f44483a69a15691a3bffac5d063dc0cc7ec50a6df804
+ size 4041
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650450190.t1v-n-49630a28-w-1.65684.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff677930e570d36c7ad6cd4be5ec64d2340a8bbfc77206126c3e255bdd000da8
+ size 24921
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650474819.t1v-n-49630a28-w-1.170547.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f118145946e09c0d6234b22fab2cc3fafbd4ceb8b512104f6151766dc734f30f
+ size 1431
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650489797.t1v-n-49645e71-w-0.96081.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1fe573fb80ab570ddcc78e2bd882081e6407d3e67bd5f22a3ec6f14403ac6f8
+ size 11871
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650520896.t1v-n-49645e71-w-0.224011.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d633c4d1757d5efa513e6be7d69b2ea0a97d9f8fa625ad01fee577014b1a93a8
+ size 24921
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650539135.t1v-n-49645e71-w-0.360467.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:092c5fc1e7ff5f00cb6b3838c2af8fcb7c1332fa0fd36d32fcbe9543b996b795
+ size 24921
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650559718.t1v-n-49645e71-w-0.458243.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93101cf0de67ef873e831defe6f1bf8a1e70127e98e7301c7002622657ba5b0b
+ size 40
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650559993.t1v-n-49645e71-w-0.462178.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fcfe2ff1c673a455b8c0346458f04bbff4d76fbd5bf66a93c8a0f1e9e7fd9c4d
+ size 92781
training_eval/balanced_bokmaal_nynorsk_span_corruption_stream/events.out.tfevents.1650688722.t1v-n-f3804d66-w-0.184783.1.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fd8dfd17907ca4843143284e3ddaf9db33c9ddfb27b1cad4acc4e95a7509ee73
+ size 40