Michielo committed on
Commit
421f0cd
1 Parent(s): 8efcba2

Upload 12 files

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
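
The added line routes `tokenizer.json` (about 16 MB, see its pointer below) through Git LFS, matching the other large-file patterns already in `.gitattributes`. As a minimal sketch (assuming the standard `<pattern> filter=lfs diff=lfs merge=lfs -text` layout shown above), the LFS-tracked patterns can be listed in Python:

```python
# Sketch: list which path patterns .gitattributes routes through Git LFS.
from pathlib import Path

def lfs_patterns(path: str = ".gitattributes") -> list[str]:
    patterns = []
    for line in Path(path).read_text().splitlines():
        parts = line.split()
        if parts and "filter=lfs" in parts[1:]:
            patterns.append(parts[0])
    return patterns

print(lfs_patterns())  # after this commit, the list includes "tokenizer.json"
```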
config.json ADDED
@@ -0,0 +1,33 @@
+ {
+ "_name_or_path": "google/mt5-small",
+ "architectures": [
+ "MT5ForConditionalGeneration"
+ ],
+ "classifier_dropout": 0.0,
+ "d_ff": 1024,
+ "d_kv": 64,
+ "d_model": 512,
+ "decoder_start_token_id": 0,
+ "dense_act_fn": "gelu_new",
+ "dropout_rate": 0.1,
+ "eos_token_id": 1,
+ "feed_forward_proj": "gated-gelu",
+ "initializer_factor": 1.0,
+ "is_encoder_decoder": true,
+ "is_gated_act": true,
+ "layer_norm_epsilon": 1e-06,
+ "max_length": 1024,
+ "model_type": "mt5",
+ "num_decoder_layers": 8,
+ "num_heads": 6,
+ "num_layers": 8,
+ "pad_token_id": 0,
+ "relative_attention_max_distance": 128,
+ "relative_attention_num_buckets": 32,
+ "tie_word_embeddings": false,
+ "tokenizer_class": "T5Tokenizer",
+ "torch_dtype": "float32",
+ "transformers_version": "4.37.2",
+ "use_cache": true,
+ "vocab_size": 250112
+ }
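
The configuration describes an mT5 encoder-decoder in the `google/mt5-small` shape: 8 encoder and 8 decoder layers, d_model 512, 6 attention heads, a gated-GELU feed-forward of width 1024, and a 250,112-token vocabulary stored in float32. A minimal loading sketch with `transformers` (the checkpoint path is a placeholder for this repository or a local clone of it):

```python
# Sketch: load the uploaded checkpoint; "path/to/checkpoint" is a placeholder.
from transformers import AutoTokenizer, MT5ForConditionalGeneration

model = MT5ForConditionalGeneration.from_pretrained("path/to/checkpoint")
tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint")

# The values from config.json surface on the loaded config object.
assert model.config.d_model == 512
assert model.config.num_layers == model.config.num_decoder_layers == 8
assert model.config.vocab_size == 250112
```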
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "decoder_start_token_id": 0,
+ "eos_token_id": 1,
+ "max_length": 1024,
+ "pad_token_id": 0,
+ "transformers_version": "4.37.2"
+ }
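
These are the defaults `model.generate()` falls back to when no overrides are passed: start decoding from token id 0, stop at EOS id 1, pad with id 0, and cap output at 1024 tokens. A short sketch reading them back (checkpoint path again a placeholder):

```python
# Sketch: inspect the generation defaults shipped with the checkpoint.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("path/to/checkpoint")
print(gen_config.max_length)              # 1024
print(gen_config.decoder_start_token_id)  # 0
print(gen_config.eos_token_id)            # 1
print(gen_config.pad_token_id)            # 0
```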
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c95ad1a49dd4a6a975f7124006822d255d8197df7cbc7862b7824a2a9e00db8b
+ size 1200729512
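
What is committed here is a Git LFS pointer, not the roughly 1.2 GB of weights themselves: the pointer records the spec version, the SHA-256 object id, and the byte size. The same applies to `optimizer.pt`, `rng_state.pth`, `scheduler.pt`, `spiece.model`, `tokenizer.json`, and `training_args.bin` below. A minimal sketch for checking a fully downloaded file against its pointer (values copied from the pointer above):

```python
# Sketch: verify a downloaded LFS object against the oid/size in its pointer.
import hashlib
from pathlib import Path

def verify_lfs_object(file_path: str, expected_sha256: str, expected_size: int) -> bool:
    data = Path(file_path).read_bytes()  # loads the whole file; fine for a sketch
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_sha256

print(verify_lfs_object(
    "model.safetensors",
    "c95ad1a49dd4a6a975f7124006822d255d8197df7cbc7862b7824a2a9e00db8b",
    1200729512,
))
```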
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08f17f9b17033787f46b303f16f8fd40f6777f416f5e5658486ab0de566c4a05
+ size 2401574330
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:307bbe15658713574c0b996b26e29048ee2e31bf283d936219e28dd7975d7832
+ size 14244
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35c5df2030891c0dfdaeabdb7935c129f217c8ddcc79ea0dd94e98567de8cdfb
+ size 1064
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
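
Each entry mirrors the fields of an `AddedToken` in the `tokenizers` library, which is how the T5 tokenizer represents its special tokens together with their stripping and normalization flags. A rough equivalent (a sketch, not the loading path the library actually takes):

```python
# Sketch: the JSON entries above correspond to AddedToken objects like these.
from tokenizers import AddedToken

eos = AddedToken("</s>", lstrip=False, rstrip=False, normalized=False, single_word=False)
pad = AddedToken("<pad>", lstrip=False, rstrip=False, normalized=False, single_word=False)
unk = AddedToken("<unk>", lstrip=False, rstrip=False, normalized=False, single_word=False)
print(eos.content, pad.content, unk.content)
```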
spiece.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef78f86560d809067d12bac6c09f19a462cb3af3f54d2b8acbba26e1433125d6
+ size 4309802
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:722bf80470f2ae6a7cfeaaaf876fb7ef9ad33b2e9a514ded463498f4a8bbd6be
+ size 16330661
tokenizer_config.json ADDED
@@ -0,0 +1,38 @@
+ {
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [],
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "</s>",
+ "extra_ids": 0,
+ "legacy": true,
+ "model_max_length": 1024,
+ "pad_token": "<pad>",
+ "sp_model_kwargs": {},
+ "tokenizer_class": "T5Tokenizer",
+ "unk_token": "<unk>"
+ }
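
The tokenizer config declares a `T5Tokenizer` over the SentencePiece model (`spiece.model`), with `model_max_length` 1024, no extra sentinel ids, and ids 0/1/2 reserved for `<pad>`/`</s>`/`<unk>` via `added_tokens_decoder`. A short check, with the checkpoint path again a placeholder:

```python
# Sketch: confirm the special-token ids declared in tokenizer_config.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint")
print(tokenizer.model_max_length)                   # 1024
print(tokenizer.pad_token, tokenizer.pad_token_id)  # <pad> 0
print(tokenizer.eos_token, tokenizer.eos_token_id)  # </s> 1
print(tokenizer.unk_token, tokenizer.unk_token_id)  # <unk> 2
```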
trainer_state.json ADDED
@@ -0,0 +1,2461 @@
1
+ {
2
+ "best_metric": 26.5388,
3
+ "best_model_checkpoint": "output/checkpoint-200000",
4
+ "epoch": 14.498006524102935,
5
+ "eval_steps": 50000,
6
+ "global_step": 200000,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.04,
13
+ "learning_rate": 1.9951673311586325e-05,
14
+ "loss": 12.9031,
15
+ "step": 500
16
+ },
17
+ {
18
+ "epoch": 0.07,
19
+ "learning_rate": 1.9903346623172648e-05,
20
+ "loss": 4.995,
21
+ "step": 1000
22
+ },
23
+ {
24
+ "epoch": 0.11,
25
+ "learning_rate": 1.985501993475897e-05,
26
+ "loss": 4.013,
27
+ "step": 1500
28
+ },
29
+ {
30
+ "epoch": 0.14,
31
+ "learning_rate": 1.9806693246345298e-05,
32
+ "loss": 3.7433,
33
+ "step": 2000
34
+ },
35
+ {
36
+ "epoch": 0.18,
37
+ "learning_rate": 1.9758366557931618e-05,
38
+ "loss": 3.5754,
39
+ "step": 2500
40
+ },
41
+ {
42
+ "epoch": 0.22,
43
+ "learning_rate": 1.9710039869517944e-05,
44
+ "loss": 3.4371,
45
+ "step": 3000
46
+ },
47
+ {
48
+ "epoch": 0.25,
49
+ "learning_rate": 1.9661713181104268e-05,
50
+ "loss": 3.3418,
51
+ "step": 3500
52
+ },
53
+ {
54
+ "epoch": 0.29,
55
+ "learning_rate": 1.961338649269059e-05,
56
+ "loss": 3.2642,
57
+ "step": 4000
58
+ },
59
+ {
60
+ "epoch": 0.33,
61
+ "learning_rate": 1.9565059804276914e-05,
62
+ "loss": 3.2133,
63
+ "step": 4500
64
+ },
65
+ {
66
+ "epoch": 0.36,
67
+ "learning_rate": 1.9516733115863237e-05,
68
+ "loss": 3.1533,
69
+ "step": 5000
70
+ },
71
+ {
72
+ "epoch": 0.4,
73
+ "learning_rate": 1.946840642744956e-05,
74
+ "loss": 3.1304,
75
+ "step": 5500
76
+ },
77
+ {
78
+ "epoch": 0.43,
79
+ "learning_rate": 1.9420079739035884e-05,
80
+ "loss": 3.0707,
81
+ "step": 6000
82
+ },
83
+ {
84
+ "epoch": 0.47,
85
+ "learning_rate": 1.9371753050622207e-05,
86
+ "loss": 3.0091,
87
+ "step": 6500
88
+ },
89
+ {
90
+ "epoch": 0.51,
91
+ "learning_rate": 1.932342636220853e-05,
92
+ "loss": 2.9992,
93
+ "step": 7000
94
+ },
95
+ {
96
+ "epoch": 0.54,
97
+ "learning_rate": 1.9275099673794853e-05,
98
+ "loss": 2.9653,
99
+ "step": 7500
100
+ },
101
+ {
102
+ "epoch": 0.58,
103
+ "learning_rate": 1.922677298538118e-05,
104
+ "loss": 2.9463,
105
+ "step": 8000
106
+ },
107
+ {
108
+ "epoch": 0.62,
109
+ "learning_rate": 1.91784462969675e-05,
110
+ "loss": 2.9304,
111
+ "step": 8500
112
+ },
113
+ {
114
+ "epoch": 0.65,
115
+ "learning_rate": 1.9130119608553827e-05,
116
+ "loss": 2.8825,
117
+ "step": 9000
118
+ },
119
+ {
120
+ "epoch": 0.69,
121
+ "learning_rate": 1.908179292014015e-05,
122
+ "loss": 2.863,
123
+ "step": 9500
124
+ },
125
+ {
126
+ "epoch": 0.72,
127
+ "learning_rate": 1.9033466231726473e-05,
128
+ "loss": 2.8367,
129
+ "step": 10000
130
+ },
131
+ {
132
+ "epoch": 0.76,
133
+ "learning_rate": 1.8985139543312796e-05,
134
+ "loss": 2.8211,
135
+ "step": 10500
136
+ },
137
+ {
138
+ "epoch": 0.8,
139
+ "learning_rate": 1.893681285489912e-05,
140
+ "loss": 2.8159,
141
+ "step": 11000
142
+ },
143
+ {
144
+ "epoch": 0.83,
145
+ "learning_rate": 1.8888486166485443e-05,
146
+ "loss": 2.7894,
147
+ "step": 11500
148
+ },
149
+ {
150
+ "epoch": 0.87,
151
+ "learning_rate": 1.8840159478071766e-05,
152
+ "loss": 2.7596,
153
+ "step": 12000
154
+ },
155
+ {
156
+ "epoch": 0.91,
157
+ "learning_rate": 1.8791832789658093e-05,
158
+ "loss": 2.7576,
159
+ "step": 12500
160
+ },
161
+ {
162
+ "epoch": 0.94,
163
+ "learning_rate": 1.8743506101244412e-05,
164
+ "loss": 2.7496,
165
+ "step": 13000
166
+ },
167
+ {
168
+ "epoch": 0.98,
169
+ "learning_rate": 1.869517941283074e-05,
170
+ "loss": 2.728,
171
+ "step": 13500
172
+ },
173
+ {
174
+ "epoch": 1.01,
175
+ "learning_rate": 1.8646852724417062e-05,
176
+ "loss": 2.6927,
177
+ "step": 14000
178
+ },
179
+ {
180
+ "epoch": 1.05,
181
+ "learning_rate": 1.8598526036003382e-05,
182
+ "loss": 2.7037,
183
+ "step": 14500
184
+ },
185
+ {
186
+ "epoch": 1.09,
187
+ "learning_rate": 1.855019934758971e-05,
188
+ "loss": 2.6919,
189
+ "step": 15000
190
+ },
191
+ {
192
+ "epoch": 1.12,
193
+ "learning_rate": 1.8501872659176032e-05,
194
+ "loss": 2.66,
195
+ "step": 15500
196
+ },
197
+ {
198
+ "epoch": 1.16,
199
+ "learning_rate": 1.8453545970762355e-05,
200
+ "loss": 2.6546,
201
+ "step": 16000
202
+ },
203
+ {
204
+ "epoch": 1.2,
205
+ "learning_rate": 1.840521928234868e-05,
206
+ "loss": 2.6415,
207
+ "step": 16500
208
+ },
209
+ {
210
+ "epoch": 1.23,
211
+ "learning_rate": 1.8356892593935e-05,
212
+ "loss": 2.6423,
213
+ "step": 17000
214
+ },
215
+ {
216
+ "epoch": 1.27,
217
+ "learning_rate": 1.8308565905521325e-05,
218
+ "loss": 2.625,
219
+ "step": 17500
220
+ },
221
+ {
222
+ "epoch": 1.3,
223
+ "learning_rate": 1.8260239217107648e-05,
224
+ "loss": 2.6041,
225
+ "step": 18000
226
+ },
227
+ {
228
+ "epoch": 1.34,
229
+ "learning_rate": 1.8211912528693975e-05,
230
+ "loss": 2.5971,
231
+ "step": 18500
232
+ },
233
+ {
234
+ "epoch": 1.38,
235
+ "learning_rate": 1.8163585840280295e-05,
236
+ "loss": 2.5993,
237
+ "step": 19000
238
+ },
239
+ {
240
+ "epoch": 1.41,
241
+ "learning_rate": 1.811525915186662e-05,
242
+ "loss": 2.5707,
243
+ "step": 19500
244
+ },
245
+ {
246
+ "epoch": 1.45,
247
+ "learning_rate": 1.8066932463452944e-05,
248
+ "loss": 2.5933,
249
+ "step": 20000
250
+ },
251
+ {
252
+ "epoch": 1.49,
253
+ "learning_rate": 1.8018605775039268e-05,
254
+ "loss": 2.5669,
255
+ "step": 20500
256
+ },
257
+ {
258
+ "epoch": 1.52,
259
+ "learning_rate": 1.797027908662559e-05,
260
+ "loss": 2.5681,
261
+ "step": 21000
262
+ },
263
+ {
264
+ "epoch": 1.56,
265
+ "learning_rate": 1.7921952398211914e-05,
266
+ "loss": 2.5648,
267
+ "step": 21500
268
+ },
269
+ {
270
+ "epoch": 1.59,
271
+ "learning_rate": 1.7873625709798237e-05,
272
+ "loss": 2.5403,
273
+ "step": 22000
274
+ },
275
+ {
276
+ "epoch": 1.63,
277
+ "learning_rate": 1.782529902138456e-05,
278
+ "loss": 2.5314,
279
+ "step": 22500
280
+ },
281
+ {
282
+ "epoch": 1.67,
283
+ "learning_rate": 1.7776972332970884e-05,
284
+ "loss": 2.5094,
285
+ "step": 23000
286
+ },
287
+ {
288
+ "epoch": 1.7,
289
+ "learning_rate": 1.7728645644557207e-05,
290
+ "loss": 2.537,
291
+ "step": 23500
292
+ },
293
+ {
294
+ "epoch": 1.74,
295
+ "learning_rate": 1.768031895614353e-05,
296
+ "loss": 2.5389,
297
+ "step": 24000
298
+ },
299
+ {
300
+ "epoch": 1.78,
301
+ "learning_rate": 1.7631992267729857e-05,
302
+ "loss": 2.5243,
303
+ "step": 24500
304
+ },
305
+ {
306
+ "epoch": 1.81,
307
+ "learning_rate": 1.7583665579316177e-05,
308
+ "loss": 2.5064,
309
+ "step": 25000
310
+ },
311
+ {
312
+ "epoch": 1.85,
313
+ "learning_rate": 1.7535338890902503e-05,
314
+ "loss": 2.5106,
315
+ "step": 25500
316
+ },
317
+ {
318
+ "epoch": 1.88,
319
+ "learning_rate": 1.7487012202488827e-05,
320
+ "loss": 2.4955,
321
+ "step": 26000
322
+ },
323
+ {
324
+ "epoch": 1.92,
325
+ "learning_rate": 1.743868551407515e-05,
326
+ "loss": 2.488,
327
+ "step": 26500
328
+ },
329
+ {
330
+ "epoch": 1.96,
331
+ "learning_rate": 1.7390358825661473e-05,
332
+ "loss": 2.4869,
333
+ "step": 27000
334
+ },
335
+ {
336
+ "epoch": 1.99,
337
+ "learning_rate": 1.7342032137247796e-05,
338
+ "loss": 2.4838,
339
+ "step": 27500
340
+ },
341
+ {
342
+ "epoch": 2.03,
343
+ "learning_rate": 1.729370544883412e-05,
344
+ "loss": 2.4756,
345
+ "step": 28000
346
+ },
347
+ {
348
+ "epoch": 2.07,
349
+ "learning_rate": 1.7245378760420443e-05,
350
+ "loss": 2.4512,
351
+ "step": 28500
352
+ },
353
+ {
354
+ "epoch": 2.1,
355
+ "learning_rate": 1.719705207200677e-05,
356
+ "loss": 2.4456,
357
+ "step": 29000
358
+ },
359
+ {
360
+ "epoch": 2.14,
361
+ "learning_rate": 1.714872538359309e-05,
362
+ "loss": 2.4485,
363
+ "step": 29500
364
+ },
365
+ {
366
+ "epoch": 2.17,
367
+ "learning_rate": 1.7100398695179416e-05,
368
+ "loss": 2.4304,
369
+ "step": 30000
370
+ },
371
+ {
372
+ "epoch": 2.21,
373
+ "learning_rate": 1.705207200676574e-05,
374
+ "loss": 2.4417,
375
+ "step": 30500
376
+ },
377
+ {
378
+ "epoch": 2.25,
379
+ "learning_rate": 1.7003745318352062e-05,
380
+ "loss": 2.43,
381
+ "step": 31000
382
+ },
383
+ {
384
+ "epoch": 2.28,
385
+ "learning_rate": 1.6955418629938385e-05,
386
+ "loss": 2.443,
387
+ "step": 31500
388
+ },
389
+ {
390
+ "epoch": 2.32,
391
+ "learning_rate": 1.690709194152471e-05,
392
+ "loss": 2.4195,
393
+ "step": 32000
394
+ },
395
+ {
396
+ "epoch": 2.36,
397
+ "learning_rate": 1.6858765253111032e-05,
398
+ "loss": 2.4103,
399
+ "step": 32500
400
+ },
401
+ {
402
+ "epoch": 2.39,
403
+ "learning_rate": 1.6810438564697355e-05,
404
+ "loss": 2.4133,
405
+ "step": 33000
406
+ },
407
+ {
408
+ "epoch": 2.43,
409
+ "learning_rate": 1.676211187628368e-05,
410
+ "loss": 2.4059,
411
+ "step": 33500
412
+ },
413
+ {
414
+ "epoch": 2.46,
415
+ "learning_rate": 1.671378518787e-05,
416
+ "loss": 2.4059,
417
+ "step": 34000
418
+ },
419
+ {
420
+ "epoch": 2.5,
421
+ "learning_rate": 1.6665458499456325e-05,
422
+ "loss": 2.4109,
423
+ "step": 34500
424
+ },
425
+ {
426
+ "epoch": 2.54,
427
+ "learning_rate": 1.661713181104265e-05,
428
+ "loss": 2.4025,
429
+ "step": 35000
430
+ },
431
+ {
432
+ "epoch": 2.57,
433
+ "learning_rate": 1.656880512262897e-05,
434
+ "loss": 2.397,
435
+ "step": 35500
436
+ },
437
+ {
438
+ "epoch": 2.61,
439
+ "learning_rate": 1.6520478434215298e-05,
440
+ "loss": 2.3958,
441
+ "step": 36000
442
+ },
443
+ {
444
+ "epoch": 2.65,
445
+ "learning_rate": 1.647215174580162e-05,
446
+ "loss": 2.3681,
447
+ "step": 36500
448
+ },
449
+ {
450
+ "epoch": 2.68,
451
+ "learning_rate": 1.6423825057387944e-05,
452
+ "loss": 2.4033,
453
+ "step": 37000
454
+ },
455
+ {
456
+ "epoch": 2.72,
457
+ "learning_rate": 1.6375498368974268e-05,
458
+ "loss": 2.3801,
459
+ "step": 37500
460
+ },
461
+ {
462
+ "epoch": 2.75,
463
+ "learning_rate": 1.632717168056059e-05,
464
+ "loss": 2.3898,
465
+ "step": 38000
466
+ },
467
+ {
468
+ "epoch": 2.79,
469
+ "learning_rate": 1.6278844992146914e-05,
470
+ "loss": 2.3613,
471
+ "step": 38500
472
+ },
473
+ {
474
+ "epoch": 2.83,
475
+ "learning_rate": 1.6230518303733237e-05,
476
+ "loss": 2.3879,
477
+ "step": 39000
478
+ },
479
+ {
480
+ "epoch": 2.86,
481
+ "learning_rate": 1.6182191615319564e-05,
482
+ "loss": 2.362,
483
+ "step": 39500
484
+ },
485
+ {
486
+ "epoch": 2.9,
487
+ "learning_rate": 1.6133864926905884e-05,
488
+ "loss": 2.3625,
489
+ "step": 40000
490
+ },
491
+ {
492
+ "epoch": 2.94,
493
+ "learning_rate": 1.608553823849221e-05,
494
+ "loss": 2.3635,
495
+ "step": 40500
496
+ },
497
+ {
498
+ "epoch": 2.97,
499
+ "learning_rate": 1.6037211550078534e-05,
500
+ "loss": 2.3615,
501
+ "step": 41000
502
+ },
503
+ {
504
+ "epoch": 3.01,
505
+ "learning_rate": 1.5988884861664853e-05,
506
+ "loss": 2.3327,
507
+ "step": 41500
508
+ },
509
+ {
510
+ "epoch": 3.04,
511
+ "learning_rate": 1.594055817325118e-05,
512
+ "loss": 2.3424,
513
+ "step": 42000
514
+ },
515
+ {
516
+ "epoch": 3.08,
517
+ "learning_rate": 1.5892231484837503e-05,
518
+ "loss": 2.3462,
519
+ "step": 42500
520
+ },
521
+ {
522
+ "epoch": 3.12,
523
+ "learning_rate": 1.5843904796423827e-05,
524
+ "loss": 2.3395,
525
+ "step": 43000
526
+ },
527
+ {
528
+ "epoch": 3.15,
529
+ "learning_rate": 1.579557810801015e-05,
530
+ "loss": 2.3287,
531
+ "step": 43500
532
+ },
533
+ {
534
+ "epoch": 3.19,
535
+ "learning_rate": 1.5747251419596473e-05,
536
+ "loss": 2.3408,
537
+ "step": 44000
538
+ },
539
+ {
540
+ "epoch": 3.23,
541
+ "learning_rate": 1.5698924731182796e-05,
542
+ "loss": 2.3126,
543
+ "step": 44500
544
+ },
545
+ {
546
+ "epoch": 3.26,
547
+ "learning_rate": 1.565059804276912e-05,
548
+ "loss": 2.3234,
549
+ "step": 45000
550
+ },
551
+ {
552
+ "epoch": 3.3,
553
+ "learning_rate": 1.5602271354355446e-05,
554
+ "loss": 2.324,
555
+ "step": 45500
556
+ },
557
+ {
558
+ "epoch": 3.33,
559
+ "learning_rate": 1.5553944665941766e-05,
560
+ "loss": 2.3156,
561
+ "step": 46000
562
+ },
563
+ {
564
+ "epoch": 3.37,
565
+ "learning_rate": 1.5505617977528093e-05,
566
+ "loss": 2.3264,
567
+ "step": 46500
568
+ },
569
+ {
570
+ "epoch": 3.41,
571
+ "learning_rate": 1.5457291289114416e-05,
572
+ "loss": 2.3086,
573
+ "step": 47000
574
+ },
575
+ {
576
+ "epoch": 3.44,
577
+ "learning_rate": 1.540896460070074e-05,
578
+ "loss": 2.3095,
579
+ "step": 47500
580
+ },
581
+ {
582
+ "epoch": 3.48,
583
+ "learning_rate": 1.5360637912287062e-05,
584
+ "loss": 2.3015,
585
+ "step": 48000
586
+ },
587
+ {
588
+ "epoch": 3.52,
589
+ "learning_rate": 1.5312311223873385e-05,
590
+ "loss": 2.302,
591
+ "step": 48500
592
+ },
593
+ {
594
+ "epoch": 3.55,
595
+ "learning_rate": 1.526398453545971e-05,
596
+ "loss": 2.3164,
597
+ "step": 49000
598
+ },
599
+ {
600
+ "epoch": 3.59,
601
+ "learning_rate": 1.5215657847046032e-05,
602
+ "loss": 2.3067,
603
+ "step": 49500
604
+ },
605
+ {
606
+ "epoch": 3.62,
607
+ "learning_rate": 1.5167331158632357e-05,
608
+ "loss": 2.2705,
609
+ "step": 50000
610
+ },
611
+ {
612
+ "epoch": 3.62,
613
+ "eval_bleu": 23.2646,
614
+ "eval_gen_len": 26.1331,
615
+ "eval_loss": 1.9072902202606201,
616
+ "eval_runtime": 5550.0392,
617
+ "eval_samples_per_second": 9.942,
618
+ "eval_steps_per_second": 1.243,
619
+ "step": 50000
620
+ },
621
+ {
622
+ "epoch": 3.66,
623
+ "learning_rate": 1.5119004470218678e-05,
624
+ "loss": 2.2769,
625
+ "step": 50500
626
+ },
627
+ {
628
+ "epoch": 3.7,
629
+ "learning_rate": 1.5070677781805003e-05,
630
+ "loss": 2.3089,
631
+ "step": 51000
632
+ },
633
+ {
634
+ "epoch": 3.73,
635
+ "learning_rate": 1.5022351093391328e-05,
636
+ "loss": 2.2817,
637
+ "step": 51500
638
+ },
639
+ {
640
+ "epoch": 3.77,
641
+ "learning_rate": 1.497402440497765e-05,
642
+ "loss": 2.2757,
643
+ "step": 52000
644
+ },
645
+ {
646
+ "epoch": 3.81,
647
+ "learning_rate": 1.4925697716563973e-05,
648
+ "loss": 2.287,
649
+ "step": 52500
650
+ },
651
+ {
652
+ "epoch": 3.84,
653
+ "learning_rate": 1.4877371028150298e-05,
654
+ "loss": 2.289,
655
+ "step": 53000
656
+ },
657
+ {
658
+ "epoch": 3.88,
659
+ "learning_rate": 1.482904433973662e-05,
660
+ "loss": 2.268,
661
+ "step": 53500
662
+ },
663
+ {
664
+ "epoch": 3.91,
665
+ "learning_rate": 1.4780717651322944e-05,
666
+ "loss": 2.2766,
667
+ "step": 54000
668
+ },
669
+ {
670
+ "epoch": 3.95,
671
+ "learning_rate": 1.473239096290927e-05,
672
+ "loss": 2.2775,
673
+ "step": 54500
674
+ },
675
+ {
676
+ "epoch": 3.99,
677
+ "learning_rate": 1.4684064274495591e-05,
678
+ "loss": 2.2795,
679
+ "step": 55000
680
+ },
681
+ {
682
+ "epoch": 4.02,
683
+ "learning_rate": 1.4635737586081916e-05,
684
+ "loss": 2.2683,
685
+ "step": 55500
686
+ },
687
+ {
688
+ "epoch": 4.06,
689
+ "learning_rate": 1.4587410897668239e-05,
690
+ "loss": 2.2551,
691
+ "step": 56000
692
+ },
693
+ {
694
+ "epoch": 4.1,
695
+ "learning_rate": 1.4539084209254562e-05,
696
+ "loss": 2.2511,
697
+ "step": 56500
698
+ },
699
+ {
700
+ "epoch": 4.13,
701
+ "learning_rate": 1.4490757520840885e-05,
702
+ "loss": 2.258,
703
+ "step": 57000
704
+ },
705
+ {
706
+ "epoch": 4.17,
707
+ "learning_rate": 1.444243083242721e-05,
708
+ "loss": 2.2593,
709
+ "step": 57500
710
+ },
711
+ {
712
+ "epoch": 4.2,
713
+ "learning_rate": 1.4394104144013532e-05,
714
+ "loss": 2.2481,
715
+ "step": 58000
716
+ },
717
+ {
718
+ "epoch": 4.24,
719
+ "learning_rate": 1.4345777455599857e-05,
720
+ "loss": 2.2485,
721
+ "step": 58500
722
+ },
723
+ {
724
+ "epoch": 4.28,
725
+ "learning_rate": 1.429745076718618e-05,
726
+ "loss": 2.2594,
727
+ "step": 59000
728
+ },
729
+ {
730
+ "epoch": 4.31,
731
+ "learning_rate": 1.4249124078772503e-05,
732
+ "loss": 2.2593,
733
+ "step": 59500
734
+ },
735
+ {
736
+ "epoch": 4.35,
737
+ "learning_rate": 1.4200797390358827e-05,
738
+ "loss": 2.242,
739
+ "step": 60000
740
+ },
741
+ {
742
+ "epoch": 4.39,
743
+ "learning_rate": 1.4152470701945151e-05,
744
+ "loss": 2.2294,
745
+ "step": 60500
746
+ },
747
+ {
748
+ "epoch": 4.42,
749
+ "learning_rate": 1.4104144013531473e-05,
750
+ "loss": 2.2517,
751
+ "step": 61000
752
+ },
753
+ {
754
+ "epoch": 4.46,
755
+ "learning_rate": 1.4055817325117798e-05,
756
+ "loss": 2.2243,
757
+ "step": 61500
758
+ },
759
+ {
760
+ "epoch": 4.49,
761
+ "learning_rate": 1.4007490636704121e-05,
762
+ "loss": 2.2484,
763
+ "step": 62000
764
+ },
765
+ {
766
+ "epoch": 4.53,
767
+ "learning_rate": 1.3959163948290444e-05,
768
+ "loss": 2.228,
769
+ "step": 62500
770
+ },
771
+ {
772
+ "epoch": 4.57,
773
+ "learning_rate": 1.3910837259876768e-05,
774
+ "loss": 2.2314,
775
+ "step": 63000
776
+ },
777
+ {
778
+ "epoch": 4.6,
779
+ "learning_rate": 1.3862510571463093e-05,
780
+ "loss": 2.2317,
781
+ "step": 63500
782
+ },
783
+ {
784
+ "epoch": 4.64,
785
+ "learning_rate": 1.3814183883049414e-05,
786
+ "loss": 2.2187,
787
+ "step": 64000
788
+ },
789
+ {
790
+ "epoch": 4.68,
791
+ "learning_rate": 1.3765857194635739e-05,
792
+ "loss": 2.2325,
793
+ "step": 64500
794
+ },
795
+ {
796
+ "epoch": 4.71,
797
+ "learning_rate": 1.3717530506222064e-05,
798
+ "loss": 2.2221,
799
+ "step": 65000
800
+ },
801
+ {
802
+ "epoch": 4.75,
803
+ "learning_rate": 1.3669203817808385e-05,
804
+ "loss": 2.2218,
805
+ "step": 65500
806
+ },
807
+ {
808
+ "epoch": 4.78,
809
+ "learning_rate": 1.3620877129394709e-05,
810
+ "loss": 2.2156,
811
+ "step": 66000
812
+ },
813
+ {
814
+ "epoch": 4.82,
815
+ "learning_rate": 1.3572550440981034e-05,
816
+ "loss": 2.2232,
817
+ "step": 66500
818
+ },
819
+ {
820
+ "epoch": 4.86,
821
+ "learning_rate": 1.3524223752567355e-05,
822
+ "loss": 2.2084,
823
+ "step": 67000
824
+ },
825
+ {
826
+ "epoch": 4.89,
827
+ "learning_rate": 1.347589706415368e-05,
828
+ "loss": 2.2374,
829
+ "step": 67500
830
+ },
831
+ {
832
+ "epoch": 4.93,
833
+ "learning_rate": 1.3427570375740005e-05,
834
+ "loss": 2.2046,
835
+ "step": 68000
836
+ },
837
+ {
838
+ "epoch": 4.97,
839
+ "learning_rate": 1.3379243687326327e-05,
840
+ "loss": 2.2109,
841
+ "step": 68500
842
+ },
843
+ {
844
+ "epoch": 5.0,
845
+ "learning_rate": 1.3330916998912651e-05,
846
+ "loss": 2.2115,
847
+ "step": 69000
848
+ },
849
+ {
850
+ "epoch": 5.04,
851
+ "learning_rate": 1.3282590310498975e-05,
852
+ "loss": 2.2053,
853
+ "step": 69500
854
+ },
855
+ {
856
+ "epoch": 5.07,
857
+ "learning_rate": 1.3234263622085298e-05,
858
+ "loss": 2.1956,
859
+ "step": 70000
860
+ },
861
+ {
862
+ "epoch": 5.11,
863
+ "learning_rate": 1.3185936933671621e-05,
864
+ "loss": 2.2031,
865
+ "step": 70500
866
+ },
867
+ {
868
+ "epoch": 5.15,
869
+ "learning_rate": 1.3137610245257946e-05,
870
+ "loss": 2.1959,
871
+ "step": 71000
872
+ },
873
+ {
874
+ "epoch": 5.18,
875
+ "learning_rate": 1.3089283556844268e-05,
876
+ "loss": 2.2023,
877
+ "step": 71500
878
+ },
879
+ {
880
+ "epoch": 5.22,
881
+ "learning_rate": 1.3040956868430593e-05,
882
+ "loss": 2.197,
883
+ "step": 72000
884
+ },
885
+ {
886
+ "epoch": 5.26,
887
+ "learning_rate": 1.2992630180016916e-05,
888
+ "loss": 2.212,
889
+ "step": 72500
890
+ },
891
+ {
892
+ "epoch": 5.29,
893
+ "learning_rate": 1.2944303491603239e-05,
894
+ "loss": 2.204,
895
+ "step": 73000
896
+ },
897
+ {
898
+ "epoch": 5.33,
899
+ "learning_rate": 1.2895976803189562e-05,
900
+ "loss": 2.1871,
901
+ "step": 73500
902
+ },
903
+ {
904
+ "epoch": 5.36,
905
+ "learning_rate": 1.2847650114775887e-05,
906
+ "loss": 2.1852,
907
+ "step": 74000
908
+ },
909
+ {
910
+ "epoch": 5.4,
911
+ "learning_rate": 1.2799323426362209e-05,
912
+ "loss": 2.1804,
913
+ "step": 74500
914
+ },
915
+ {
916
+ "epoch": 5.44,
917
+ "learning_rate": 1.2750996737948534e-05,
918
+ "loss": 2.1949,
919
+ "step": 75000
920
+ },
921
+ {
922
+ "epoch": 5.47,
923
+ "learning_rate": 1.2702670049534857e-05,
924
+ "loss": 2.1662,
925
+ "step": 75500
926
+ },
927
+ {
928
+ "epoch": 5.51,
929
+ "learning_rate": 1.265434336112118e-05,
930
+ "loss": 2.1871,
931
+ "step": 76000
932
+ },
933
+ {
934
+ "epoch": 5.55,
935
+ "learning_rate": 1.2606016672707503e-05,
936
+ "loss": 2.1841,
937
+ "step": 76500
938
+ },
939
+ {
940
+ "epoch": 5.58,
941
+ "learning_rate": 1.2557689984293828e-05,
942
+ "loss": 2.1719,
943
+ "step": 77000
944
+ },
945
+ {
946
+ "epoch": 5.62,
947
+ "learning_rate": 1.250936329588015e-05,
948
+ "loss": 2.1755,
949
+ "step": 77500
950
+ },
951
+ {
952
+ "epoch": 5.65,
953
+ "learning_rate": 1.2461036607466475e-05,
954
+ "loss": 2.1589,
955
+ "step": 78000
956
+ },
957
+ {
958
+ "epoch": 5.69,
959
+ "learning_rate": 1.24127099190528e-05,
960
+ "loss": 2.1785,
961
+ "step": 78500
962
+ },
963
+ {
964
+ "epoch": 5.73,
965
+ "learning_rate": 1.2364383230639121e-05,
966
+ "loss": 2.1931,
967
+ "step": 79000
968
+ },
969
+ {
970
+ "epoch": 5.76,
971
+ "learning_rate": 1.2316056542225444e-05,
972
+ "loss": 2.1827,
973
+ "step": 79500
974
+ },
975
+ {
976
+ "epoch": 5.8,
977
+ "learning_rate": 1.226772985381177e-05,
978
+ "loss": 2.1628,
979
+ "step": 80000
980
+ },
981
+ {
982
+ "epoch": 5.84,
983
+ "learning_rate": 1.2219403165398091e-05,
984
+ "loss": 2.1702,
985
+ "step": 80500
986
+ },
987
+ {
988
+ "epoch": 5.87,
989
+ "learning_rate": 1.2171076476984416e-05,
990
+ "loss": 2.161,
991
+ "step": 81000
992
+ },
993
+ {
994
+ "epoch": 5.91,
995
+ "learning_rate": 1.212274978857074e-05,
996
+ "loss": 2.172,
997
+ "step": 81500
998
+ },
999
+ {
1000
+ "epoch": 5.94,
1001
+ "learning_rate": 1.2074423100157062e-05,
1002
+ "loss": 2.1731,
1003
+ "step": 82000
1004
+ },
1005
+ {
1006
+ "epoch": 5.98,
1007
+ "learning_rate": 1.2026096411743387e-05,
1008
+ "loss": 2.1582,
1009
+ "step": 82500
1010
+ },
1011
+ {
1012
+ "epoch": 6.02,
1013
+ "learning_rate": 1.197776972332971e-05,
1014
+ "loss": 2.1739,
1015
+ "step": 83000
1016
+ },
1017
+ {
1018
+ "epoch": 6.05,
1019
+ "learning_rate": 1.1929443034916034e-05,
1020
+ "loss": 2.1447,
1021
+ "step": 83500
1022
+ },
1023
+ {
1024
+ "epoch": 6.09,
1025
+ "learning_rate": 1.1881116346502357e-05,
1026
+ "loss": 2.159,
1027
+ "step": 84000
1028
+ },
1029
+ {
1030
+ "epoch": 6.13,
1031
+ "learning_rate": 1.1832789658088682e-05,
1032
+ "loss": 2.17,
1033
+ "step": 84500
1034
+ },
1035
+ {
1036
+ "epoch": 6.16,
1037
+ "learning_rate": 1.1784462969675003e-05,
1038
+ "loss": 2.1441,
1039
+ "step": 85000
1040
+ },
1041
+ {
1042
+ "epoch": 6.2,
1043
+ "learning_rate": 1.1736136281261328e-05,
1044
+ "loss": 2.1447,
1045
+ "step": 85500
1046
+ },
1047
+ {
1048
+ "epoch": 6.23,
1049
+ "learning_rate": 1.1687809592847651e-05,
1050
+ "loss": 2.1529,
1051
+ "step": 86000
1052
+ },
1053
+ {
1054
+ "epoch": 6.27,
1055
+ "learning_rate": 1.1639482904433975e-05,
1056
+ "loss": 2.1558,
1057
+ "step": 86500
1058
+ },
1059
+ {
1060
+ "epoch": 6.31,
1061
+ "learning_rate": 1.1591156216020298e-05,
1062
+ "loss": 2.1356,
1063
+ "step": 87000
1064
+ },
1065
+ {
1066
+ "epoch": 6.34,
1067
+ "learning_rate": 1.1542829527606623e-05,
1068
+ "loss": 2.1593,
1069
+ "step": 87500
1070
+ },
1071
+ {
1072
+ "epoch": 6.38,
1073
+ "learning_rate": 1.1494502839192944e-05,
1074
+ "loss": 2.1467,
1075
+ "step": 88000
1076
+ },
1077
+ {
1078
+ "epoch": 6.42,
1079
+ "learning_rate": 1.144617615077927e-05,
1080
+ "loss": 2.1533,
1081
+ "step": 88500
1082
+ },
1083
+ {
1084
+ "epoch": 6.45,
1085
+ "learning_rate": 1.1397849462365593e-05,
1086
+ "loss": 2.1261,
1087
+ "step": 89000
1088
+ },
1089
+ {
1090
+ "epoch": 6.49,
1091
+ "learning_rate": 1.1349522773951916e-05,
1092
+ "loss": 2.1519,
1093
+ "step": 89500
1094
+ },
1095
+ {
1096
+ "epoch": 6.52,
1097
+ "learning_rate": 1.1301196085538239e-05,
1098
+ "loss": 2.1513,
1099
+ "step": 90000
1100
+ },
1101
+ {
1102
+ "epoch": 6.56,
1103
+ "learning_rate": 1.1252869397124564e-05,
1104
+ "loss": 2.1404,
1105
+ "step": 90500
1106
+ },
1107
+ {
1108
+ "epoch": 6.6,
1109
+ "learning_rate": 1.1204542708710885e-05,
1110
+ "loss": 2.1427,
1111
+ "step": 91000
1112
+ },
1113
+ {
1114
+ "epoch": 6.63,
1115
+ "learning_rate": 1.115621602029721e-05,
1116
+ "loss": 2.138,
1117
+ "step": 91500
1118
+ },
1119
+ {
1120
+ "epoch": 6.67,
1121
+ "learning_rate": 1.1107889331883535e-05,
1122
+ "loss": 2.1382,
1123
+ "step": 92000
1124
+ },
1125
+ {
1126
+ "epoch": 6.71,
1127
+ "learning_rate": 1.1059562643469857e-05,
1128
+ "loss": 2.1344,
1129
+ "step": 92500
1130
+ },
1131
+ {
1132
+ "epoch": 6.74,
1133
+ "learning_rate": 1.101123595505618e-05,
1134
+ "loss": 2.1474,
1135
+ "step": 93000
1136
+ },
1137
+ {
1138
+ "epoch": 6.78,
1139
+ "learning_rate": 1.0962909266642505e-05,
1140
+ "loss": 2.1197,
1141
+ "step": 93500
1142
+ },
1143
+ {
1144
+ "epoch": 6.81,
1145
+ "learning_rate": 1.0914582578228827e-05,
1146
+ "loss": 2.146,
1147
+ "step": 94000
1148
+ },
1149
+ {
1150
+ "epoch": 6.85,
1151
+ "learning_rate": 1.0866255889815151e-05,
1152
+ "loss": 2.1313,
1153
+ "step": 94500
1154
+ },
1155
+ {
1156
+ "epoch": 6.89,
1157
+ "learning_rate": 1.0817929201401476e-05,
1158
+ "loss": 2.1374,
1159
+ "step": 95000
1160
+ },
1161
+ {
1162
+ "epoch": 6.92,
1163
+ "learning_rate": 1.0769602512987798e-05,
1164
+ "loss": 2.1329,
1165
+ "step": 95500
1166
+ },
1167
+ {
1168
+ "epoch": 6.96,
1169
+ "learning_rate": 1.0721275824574123e-05,
1170
+ "loss": 2.132,
1171
+ "step": 96000
1172
+ },
1173
+ {
1174
+ "epoch": 7.0,
1175
+ "learning_rate": 1.0672949136160446e-05,
1176
+ "loss": 2.1142,
1177
+ "step": 96500
1178
+ },
1179
+ {
1180
+ "epoch": 7.03,
1181
+ "learning_rate": 1.062462244774677e-05,
1182
+ "loss": 2.1338,
1183
+ "step": 97000
1184
+ },
1185
+ {
1186
+ "epoch": 7.07,
1187
+ "learning_rate": 1.0576295759333093e-05,
1188
+ "loss": 2.1137,
1189
+ "step": 97500
1190
+ },
1191
+ {
1192
+ "epoch": 7.1,
1193
+ "learning_rate": 1.0527969070919417e-05,
1194
+ "loss": 2.1221,
1195
+ "step": 98000
1196
+ },
1197
+ {
1198
+ "epoch": 7.14,
1199
+ "learning_rate": 1.0479642382505739e-05,
1200
+ "loss": 2.1382,
1201
+ "step": 98500
1202
+ },
1203
+ {
1204
+ "epoch": 7.18,
1205
+ "learning_rate": 1.0431315694092064e-05,
1206
+ "loss": 2.1207,
1207
+ "step": 99000
1208
+ },
1209
+ {
1210
+ "epoch": 7.21,
1211
+ "learning_rate": 1.0382989005678387e-05,
1212
+ "loss": 2.1163,
1213
+ "step": 99500
1214
+ },
1215
+ {
1216
+ "epoch": 7.25,
1217
+ "learning_rate": 1.033466231726471e-05,
1218
+ "loss": 2.1263,
1219
+ "step": 100000
1220
+ },
1221
+ {
1222
+ "epoch": 7.25,
1223
+ "eval_bleu": 25.2752,
1224
+ "eval_gen_len": 24.8098,
1225
+ "eval_loss": 1.8044791221618652,
1226
+ "eval_runtime": 4767.4438,
1227
+ "eval_samples_per_second": 11.574,
1228
+ "eval_steps_per_second": 1.447,
1229
+ "step": 100000
1230
+ },
1231
+ {
1232
+ "epoch": 7.29,
1233
+ "learning_rate": 1.0286335628851034e-05,
1234
+ "loss": 2.0977,
1235
+ "step": 100500
1236
+ },
1237
+ {
1238
+ "epoch": 7.32,
1239
+ "learning_rate": 1.0238008940437359e-05,
1240
+ "loss": 2.1114,
1241
+ "step": 101000
1242
+ },
1243
+ {
1244
+ "epoch": 7.36,
1245
+ "learning_rate": 1.018968225202368e-05,
1246
+ "loss": 2.1208,
1247
+ "step": 101500
1248
+ },
1249
+ {
1250
+ "epoch": 7.39,
1251
+ "learning_rate": 1.0141355563610005e-05,
1252
+ "loss": 2.1006,
1253
+ "step": 102000
1254
+ },
1255
+ {
1256
+ "epoch": 7.43,
1257
+ "learning_rate": 1.0093028875196328e-05,
1258
+ "loss": 2.1091,
1259
+ "step": 102500
1260
+ },
1261
+ {
1262
+ "epoch": 7.47,
1263
+ "learning_rate": 1.0044702186782651e-05,
1264
+ "loss": 2.1347,
1265
+ "step": 103000
1266
+ },
1267
+ {
1268
+ "epoch": 7.5,
1269
+ "learning_rate": 9.996375498368975e-06,
1270
+ "loss": 2.1106,
1271
+ "step": 103500
1272
+ },
1273
+ {
1274
+ "epoch": 7.54,
1275
+ "learning_rate": 9.948048809955298e-06,
1276
+ "loss": 2.1071,
1277
+ "step": 104000
1278
+ },
1279
+ {
1280
+ "epoch": 7.58,
1281
+ "learning_rate": 9.899722121541621e-06,
1282
+ "loss": 2.0954,
1283
+ "step": 104500
1284
+ },
1285
+ {
1286
+ "epoch": 7.61,
1287
+ "learning_rate": 9.851395433127946e-06,
1288
+ "loss": 2.1072,
1289
+ "step": 105000
1290
+ },
1291
+ {
1292
+ "epoch": 7.65,
1293
+ "learning_rate": 9.80306874471427e-06,
1294
+ "loss": 2.1051,
1295
+ "step": 105500
1296
+ },
1297
+ {
1298
+ "epoch": 7.68,
1299
+ "learning_rate": 9.754742056300593e-06,
1300
+ "loss": 2.1066,
1301
+ "step": 106000
1302
+ },
1303
+ {
1304
+ "epoch": 7.72,
1305
+ "learning_rate": 9.706415367886916e-06,
1306
+ "loss": 2.1122,
1307
+ "step": 106500
1308
+ },
1309
+ {
1310
+ "epoch": 7.76,
1311
+ "learning_rate": 9.658088679473239e-06,
1312
+ "loss": 2.0952,
1313
+ "step": 107000
1314
+ },
1315
+ {
1316
+ "epoch": 7.79,
1317
+ "learning_rate": 9.609761991059562e-06,
1318
+ "loss": 2.085,
1319
+ "step": 107500
1320
+ },
1321
+ {
1322
+ "epoch": 7.83,
1323
+ "learning_rate": 9.561435302645887e-06,
1324
+ "loss": 2.1076,
1325
+ "step": 108000
1326
+ },
1327
+ {
1328
+ "epoch": 7.87,
1329
+ "learning_rate": 9.51310861423221e-06,
1330
+ "loss": 2.1236,
1331
+ "step": 108500
1332
+ },
1333
+ {
1334
+ "epoch": 7.9,
1335
+ "learning_rate": 9.464781925818534e-06,
1336
+ "loss": 2.0932,
1337
+ "step": 109000
1338
+ },
1339
+ {
1340
+ "epoch": 7.94,
1341
+ "learning_rate": 9.416455237404859e-06,
1342
+ "loss": 2.0892,
1343
+ "step": 109500
1344
+ },
1345
+ {
1346
+ "epoch": 7.97,
1347
+ "learning_rate": 9.368128548991182e-06,
1348
+ "loss": 2.0778,
1349
+ "step": 110000
1350
+ },
1351
+ {
1352
+ "epoch": 8.01,
1353
+ "learning_rate": 9.319801860577505e-06,
1354
+ "loss": 2.1035,
1355
+ "step": 110500
1356
+ },
1357
+ {
1358
+ "epoch": 8.05,
1359
+ "learning_rate": 9.271475172163828e-06,
1360
+ "loss": 2.0876,
1361
+ "step": 111000
1362
+ },
1363
+ {
1364
+ "epoch": 8.08,
1365
+ "learning_rate": 9.223148483750151e-06,
1366
+ "loss": 2.0869,
1367
+ "step": 111500
1368
+ },
1369
+ {
1370
+ "epoch": 8.12,
1371
+ "learning_rate": 9.174821795336475e-06,
1372
+ "loss": 2.0972,
1373
+ "step": 112000
1374
+ },
1375
+ {
1376
+ "epoch": 8.16,
1377
+ "learning_rate": 9.1264951069228e-06,
1378
+ "loss": 2.0948,
1379
+ "step": 112500
1380
+ },
1381
+ {
1382
+ "epoch": 8.19,
1383
+ "learning_rate": 9.078168418509123e-06,
1384
+ "loss": 2.0742,
1385
+ "step": 113000
1386
+ },
1387
+ {
1388
+ "epoch": 8.23,
1389
+ "learning_rate": 9.029841730095446e-06,
1390
+ "loss": 2.085,
1391
+ "step": 113500
1392
+ },
1393
+ {
1394
+ "epoch": 8.26,
1395
+ "learning_rate": 8.98151504168177e-06,
1396
+ "loss": 2.0885,
1397
+ "step": 114000
1398
+ },
1399
+ {
1400
+ "epoch": 8.3,
1401
+ "learning_rate": 8.933188353268093e-06,
1402
+ "loss": 2.0667,
1403
+ "step": 114500
1404
+ },
1405
+ {
1406
+ "epoch": 8.34,
1407
+ "learning_rate": 8.884861664854416e-06,
1408
+ "loss": 2.1004,
1409
+ "step": 115000
1410
+ },
1411
+ {
1412
+ "epoch": 8.37,
1413
+ "learning_rate": 8.83653497644074e-06,
1414
+ "loss": 2.099,
1415
+ "step": 115500
1416
+ },
1417
+ {
1418
+ "epoch": 8.41,
1419
+ "learning_rate": 8.788208288027064e-06,
1420
+ "loss": 2.0988,
1421
+ "step": 116000
1422
+ },
1423
+ {
1424
+ "epoch": 8.45,
1425
+ "learning_rate": 8.739881599613387e-06,
1426
+ "loss": 2.0861,
1427
+ "step": 116500
1428
+ },
1429
+ {
1430
+ "epoch": 8.48,
1431
+ "learning_rate": 8.69155491119971e-06,
1432
+ "loss": 2.0722,
1433
+ "step": 117000
1434
+ },
1435
+ {
1436
+ "epoch": 8.52,
1437
+ "learning_rate": 8.643228222786034e-06,
1438
+ "loss": 2.0951,
1439
+ "step": 117500
1440
+ },
1441
+ {
1442
+ "epoch": 8.55,
1443
+ "learning_rate": 8.594901534372357e-06,
1444
+ "loss": 2.0915,
1445
+ "step": 118000
1446
+ },
1447
+ {
1448
+ "epoch": 8.59,
1449
+ "learning_rate": 8.546574845958682e-06,
1450
+ "loss": 2.099,
1451
+ "step": 118500
1452
+ },
1453
+ {
1454
+ "epoch": 8.63,
1455
+ "learning_rate": 8.498248157545005e-06,
1456
+ "loss": 2.0805,
1457
+ "step": 119000
1458
+ },
1459
+ {
1460
+ "epoch": 8.66,
1461
+ "learning_rate": 8.449921469131328e-06,
1462
+ "loss": 2.0857,
1463
+ "step": 119500
1464
+ },
1465
+ {
1466
+ "epoch": 8.7,
1467
+ "learning_rate": 8.401594780717651e-06,
1468
+ "loss": 2.0849,
1469
+ "step": 120000
1470
+ },
1471
+ {
1472
+ "epoch": 8.74,
1473
+ "learning_rate": 8.353268092303975e-06,
1474
+ "loss": 2.0936,
1475
+ "step": 120500
1476
+ },
1477
+ {
1478
+ "epoch": 8.77,
1479
+ "learning_rate": 8.304941403890298e-06,
1480
+ "loss": 2.0781,
1481
+ "step": 121000
1482
+ },
1483
+ {
1484
+ "epoch": 8.81,
1485
+ "learning_rate": 8.256614715476623e-06,
1486
+ "loss": 2.0824,
1487
+ "step": 121500
1488
+ },
1489
+ {
1490
+ "epoch": 8.84,
1491
+ "learning_rate": 8.208288027062946e-06,
1492
+ "loss": 2.0824,
1493
+ "step": 122000
1494
+ },
1495
+ {
1496
+ "epoch": 8.88,
1497
+ "learning_rate": 8.15996133864927e-06,
1498
+ "loss": 2.0822,
1499
+ "step": 122500
1500
+ },
1501
+ {
1502
+ "epoch": 8.92,
1503
+ "learning_rate": 8.111634650235594e-06,
1504
+ "loss": 2.0873,
1505
+ "step": 123000
1506
+ },
1507
+ {
1508
+ "epoch": 8.95,
1509
+ "learning_rate": 8.063307961821917e-06,
1510
+ "loss": 2.0679,
1511
+ "step": 123500
1512
+ },
1513
+ {
1514
+ "epoch": 8.99,
1515
+ "learning_rate": 8.01498127340824e-06,
1516
+ "loss": 2.077,
1517
+ "step": 124000
1518
+ },
1519
+ {
1520
+ "epoch": 9.03,
1521
+ "learning_rate": 7.966654584994564e-06,
1522
+ "loss": 2.0701,
1523
+ "step": 124500
1524
+ },
1525
+ {
1526
+ "epoch": 9.06,
1527
+ "learning_rate": 7.918327896580887e-06,
1528
+ "loss": 2.0566,
1529
+ "step": 125000
1530
+ },
1531
+ {
1532
+ "epoch": 9.1,
1533
+ "learning_rate": 7.87000120816721e-06,
1534
+ "loss": 2.0904,
1535
+ "step": 125500
1536
+ },
1537
+ {
1538
+ "epoch": 9.13,
1539
+ "learning_rate": 7.821674519753535e-06,
1540
+ "loss": 2.0787,
1541
+ "step": 126000
1542
+ },
1543
+ {
1544
+ "epoch": 9.17,
1545
+ "learning_rate": 7.773347831339859e-06,
1546
+ "loss": 2.0832,
1547
+ "step": 126500
1548
+ },
1549
+ {
1550
+ "epoch": 9.21,
1551
+ "learning_rate": 7.725021142926182e-06,
1552
+ "loss": 2.0615,
1553
+ "step": 127000
1554
+ },
1555
+ {
1556
+ "epoch": 9.24,
1557
+ "learning_rate": 7.676694454512505e-06,
1558
+ "loss": 2.0707,
1559
+ "step": 127500
1560
+ },
1561
+ {
1562
+ "epoch": 9.28,
1563
+ "learning_rate": 7.628367766098828e-06,
1564
+ "loss": 2.0756,
1565
+ "step": 128000
1566
+ },
1567
+ {
1568
+ "epoch": 9.31,
1569
+ "learning_rate": 7.5800410776851515e-06,
1570
+ "loss": 2.0687,
1571
+ "step": 128500
1572
+ },
1573
+ {
1574
+ "epoch": 9.35,
1575
+ "learning_rate": 7.531714389271476e-06,
1576
+ "loss": 2.0657,
1577
+ "step": 129000
1578
+ },
1579
+ {
1580
+ "epoch": 9.39,
1581
+ "learning_rate": 7.4833877008578e-06,
1582
+ "loss": 2.0559,
1583
+ "step": 129500
1584
+ },
1585
+ {
1586
+ "epoch": 9.42,
1587
+ "learning_rate": 7.435061012444123e-06,
1588
+ "loss": 2.0727,
1589
+ "step": 130000
1590
+ },
1591
+ {
1592
+ "epoch": 9.46,
1593
+ "learning_rate": 7.386734324030447e-06,
1594
+ "loss": 2.0532,
1595
+ "step": 130500
1596
+ },
1597
+ {
1598
+ "epoch": 9.5,
1599
+ "learning_rate": 7.33840763561677e-06,
1600
+ "loss": 2.0783,
1601
+ "step": 131000
1602
+ },
1603
+ {
1604
+ "epoch": 9.53,
1605
+ "learning_rate": 7.290080947203093e-06,
1606
+ "loss": 2.0657,
1607
+ "step": 131500
1608
+ },
1609
+ {
1610
+ "epoch": 9.57,
1611
+ "learning_rate": 7.2417542587894175e-06,
1612
+ "loss": 2.0554,
1613
+ "step": 132000
1614
+ },
1615
+ {
1616
+ "epoch": 9.6,
1617
+ "learning_rate": 7.193427570375741e-06,
1618
+ "loss": 2.0649,
1619
+ "step": 132500
1620
+ },
1621
+ {
1622
+ "epoch": 9.64,
1623
+ "learning_rate": 7.145100881962064e-06,
1624
+ "loss": 2.0642,
1625
+ "step": 133000
1626
+ },
1627
+ {
1628
+ "epoch": 9.68,
1629
+ "learning_rate": 7.096774193548388e-06,
1630
+ "loss": 2.0416,
1631
+ "step": 133500
1632
+ },
1633
+ {
1634
+ "epoch": 9.71,
1635
+ "learning_rate": 7.048447505134711e-06,
1636
+ "loss": 2.0413,
1637
+ "step": 134000
1638
+ },
1639
+ {
1640
+ "epoch": 9.75,
1641
+ "learning_rate": 7.0001208167210345e-06,
1642
+ "loss": 2.0675,
1643
+ "step": 134500
1644
+ },
1645
+ {
1646
+ "epoch": 9.79,
1647
+ "learning_rate": 6.9517941283073586e-06,
1648
+ "loss": 2.034,
1649
+ "step": 135000
1650
+ },
1651
+ {
1652
+ "epoch": 9.82,
1653
+ "learning_rate": 6.903467439893682e-06,
1654
+ "loss": 2.0619,
1655
+ "step": 135500
1656
+ },
1657
+ {
1658
+ "epoch": 9.86,
1659
+ "learning_rate": 6.855140751480005e-06,
1660
+ "loss": 2.0584,
1661
+ "step": 136000
1662
+ },
1663
+ {
1664
+ "epoch": 9.89,
1665
+ "learning_rate": 6.806814063066329e-06,
1666
+ "loss": 2.0691,
1667
+ "step": 136500
1668
+ },
1669
+ {
1670
+ "epoch": 9.93,
1671
+ "learning_rate": 6.758487374652652e-06,
1672
+ "loss": 2.0519,
1673
+ "step": 137000
1674
+ },
1675
+ {
1676
+ "epoch": 9.97,
1677
+ "learning_rate": 6.7101606862389756e-06,
1678
+ "loss": 2.0675,
1679
+ "step": 137500
1680
+ },
1681
+ {
1682
+ "epoch": 10.0,
1683
+ "learning_rate": 6.6618339978253e-06,
1684
+ "loss": 2.0675,
1685
+ "step": 138000
1686
+ },
1687
+ {
1688
+ "epoch": 10.04,
1689
+ "learning_rate": 6.613507309411623e-06,
1690
+ "loss": 2.049,
1691
+ "step": 138500
1692
+ },
1693
+ {
1694
+ "epoch": 10.08,
1695
+ "learning_rate": 6.565180620997946e-06,
1696
+ "loss": 2.0531,
1697
+ "step": 139000
1698
+ },
1699
+ {
1700
+ "epoch": 10.11,
1701
+ "learning_rate": 6.51685393258427e-06,
1702
+ "loss": 2.0482,
1703
+ "step": 139500
1704
+ },
1705
+ {
1706
+ "epoch": 10.15,
1707
+ "learning_rate": 6.468527244170593e-06,
1708
+ "loss": 2.0685,
1709
+ "step": 140000
1710
+ },
1711
+ {
1712
+ "epoch": 10.18,
1713
+ "learning_rate": 6.420200555756917e-06,
1714
+ "loss": 2.0612,
1715
+ "step": 140500
1716
+ },
1717
+ {
1718
+ "epoch": 10.22,
1719
+ "learning_rate": 6.3718738673432416e-06,
1720
+ "loss": 2.0456,
1721
+ "step": 141000
1722
+ },
1723
+ {
1724
+ "epoch": 10.26,
1725
+ "learning_rate": 6.323547178929564e-06,
1726
+ "loss": 2.0311,
1727
+ "step": 141500
1728
+ },
1729
+ {
1730
+ "epoch": 10.29,
1731
+ "learning_rate": 6.275220490515887e-06,
1732
+ "loss": 2.0582,
1733
+ "step": 142000
1734
+ },
1735
+ {
1736
+ "epoch": 10.33,
1737
+ "learning_rate": 6.226893802102212e-06,
1738
+ "loss": 2.0563,
1739
+ "step": 142500
1740
+ },
1741
+ {
1742
+ "epoch": 10.37,
1743
+ "learning_rate": 6.178567113688535e-06,
1744
+ "loss": 2.0671,
1745
+ "step": 143000
1746
+ },
1747
+ {
1748
+ "epoch": 10.4,
1749
+ "learning_rate": 6.1302404252748586e-06,
1750
+ "loss": 2.0511,
1751
+ "step": 143500
1752
+ },
1753
+ {
1754
+ "epoch": 10.44,
1755
+ "learning_rate": 6.081913736861183e-06,
1756
+ "loss": 2.0436,
1757
+ "step": 144000
1758
+ },
1759
+ {
1760
+ "epoch": 10.47,
1761
+ "learning_rate": 6.033587048447506e-06,
1762
+ "loss": 2.0459,
1763
+ "step": 144500
1764
+ },
1765
+ {
1766
+ "epoch": 10.51,
1767
+ "learning_rate": 5.985260360033829e-06,
1768
+ "loss": 2.0462,
1769
+ "step": 145000
1770
+ },
1771
+ {
1772
+ "epoch": 10.55,
1773
+ "learning_rate": 5.936933671620153e-06,
1774
+ "loss": 2.0258,
1775
+ "step": 145500
1776
+ },
1777
+ {
1778
+ "epoch": 10.58,
1779
+ "learning_rate": 5.888606983206476e-06,
1780
+ "loss": 2.0513,
1781
+ "step": 146000
1782
+ },
1783
+ {
1784
+ "epoch": 10.62,
1785
+ "learning_rate": 5.8402802947928e-06,
1786
+ "loss": 2.0566,
1787
+ "step": 146500
1788
+ },
1789
+ {
1790
+ "epoch": 10.66,
1791
+ "learning_rate": 5.791953606379124e-06,
1792
+ "loss": 2.0485,
1793
+ "step": 147000
1794
+ },
1795
+ {
1796
+ "epoch": 10.69,
1797
+ "learning_rate": 5.743626917965447e-06,
1798
+ "loss": 2.0351,
1799
+ "step": 147500
1800
+ },
1801
+ {
1802
+ "epoch": 10.73,
1803
+ "learning_rate": 5.69530022955177e-06,
1804
+ "loss": 2.0449,
1805
+ "step": 148000
1806
+ },
1807
+ {
1808
+ "epoch": 10.76,
1809
+ "learning_rate": 5.646973541138094e-06,
1810
+ "loss": 2.0635,
1811
+ "step": 148500
1812
+ },
1813
+ {
1814
+ "epoch": 10.8,
1815
+ "learning_rate": 5.5986468527244175e-06,
1816
+ "loss": 2.0325,
1817
+ "step": 149000
1818
+ },
1819
+ {
1820
+ "epoch": 10.84,
1821
+ "learning_rate": 5.550320164310741e-06,
1822
+ "loss": 2.0581,
1823
+ "step": 149500
1824
+ },
1825
+ {
1826
+ "epoch": 10.87,
1827
+ "learning_rate": 5.501993475897065e-06,
1828
+ "loss": 2.0223,
1829
+ "step": 150000
1830
+ },
1831
+ {
1832
+ "epoch": 10.87,
1833
+ "eval_bleu": 26.2063,
1834
+ "eval_gen_len": 24.6095,
1835
+ "eval_loss": 1.7641891241073608,
1836
+ "eval_runtime": 4661.5713,
1837
+ "eval_samples_per_second": 11.837,
1838
+ "eval_steps_per_second": 1.48,
1839
+ "step": 150000
1840
+ },
1841
+ {
1842
+ "epoch": 10.91,
1843
+ "learning_rate": 5.453666787483388e-06,
1844
+ "loss": 2.0329,
1845
+ "step": 150500
1846
+ },
1847
+ {
1848
+ "epoch": 10.95,
1849
+ "learning_rate": 5.405340099069711e-06,
1850
+ "loss": 2.0588,
1851
+ "step": 151000
1852
+ },
1853
+ {
1854
+ "epoch": 10.98,
1855
+ "learning_rate": 5.357013410656035e-06,
1856
+ "loss": 2.0424,
1857
+ "step": 151500
1858
+ },
1859
+ {
1860
+ "epoch": 11.02,
1861
+ "learning_rate": 5.3086867222423586e-06,
1862
+ "loss": 2.0397,
1863
+ "step": 152000
1864
+ },
1865
+ {
1866
+ "epoch": 11.05,
1867
+ "learning_rate": 5.260360033828682e-06,
1868
+ "loss": 2.0162,
1869
+ "step": 152500
1870
+ },
1871
+ {
1872
+ "epoch": 11.09,
1873
+ "learning_rate": 5.212033345415006e-06,
1874
+ "loss": 2.0444,
1875
+ "step": 153000
1876
+ },
1877
+ {
1878
+ "epoch": 11.13,
1879
+ "learning_rate": 5.163706657001329e-06,
1880
+ "loss": 2.0421,
1881
+ "step": 153500
1882
+ },
1883
+ {
1884
+ "epoch": 11.16,
1885
+ "learning_rate": 5.115379968587652e-06,
1886
+ "loss": 2.0251,
1887
+ "step": 154000
1888
+ },
1889
+ {
1890
+ "epoch": 11.2,
1891
+ "learning_rate": 5.067053280173977e-06,
1892
+ "loss": 2.0599,
1893
+ "step": 154500
1894
+ },
1895
+ {
1896
+ "epoch": 11.24,
1897
+ "learning_rate": 5.0187265917603005e-06,
1898
+ "loss": 2.0569,
1899
+ "step": 155000
1900
+ },
1901
+ {
1902
+ "epoch": 11.27,
1903
+ "learning_rate": 4.970399903346624e-06,
1904
+ "loss": 2.0439,
1905
+ "step": 155500
1906
+ },
1907
+ {
1908
+ "epoch": 11.31,
1909
+ "learning_rate": 4.922073214932947e-06,
1910
+ "loss": 2.0327,
1911
+ "step": 156000
1912
+ },
1913
+ {
1914
+ "epoch": 11.34,
1915
+ "learning_rate": 4.873746526519271e-06,
1916
+ "loss": 2.0487,
1917
+ "step": 156500
1918
+ },
1919
+ {
1920
+ "epoch": 11.38,
1921
+ "learning_rate": 4.825419838105594e-06,
1922
+ "loss": 2.0561,
1923
+ "step": 157000
1924
+ },
1925
+ {
1926
+ "epoch": 11.42,
1927
+ "learning_rate": 4.7770931496919175e-06,
1928
+ "loss": 2.0341,
1929
+ "step": 157500
1930
+ },
1931
+ {
1932
+ "epoch": 11.45,
1933
+ "learning_rate": 4.7287664612782416e-06,
1934
+ "loss": 2.0343,
1935
+ "step": 158000
1936
+ },
1937
+ {
1938
+ "epoch": 11.49,
1939
+ "learning_rate": 4.680439772864565e-06,
1940
+ "loss": 2.0393,
1941
+ "step": 158500
1942
+ },
1943
+ {
1944
+ "epoch": 11.53,
1945
+ "learning_rate": 4.632113084450888e-06,
1946
+ "loss": 2.0165,
1947
+ "step": 159000
1948
+ },
1949
+ {
1950
+ "epoch": 11.56,
1951
+ "learning_rate": 4.583786396037212e-06,
1952
+ "loss": 2.0432,
1953
+ "step": 159500
1954
+ },
1955
+ {
1956
+ "epoch": 11.6,
1957
+ "learning_rate": 4.535459707623535e-06,
1958
+ "loss": 2.0254,
1959
+ "step": 160000
1960
+ },
1961
+ {
1962
+ "epoch": 11.63,
1963
+ "learning_rate": 4.4871330192098586e-06,
1964
+ "loss": 2.0027,
1965
+ "step": 160500
1966
+ },
1967
+ {
1968
+ "epoch": 11.67,
1969
+ "learning_rate": 4.438806330796183e-06,
1970
+ "loss": 2.0255,
1971
+ "step": 161000
1972
+ },
1973
+ {
1974
+ "epoch": 11.71,
1975
+ "learning_rate": 4.390479642382507e-06,
1976
+ "loss": 2.0347,
1977
+ "step": 161500
1978
+ },
1979
+ {
1980
+ "epoch": 11.74,
1981
+ "learning_rate": 4.342152953968829e-06,
1982
+ "loss": 2.0265,
1983
+ "step": 162000
1984
+ },
1985
+ {
1986
+ "epoch": 11.78,
1987
+ "learning_rate": 4.293826265555153e-06,
1988
+ "loss": 2.0319,
1989
+ "step": 162500
1990
+ },
1991
+ {
1992
+ "epoch": 11.82,
1993
+ "learning_rate": 4.245499577141477e-06,
1994
+ "loss": 2.0332,
1995
+ "step": 163000
1996
+ },
1997
+ {
1998
+ "epoch": 11.85,
1999
+ "learning_rate": 4.1971728887278005e-06,
2000
+ "loss": 2.0418,
2001
+ "step": 163500
2002
+ },
2003
+ {
2004
+ "epoch": 11.89,
2005
+ "learning_rate": 4.148846200314124e-06,
2006
+ "loss": 2.0511,
2007
+ "step": 164000
2008
+ },
2009
+ {
2010
+ "epoch": 11.92,
2011
+ "learning_rate": 4.100519511900448e-06,
2012
+ "loss": 2.0353,
2013
+ "step": 164500
2014
+ },
2015
+ {
2016
+ "epoch": 11.96,
2017
+ "learning_rate": 4.052192823486771e-06,
2018
+ "loss": 2.0393,
2019
+ "step": 165000
2020
+ },
2021
+ {
2022
+ "epoch": 12.0,
2023
+ "learning_rate": 4.003866135073094e-06,
2024
+ "loss": 2.016,
2025
+ "step": 165500
2026
+ },
2027
+ {
2028
+ "epoch": 12.03,
2029
+ "learning_rate": 3.955539446659418e-06,
2030
+ "loss": 2.0318,
2031
+ "step": 166000
2032
+ },
2033
+ {
2034
+ "epoch": 12.07,
2035
+ "learning_rate": 3.9072127582457416e-06,
2036
+ "loss": 2.0342,
2037
+ "step": 166500
2038
+ },
2039
+ {
2040
+ "epoch": 12.11,
2041
+ "learning_rate": 3.858886069832065e-06,
2042
+ "loss": 1.9975,
2043
+ "step": 167000
2044
+ },
2045
+ {
2046
+ "epoch": 12.14,
2047
+ "learning_rate": 3.810559381418389e-06,
2048
+ "loss": 2.0222,
2049
+ "step": 167500
2050
+ },
2051
+ {
2052
+ "epoch": 12.18,
2053
+ "learning_rate": 3.762232693004712e-06,
2054
+ "loss": 2.0455,
2055
+ "step": 168000
2056
+ },
2057
+ {
2058
+ "epoch": 12.21,
2059
+ "learning_rate": 3.7139060045910358e-06,
2060
+ "loss": 2.028,
2061
+ "step": 168500
2062
+ },
2063
+ {
2064
+ "epoch": 12.25,
2065
+ "learning_rate": 3.6655793161773594e-06,
2066
+ "loss": 2.0267,
2067
+ "step": 169000
2068
+ },
2069
+ {
2070
+ "epoch": 12.29,
2071
+ "learning_rate": 3.6172526277636827e-06,
2072
+ "loss": 2.0249,
2073
+ "step": 169500
2074
+ },
2075
+ {
2076
+ "epoch": 12.32,
2077
+ "learning_rate": 3.5689259393500063e-06,
2078
+ "loss": 2.0451,
2079
+ "step": 170000
2080
+ },
2081
+ {
2082
+ "epoch": 12.36,
2083
+ "learning_rate": 3.52059925093633e-06,
2084
+ "loss": 2.0133,
2085
+ "step": 170500
2086
+ },
2087
+ {
2088
+ "epoch": 12.4,
2089
+ "learning_rate": 3.472272562522653e-06,
2090
+ "loss": 2.0377,
2091
+ "step": 171000
2092
+ },
2093
+ {
2094
+ "epoch": 12.43,
2095
+ "learning_rate": 3.423945874108977e-06,
2096
+ "loss": 2.024,
2097
+ "step": 171500
2098
+ },
2099
+ {
2100
+ "epoch": 12.47,
2101
+ "learning_rate": 3.375619185695301e-06,
2102
+ "loss": 2.0368,
2103
+ "step": 172000
2104
+ },
2105
+ {
2106
+ "epoch": 12.5,
2107
+ "learning_rate": 3.327292497281624e-06,
2108
+ "loss": 2.0463,
2109
+ "step": 172500
2110
+ },
2111
+ {
2112
+ "epoch": 12.54,
2113
+ "learning_rate": 3.278965808867948e-06,
2114
+ "loss": 2.0359,
2115
+ "step": 173000
2116
+ },
2117
+ {
2118
+ "epoch": 12.58,
2119
+ "learning_rate": 3.230639120454271e-06,
2120
+ "loss": 2.0283,
2121
+ "step": 173500
2122
+ },
2123
+ {
2124
+ "epoch": 12.61,
2125
+ "learning_rate": 3.1823124320405947e-06,
2126
+ "loss": 2.035,
2127
+ "step": 174000
2128
+ },
2129
+ {
2130
+ "epoch": 12.65,
2131
+ "learning_rate": 3.1339857436269183e-06,
2132
+ "loss": 2.0163,
2133
+ "step": 174500
2134
+ },
2135
+ {
2136
+ "epoch": 12.69,
2137
+ "learning_rate": 3.0856590552132416e-06,
2138
+ "loss": 2.0234,
2139
+ "step": 175000
2140
+ },
2141
+ {
2142
+ "epoch": 12.72,
2143
+ "learning_rate": 3.0373323667995652e-06,
2144
+ "loss": 2.0273,
2145
+ "step": 175500
2146
+ },
2147
+ {
2148
+ "epoch": 12.76,
2149
+ "learning_rate": 2.989005678385889e-06,
2150
+ "loss": 2.0323,
2151
+ "step": 176000
2152
+ },
2153
+ {
2154
+ "epoch": 12.79,
2155
+ "learning_rate": 2.940678989972212e-06,
2156
+ "loss": 2.019,
2157
+ "step": 176500
2158
+ },
2159
+ {
2160
+ "epoch": 12.83,
2161
+ "learning_rate": 2.8923523015585358e-06,
2162
+ "loss": 2.0145,
2163
+ "step": 177000
2164
+ },
2165
+ {
2166
+ "epoch": 12.87,
2167
+ "learning_rate": 2.8440256131448594e-06,
2168
+ "loss": 2.0078,
2169
+ "step": 177500
2170
+ },
2171
+ {
2172
+ "epoch": 12.9,
2173
+ "learning_rate": 2.7956989247311827e-06,
2174
+ "loss": 2.0162,
2175
+ "step": 178000
2176
+ },
2177
+ {
2178
+ "epoch": 12.94,
2179
+ "learning_rate": 2.7473722363175063e-06,
2180
+ "loss": 2.0227,
2181
+ "step": 178500
2182
+ },
2183
+ {
2184
+ "epoch": 12.98,
2185
+ "learning_rate": 2.6990455479038304e-06,
2186
+ "loss": 2.0291,
2187
+ "step": 179000
2188
+ },
2189
+ {
2190
+ "epoch": 13.01,
2191
+ "learning_rate": 2.650718859490153e-06,
2192
+ "loss": 2.0248,
2193
+ "step": 179500
2194
+ },
2195
+ {
2196
+ "epoch": 13.05,
2197
+ "learning_rate": 2.6023921710764773e-06,
2198
+ "loss": 1.996,
2199
+ "step": 180000
2200
+ },
2201
+ {
2202
+ "epoch": 13.08,
2203
+ "learning_rate": 2.554065482662801e-06,
2204
+ "loss": 2.012,
2205
+ "step": 180500
2206
+ },
2207
+ {
2208
+ "epoch": 13.12,
2209
+ "learning_rate": 2.505738794249124e-06,
2210
+ "loss": 2.002,
2211
+ "step": 181000
2212
+ },
2213
+ {
2214
+ "epoch": 13.16,
2215
+ "learning_rate": 2.457412105835448e-06,
2216
+ "loss": 2.0191,
2217
+ "step": 181500
2218
+ },
2219
+ {
2220
+ "epoch": 13.19,
2221
+ "learning_rate": 2.4090854174217715e-06,
2222
+ "loss": 2.0173,
2223
+ "step": 182000
2224
+ },
2225
+ {
2226
+ "epoch": 13.23,
2227
+ "learning_rate": 2.360758729008095e-06,
2228
+ "loss": 2.0347,
2229
+ "step": 182500
2230
+ },
2231
+ {
2232
+ "epoch": 13.27,
2233
+ "learning_rate": 2.3124320405944183e-06,
2234
+ "loss": 2.0357,
2235
+ "step": 183000
2236
+ },
2237
+ {
2238
+ "epoch": 13.3,
2239
+ "learning_rate": 2.264105352180742e-06,
2240
+ "loss": 2.0414,
2241
+ "step": 183500
2242
+ },
2243
+ {
2244
+ "epoch": 13.34,
2245
+ "learning_rate": 2.2157786637670657e-06,
2246
+ "loss": 2.036,
2247
+ "step": 184000
2248
+ },
2249
+ {
2250
+ "epoch": 13.37,
2251
+ "learning_rate": 2.167451975353389e-06,
2252
+ "loss": 2.0063,
2253
+ "step": 184500
2254
+ },
2255
+ {
2256
+ "epoch": 13.41,
2257
+ "learning_rate": 2.119125286939713e-06,
2258
+ "loss": 2.0062,
2259
+ "step": 185000
2260
+ },
2261
+ {
2262
+ "epoch": 13.45,
2263
+ "learning_rate": 2.070798598526036e-06,
2264
+ "loss": 2.0191,
2265
+ "step": 185500
2266
+ },
2267
+ {
2268
+ "epoch": 13.48,
2269
+ "learning_rate": 2.02247191011236e-06,
2270
+ "loss": 2.0148,
2271
+ "step": 186000
2272
+ },
2273
+ {
2274
+ "epoch": 13.52,
2275
+ "learning_rate": 1.9741452216986835e-06,
2276
+ "loss": 2.0332,
2277
+ "step": 186500
2278
+ },
2279
+ {
2280
+ "epoch": 13.56,
2281
+ "learning_rate": 1.9258185332850067e-06,
2282
+ "loss": 2.0137,
2283
+ "step": 187000
2284
+ },
2285
+ {
2286
+ "epoch": 13.59,
2287
+ "learning_rate": 1.8774918448713304e-06,
2288
+ "loss": 2.0341,
2289
+ "step": 187500
2290
+ },
2291
+ {
2292
+ "epoch": 13.63,
2293
+ "learning_rate": 1.829165156457654e-06,
2294
+ "loss": 1.9992,
2295
+ "step": 188000
2296
+ },
2297
+ {
2298
+ "epoch": 13.66,
2299
+ "learning_rate": 1.7808384680439775e-06,
2300
+ "loss": 2.0233,
2301
+ "step": 188500
2302
+ },
2303
+ {
2304
+ "epoch": 13.7,
2305
+ "learning_rate": 1.732511779630301e-06,
2306
+ "loss": 2.0065,
2307
+ "step": 189000
2308
+ },
2309
+ {
2310
+ "epoch": 13.74,
2311
+ "learning_rate": 1.6841850912166246e-06,
2312
+ "loss": 2.024,
2313
+ "step": 189500
2314
+ },
2315
+ {
2316
+ "epoch": 13.77,
2317
+ "learning_rate": 1.635858402802948e-06,
2318
+ "loss": 2.0306,
2319
+ "step": 190000
2320
+ },
2321
+ {
2322
+ "epoch": 13.81,
2323
+ "learning_rate": 1.5875317143892715e-06,
2324
+ "loss": 2.0454,
2325
+ "step": 190500
2326
+ },
2327
+ {
2328
+ "epoch": 13.85,
2329
+ "learning_rate": 1.5392050259755951e-06,
2330
+ "loss": 2.0143,
2331
+ "step": 191000
2332
+ },
2333
+ {
2334
+ "epoch": 13.88,
2335
+ "learning_rate": 1.4908783375619188e-06,
2336
+ "loss": 2.0186,
2337
+ "step": 191500
2338
+ },
2339
+ {
2340
+ "epoch": 13.92,
2341
+ "learning_rate": 1.4425516491482422e-06,
2342
+ "loss": 2.0109,
2343
+ "step": 192000
2344
+ },
2345
+ {
2346
+ "epoch": 13.95,
2347
+ "learning_rate": 1.3942249607345657e-06,
2348
+ "loss": 2.0091,
2349
+ "step": 192500
2350
+ },
2351
+ {
2352
+ "epoch": 13.99,
2353
+ "learning_rate": 1.3458982723208893e-06,
2354
+ "loss": 2.0223,
2355
+ "step": 193000
2356
+ },
2357
+ {
2358
+ "epoch": 14.03,
2359
+ "learning_rate": 1.2975715839072127e-06,
2360
+ "loss": 1.9994,
2361
+ "step": 193500
2362
+ },
2363
+ {
2364
+ "epoch": 14.06,
2365
+ "learning_rate": 1.2492448954935364e-06,
2366
+ "loss": 2.0055,
2367
+ "step": 194000
2368
+ },
2369
+ {
2370
+ "epoch": 14.1,
2371
+ "learning_rate": 1.20091820707986e-06,
2372
+ "loss": 2.0195,
2373
+ "step": 194500
2374
+ },
2375
+ {
2376
+ "epoch": 14.14,
2377
+ "learning_rate": 1.1525915186661835e-06,
2378
+ "loss": 2.0082,
2379
+ "step": 195000
2380
+ },
2381
+ {
2382
+ "epoch": 14.17,
2383
+ "learning_rate": 1.1042648302525072e-06,
2384
+ "loss": 2.0337,
2385
+ "step": 195500
2386
+ },
2387
+ {
2388
+ "epoch": 14.21,
2389
+ "learning_rate": 1.0559381418388306e-06,
2390
+ "loss": 2.0057,
2391
+ "step": 196000
2392
+ },
2393
+ {
2394
+ "epoch": 14.24,
2395
+ "learning_rate": 1.007611453425154e-06,
2396
+ "loss": 2.0224,
2397
+ "step": 196500
2398
+ },
2399
+ {
2400
+ "epoch": 14.28,
2401
+ "learning_rate": 9.592847650114777e-07,
2402
+ "loss": 2.031,
2403
+ "step": 197000
2404
+ },
2405
+ {
2406
+ "epoch": 14.32,
2407
+ "learning_rate": 9.109580765978012e-07,
2408
+ "loss": 2.0261,
2409
+ "step": 197500
2410
+ },
2411
+ {
2412
+ "epoch": 14.35,
2413
+ "learning_rate": 8.626313881841247e-07,
2414
+ "loss": 2.0288,
2415
+ "step": 198000
2416
+ },
2417
+ {
2418
+ "epoch": 14.39,
2419
+ "learning_rate": 8.143046997704483e-07,
2420
+ "loss": 2.0266,
2421
+ "step": 198500
2422
+ },
2423
+ {
2424
+ "epoch": 14.43,
2425
+ "learning_rate": 7.659780113567718e-07,
2426
+ "loss": 2.0164,
2427
+ "step": 199000
2428
+ },
2429
+ {
2430
+ "epoch": 14.46,
2431
+ "learning_rate": 7.176513229430953e-07,
2432
+ "loss": 2.0295,
2433
+ "step": 199500
2434
+ },
2435
+ {
2436
+ "epoch": 14.5,
2437
+ "learning_rate": 6.69324634529419e-07,
2438
+ "loss": 2.0177,
2439
+ "step": 200000
2440
+ },
2441
+ {
2442
+ "epoch": 14.5,
2443
+ "eval_bleu": 26.5388,
2444
+ "eval_gen_len": 24.6306,
2445
+ "eval_loss": 1.7493975162506104,
2446
+ "eval_runtime": 4655.3081,
2447
+ "eval_samples_per_second": 11.853,
2448
+ "eval_steps_per_second": 1.482,
2449
+ "step": 200000
2450
+ }
2451
+ ],
2452
+ "logging_steps": 500,
2453
+ "max_steps": 206925,
2454
+ "num_input_tokens_seen": 0,
2455
+ "num_train_epochs": 15,
2456
+ "save_steps": 50000,
2457
+ "total_flos": 2.0776464395563008e+17,
2458
+ "train_batch_size": 8,
2459
+ "trial_name": null,
2460
+ "trial_params": null
2461
+ }
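
`trainer_state.json` records the training trajectory: 200,000 of 206,925 scheduled steps (about 14.5 of 15 epochs) at batch size 8, with the loss logged every 500 steps and an evaluation plus checkpoint every 50,000 steps. Eval BLEU rises 23.26 → 25.28 → 26.21 → 26.54 across the four evaluations, and the best checkpoint is `output/checkpoint-200000`. A small sketch for pulling the evaluation rows out of `log_history`:

```python
# Sketch: extract the evaluation entries (those carrying "eval_bleu") from
# trainer_state.json to track BLEU and eval loss across the 50k-step checkpoints.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

for entry in state["log_history"]:
    if "eval_bleu" in entry:
        print(entry["step"], entry["eval_bleu"], entry["eval_loss"])
# Expected from the log above: steps 50000/100000/150000/200000 with
# BLEU 23.2646, 25.2752, 26.2063, 26.5388.
```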
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:596e6df7ff73946fbdc2cbdb9d67e12494a368115bae324f52a496aacf7624ac
+ size 4856
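
`training_args.bin` is the pickled `TrainingArguments` object the `Trainer` saves alongside a checkpoint; it is only a few kilobytes but needs a compatible `transformers` install to unpickle. A hedged sketch (the `weights_only=False` flag is needed on recent torch versions because this file is arbitrary pickled Python, not a tensor archive):

```python
# Sketch: inspect the saved TrainingArguments; requires transformers installed.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # e.g. Seq2SeqTrainingArguments
print(args.num_train_epochs, args.per_device_train_batch_size)
```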