hushell committed on
Commit
935514f
1 Parent(s): c19550d

Model save

Browse files
README.md ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: apache-2.0
3
+ base_model: ondevicellm/tinyllama_moe
4
+ tags:
5
+ - trl
6
+ - sft
7
+ - generated_from_trainer
8
+ datasets:
9
+ - generator
10
+ model-index:
11
+ - name: tinyllama_moe_sft_ultrachat200k_v2_epochs3
12
+ results: []
13
+ ---
14
+
15
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
16
+ should probably proofread and complete it, then remove this comment. -->
17
+
18
+ # tinyllama_moe_sft_ultrachat200k_v2_epochs3
19
+
20
+ This model is a fine-tuned version of [ondevicellm/tinyllama_moe](https://huggingface.co/ondevicellm/tinyllama_moe) on the generator dataset.
21
+ It achieves the following results on the evaluation set:
22
+ - Loss: 1.1178
23
+
24
+ ## Model description
25
+
26
+ More information needed
27
+
28
+ ## Intended uses & limitations
29
+
30
+ More information needed
31
+
32
+ ## Training and evaluation data
33
+
34
+ More information needed
35
+
36
+ ## Training procedure
37
+
38
+ ### Training hyperparameters
39
+
40
+ The following hyperparameters were used during training:
41
+ - learning_rate: 2e-05
42
+ - train_batch_size: 16
43
+ - eval_batch_size: 8
44
+ - seed: 42
45
+ - distributed_type: multi-GPU
46
+ - num_devices: 4
47
+ - gradient_accumulation_steps: 2
48
+ - total_train_batch_size: 128
49
+ - total_eval_batch_size: 32
50
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
51
+ - lr_scheduler_type: cosine
52
+ - lr_scheduler_warmup_steps: 115
53
+ - num_epochs: 3
54
+
55
+ ### Training results
56
+
57
+ | Training Loss | Epoch | Step | Validation Loss |
58
+ |:-------------:|:-----:|:----:|:---------------:|
59
+ | 1.336 | 0.09 | 100 | 1.3129 |
60
+ | 1.2424 | 0.18 | 200 | 1.2363 |
61
+ | 1.2079 | 0.26 | 300 | 1.2084 |
62
+ | 1.185 | 0.35 | 400 | 1.1911 |
63
+ | 1.1546 | 0.44 | 500 | 1.1787 |
64
+ | 1.1741 | 0.53 | 600 | 1.1692 |
65
+ | 1.1612 | 0.61 | 700 | 1.1613 |
66
+ | 1.1453 | 0.7 | 800 | 1.1547 |
67
+ | 1.141 | 0.79 | 900 | 1.1489 |
68
+ | 1.1247 | 0.88 | 1000 | 1.1438 |
69
+ | 1.1485 | 0.96 | 1100 | 1.1392 |
70
+ | 1.067 | 1.05 | 1200 | 1.1387 |
71
+ | 1.0694 | 1.14 | 1300 | 1.1368 |
72
+ | 1.0814 | 1.23 | 1400 | 1.1341 |
73
+ | 1.0727 | 1.31 | 1500 | 1.1316 |
74
+ | 1.0769 | 1.4 | 1600 | 1.1292 |
75
+ | 1.0728 | 1.49 | 1700 | 1.1270 |
76
+ | 1.0558 | 1.58 | 1800 | 1.1247 |
77
+ | 1.0753 | 1.66 | 1900 | 1.1229 |
78
+ | 1.0799 | 1.75 | 2000 | 1.1209 |
79
+ | 1.066 | 1.84 | 2100 | 1.1192 |
80
+ | 1.0406 | 1.93 | 2200 | 1.1178 |
81
+ | 1.0193 | 2.01 | 2300 | 1.1222 |
82
+ | 1.0276 | 2.1 | 2400 | 1.1220 |
83
+ | 1.0171 | 2.19 | 2500 | 1.1215 |
84
+ | 1.0112 | 2.28 | 2600 | 1.1211 |
85
+ | 1.0087 | 2.37 | 2700 | 1.1207 |
86
+ | 1.0158 | 2.45 | 2800 | 1.1204 |
87
+ | 1.0219 | 2.54 | 2900 | 1.1199 |
88
+ | 1.0024 | 2.63 | 3000 | 1.1197 |
89
+ | 1.019 | 2.72 | 3100 | 1.1197 |
90
+ | 1.0135 | 2.8 | 3200 | 1.1194 |
91
+ | 1.0094 | 2.89 | 3300 | 1.1194 |
92
+ | 1.0284 | 2.98 | 3400 | 1.1194 |
93
+
94
+
95
+ ### Framework versions
96
+
97
+ - Transformers 4.36.2
98
+ - Pytorch 2.1.2+cu118
99
+ - Datasets 2.14.6
100
+ - Tokenizers 0.15.0
all_results.json ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 3.0,
3
+ "eval_loss": 1.1177691221237183,
4
+ "eval_runtime": 426.5899,
5
+ "eval_samples": 23110,
6
+ "eval_samples_per_second": 37.896,
7
+ "eval_steps_per_second": 1.186,
8
+ "train_loss": 1.1024342694896863,
9
+ "train_runtime": 51497.7783,
10
+ "train_samples": 207865,
11
+ "train_samples_per_second": 8.508,
12
+ "train_steps_per_second": 0.066
13
+ }
eval_results.json ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 3.0,
3
+ "eval_loss": 1.1177691221237183,
4
+ "eval_runtime": 426.5899,
5
+ "eval_samples": 23110,
6
+ "eval_samples_per_second": 37.896,
7
+ "eval_steps_per_second": 1.186
8
+ }
generation_config.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 1,
4
+ "eos_token_id": 2,
5
+ "transformers_version": "4.36.2",
6
+ "use_cache": false
7
+ }
model-00001-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c001402a18e8e178c1727401980da81545c07b1fc5e3433fc9521cc2437cfdad
3
+ size 4984275728
model-00002-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0f009665206ac49e54bf93808a84d2959c12c0272f2aba5f0672f0b7f4c39f73
3
+ size 4991625072
model-00003-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:171d052cb8033b6dafd50c5b2139eff39a3ebc11d072ff6010e18027c1abb1db
3
+ size 2882730416
model.safetensors.index.json ADDED
@@ -0,0 +1,692 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "metadata": {
3
+ "total_size": 12858544128
4
+ },
5
+ "weight_map": {
6
+ "lm_head.weight": "model-00003-of-00003.safetensors",
7
+ "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
8
+ "model.layers.0.block_sparse_moe.experts.0.w1.weight": "model-00001-of-00003.safetensors",
9
+ "model.layers.0.block_sparse_moe.experts.0.w2.weight": "model-00001-of-00003.safetensors",
10
+ "model.layers.0.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00003.safetensors",
11
+ "model.layers.0.block_sparse_moe.experts.1.w1.weight": "model-00001-of-00003.safetensors",
12
+ "model.layers.0.block_sparse_moe.experts.1.w2.weight": "model-00001-of-00003.safetensors",
13
+ "model.layers.0.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00003.safetensors",
14
+ "model.layers.0.block_sparse_moe.experts.2.w1.weight": "model-00001-of-00003.safetensors",
15
+ "model.layers.0.block_sparse_moe.experts.2.w2.weight": "model-00001-of-00003.safetensors",
16
+ "model.layers.0.block_sparse_moe.experts.2.w3.weight": "model-00001-of-00003.safetensors",
17
+ "model.layers.0.block_sparse_moe.experts.3.w1.weight": "model-00001-of-00003.safetensors",
18
+ "model.layers.0.block_sparse_moe.experts.3.w2.weight": "model-00001-of-00003.safetensors",
19
+ "model.layers.0.block_sparse_moe.experts.3.w3.weight": "model-00001-of-00003.safetensors",
20
+ "model.layers.0.block_sparse_moe.experts.4.w1.weight": "model-00001-of-00003.safetensors",
21
+ "model.layers.0.block_sparse_moe.experts.4.w2.weight": "model-00001-of-00003.safetensors",
22
+ "model.layers.0.block_sparse_moe.experts.4.w3.weight": "model-00001-of-00003.safetensors",
23
+ "model.layers.0.block_sparse_moe.experts.5.w1.weight": "model-00001-of-00003.safetensors",
24
+ "model.layers.0.block_sparse_moe.experts.5.w2.weight": "model-00001-of-00003.safetensors",
25
+ "model.layers.0.block_sparse_moe.experts.5.w3.weight": "model-00001-of-00003.safetensors",
26
+ "model.layers.0.block_sparse_moe.experts.6.w1.weight": "model-00001-of-00003.safetensors",
27
+ "model.layers.0.block_sparse_moe.experts.6.w2.weight": "model-00001-of-00003.safetensors",
28
+ "model.layers.0.block_sparse_moe.experts.6.w3.weight": "model-00001-of-00003.safetensors",
29
+ "model.layers.0.block_sparse_moe.experts.7.w1.weight": "model-00001-of-00003.safetensors",
30
+ "model.layers.0.block_sparse_moe.experts.7.w2.weight": "model-00001-of-00003.safetensors",
31
+ "model.layers.0.block_sparse_moe.experts.7.w3.weight": "model-00001-of-00003.safetensors",
32
+ "model.layers.0.block_sparse_moe.gate.weight": "model-00001-of-00003.safetensors",
33
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
34
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
35
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
36
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
37
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
38
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
39
+ "model.layers.1.block_sparse_moe.experts.0.w1.weight": "model-00001-of-00003.safetensors",
40
+ "model.layers.1.block_sparse_moe.experts.0.w2.weight": "model-00001-of-00003.safetensors",
41
+ "model.layers.1.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00003.safetensors",
42
+ "model.layers.1.block_sparse_moe.experts.1.w1.weight": "model-00001-of-00003.safetensors",
43
+ "model.layers.1.block_sparse_moe.experts.1.w2.weight": "model-00001-of-00003.safetensors",
44
+ "model.layers.1.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00003.safetensors",
45
+ "model.layers.1.block_sparse_moe.experts.2.w1.weight": "model-00001-of-00003.safetensors",
46
+ "model.layers.1.block_sparse_moe.experts.2.w2.weight": "model-00001-of-00003.safetensors",
47
+ "model.layers.1.block_sparse_moe.experts.2.w3.weight": "model-00001-of-00003.safetensors",
48
+ "model.layers.1.block_sparse_moe.experts.3.w1.weight": "model-00001-of-00003.safetensors",
49
+ "model.layers.1.block_sparse_moe.experts.3.w2.weight": "model-00001-of-00003.safetensors",
50
+ "model.layers.1.block_sparse_moe.experts.3.w3.weight": "model-00001-of-00003.safetensors",
51
+ "model.layers.1.block_sparse_moe.experts.4.w1.weight": "model-00001-of-00003.safetensors",
52
+ "model.layers.1.block_sparse_moe.experts.4.w2.weight": "model-00001-of-00003.safetensors",
53
+ "model.layers.1.block_sparse_moe.experts.4.w3.weight": "model-00001-of-00003.safetensors",
54
+ "model.layers.1.block_sparse_moe.experts.5.w1.weight": "model-00001-of-00003.safetensors",
55
+ "model.layers.1.block_sparse_moe.experts.5.w2.weight": "model-00001-of-00003.safetensors",
56
+ "model.layers.1.block_sparse_moe.experts.5.w3.weight": "model-00001-of-00003.safetensors",
57
+ "model.layers.1.block_sparse_moe.experts.6.w1.weight": "model-00001-of-00003.safetensors",
58
+ "model.layers.1.block_sparse_moe.experts.6.w2.weight": "model-00001-of-00003.safetensors",
59
+ "model.layers.1.block_sparse_moe.experts.6.w3.weight": "model-00001-of-00003.safetensors",
60
+ "model.layers.1.block_sparse_moe.experts.7.w1.weight": "model-00001-of-00003.safetensors",
61
+ "model.layers.1.block_sparse_moe.experts.7.w2.weight": "model-00001-of-00003.safetensors",
62
+ "model.layers.1.block_sparse_moe.experts.7.w3.weight": "model-00001-of-00003.safetensors",
63
+ "model.layers.1.block_sparse_moe.gate.weight": "model-00001-of-00003.safetensors",
64
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
65
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
66
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
67
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
68
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
69
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
70
+ "model.layers.10.block_sparse_moe.experts.0.w1.weight": "model-00002-of-00003.safetensors",
71
+ "model.layers.10.block_sparse_moe.experts.0.w2.weight": "model-00002-of-00003.safetensors",
72
+ "model.layers.10.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00003.safetensors",
73
+ "model.layers.10.block_sparse_moe.experts.1.w1.weight": "model-00002-of-00003.safetensors",
74
+ "model.layers.10.block_sparse_moe.experts.1.w2.weight": "model-00002-of-00003.safetensors",
75
+ "model.layers.10.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00003.safetensors",
76
+ "model.layers.10.block_sparse_moe.experts.2.w1.weight": "model-00002-of-00003.safetensors",
77
+ "model.layers.10.block_sparse_moe.experts.2.w2.weight": "model-00002-of-00003.safetensors",
78
+ "model.layers.10.block_sparse_moe.experts.2.w3.weight": "model-00002-of-00003.safetensors",
79
+ "model.layers.10.block_sparse_moe.experts.3.w1.weight": "model-00002-of-00003.safetensors",
80
+ "model.layers.10.block_sparse_moe.experts.3.w2.weight": "model-00002-of-00003.safetensors",
81
+ "model.layers.10.block_sparse_moe.experts.3.w3.weight": "model-00002-of-00003.safetensors",
82
+ "model.layers.10.block_sparse_moe.experts.4.w1.weight": "model-00002-of-00003.safetensors",
83
+ "model.layers.10.block_sparse_moe.experts.4.w2.weight": "model-00002-of-00003.safetensors",
84
+ "model.layers.10.block_sparse_moe.experts.4.w3.weight": "model-00002-of-00003.safetensors",
85
+ "model.layers.10.block_sparse_moe.experts.5.w1.weight": "model-00002-of-00003.safetensors",
86
+ "model.layers.10.block_sparse_moe.experts.5.w2.weight": "model-00002-of-00003.safetensors",
87
+ "model.layers.10.block_sparse_moe.experts.5.w3.weight": "model-00002-of-00003.safetensors",
88
+ "model.layers.10.block_sparse_moe.experts.6.w1.weight": "model-00002-of-00003.safetensors",
89
+ "model.layers.10.block_sparse_moe.experts.6.w2.weight": "model-00002-of-00003.safetensors",
90
+ "model.layers.10.block_sparse_moe.experts.6.w3.weight": "model-00002-of-00003.safetensors",
91
+ "model.layers.10.block_sparse_moe.experts.7.w1.weight": "model-00002-of-00003.safetensors",
92
+ "model.layers.10.block_sparse_moe.experts.7.w2.weight": "model-00002-of-00003.safetensors",
93
+ "model.layers.10.block_sparse_moe.experts.7.w3.weight": "model-00002-of-00003.safetensors",
94
+ "model.layers.10.block_sparse_moe.gate.weight": "model-00002-of-00003.safetensors",
95
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00003.safetensors",
96
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
97
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
98
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
99
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
100
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
101
+ "model.layers.11.block_sparse_moe.experts.0.w1.weight": "model-00002-of-00003.safetensors",
102
+ "model.layers.11.block_sparse_moe.experts.0.w2.weight": "model-00002-of-00003.safetensors",
103
+ "model.layers.11.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00003.safetensors",
104
+ "model.layers.11.block_sparse_moe.experts.1.w1.weight": "model-00002-of-00003.safetensors",
105
+ "model.layers.11.block_sparse_moe.experts.1.w2.weight": "model-00002-of-00003.safetensors",
106
+ "model.layers.11.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00003.safetensors",
107
+ "model.layers.11.block_sparse_moe.experts.2.w1.weight": "model-00002-of-00003.safetensors",
108
+ "model.layers.11.block_sparse_moe.experts.2.w2.weight": "model-00002-of-00003.safetensors",
109
+ "model.layers.11.block_sparse_moe.experts.2.w3.weight": "model-00002-of-00003.safetensors",
110
+ "model.layers.11.block_sparse_moe.experts.3.w1.weight": "model-00002-of-00003.safetensors",
111
+ "model.layers.11.block_sparse_moe.experts.3.w2.weight": "model-00002-of-00003.safetensors",
112
+ "model.layers.11.block_sparse_moe.experts.3.w3.weight": "model-00002-of-00003.safetensors",
113
+ "model.layers.11.block_sparse_moe.experts.4.w1.weight": "model-00002-of-00003.safetensors",
114
+ "model.layers.11.block_sparse_moe.experts.4.w2.weight": "model-00002-of-00003.safetensors",
115
+ "model.layers.11.block_sparse_moe.experts.4.w3.weight": "model-00002-of-00003.safetensors",
116
+ "model.layers.11.block_sparse_moe.experts.5.w1.weight": "model-00002-of-00003.safetensors",
117
+ "model.layers.11.block_sparse_moe.experts.5.w2.weight": "model-00002-of-00003.safetensors",
118
+ "model.layers.11.block_sparse_moe.experts.5.w3.weight": "model-00002-of-00003.safetensors",
119
+ "model.layers.11.block_sparse_moe.experts.6.w1.weight": "model-00002-of-00003.safetensors",
120
+ "model.layers.11.block_sparse_moe.experts.6.w2.weight": "model-00002-of-00003.safetensors",
121
+ "model.layers.11.block_sparse_moe.experts.6.w3.weight": "model-00002-of-00003.safetensors",
122
+ "model.layers.11.block_sparse_moe.experts.7.w1.weight": "model-00002-of-00003.safetensors",
123
+ "model.layers.11.block_sparse_moe.experts.7.w2.weight": "model-00002-of-00003.safetensors",
124
+ "model.layers.11.block_sparse_moe.experts.7.w3.weight": "model-00002-of-00003.safetensors",
125
+ "model.layers.11.block_sparse_moe.gate.weight": "model-00002-of-00003.safetensors",
126
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
127
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
128
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
129
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
130
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
131
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
132
+ "model.layers.12.block_sparse_moe.experts.0.w1.weight": "model-00002-of-00003.safetensors",
133
+ "model.layers.12.block_sparse_moe.experts.0.w2.weight": "model-00002-of-00003.safetensors",
134
+ "model.layers.12.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00003.safetensors",
135
+ "model.layers.12.block_sparse_moe.experts.1.w1.weight": "model-00002-of-00003.safetensors",
136
+ "model.layers.12.block_sparse_moe.experts.1.w2.weight": "model-00002-of-00003.safetensors",
137
+ "model.layers.12.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00003.safetensors",
138
+ "model.layers.12.block_sparse_moe.experts.2.w1.weight": "model-00002-of-00003.safetensors",
139
+ "model.layers.12.block_sparse_moe.experts.2.w2.weight": "model-00002-of-00003.safetensors",
140
+ "model.layers.12.block_sparse_moe.experts.2.w3.weight": "model-00002-of-00003.safetensors",
141
+ "model.layers.12.block_sparse_moe.experts.3.w1.weight": "model-00002-of-00003.safetensors",
142
+ "model.layers.12.block_sparse_moe.experts.3.w2.weight": "model-00002-of-00003.safetensors",
143
+ "model.layers.12.block_sparse_moe.experts.3.w3.weight": "model-00002-of-00003.safetensors",
144
+ "model.layers.12.block_sparse_moe.experts.4.w1.weight": "model-00002-of-00003.safetensors",
145
+ "model.layers.12.block_sparse_moe.experts.4.w2.weight": "model-00002-of-00003.safetensors",
146
+ "model.layers.12.block_sparse_moe.experts.4.w3.weight": "model-00002-of-00003.safetensors",
147
+ "model.layers.12.block_sparse_moe.experts.5.w1.weight": "model-00002-of-00003.safetensors",
148
+ "model.layers.12.block_sparse_moe.experts.5.w2.weight": "model-00002-of-00003.safetensors",
149
+ "model.layers.12.block_sparse_moe.experts.5.w3.weight": "model-00002-of-00003.safetensors",
150
+ "model.layers.12.block_sparse_moe.experts.6.w1.weight": "model-00002-of-00003.safetensors",
151
+ "model.layers.12.block_sparse_moe.experts.6.w2.weight": "model-00002-of-00003.safetensors",
152
+ "model.layers.12.block_sparse_moe.experts.6.w3.weight": "model-00002-of-00003.safetensors",
153
+ "model.layers.12.block_sparse_moe.experts.7.w1.weight": "model-00002-of-00003.safetensors",
154
+ "model.layers.12.block_sparse_moe.experts.7.w2.weight": "model-00002-of-00003.safetensors",
155
+ "model.layers.12.block_sparse_moe.experts.7.w3.weight": "model-00002-of-00003.safetensors",
156
+ "model.layers.12.block_sparse_moe.gate.weight": "model-00002-of-00003.safetensors",
157
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
158
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
159
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
160
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
161
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
162
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
163
+ "model.layers.13.block_sparse_moe.experts.0.w1.weight": "model-00002-of-00003.safetensors",
164
+ "model.layers.13.block_sparse_moe.experts.0.w2.weight": "model-00002-of-00003.safetensors",
165
+ "model.layers.13.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00003.safetensors",
166
+ "model.layers.13.block_sparse_moe.experts.1.w1.weight": "model-00002-of-00003.safetensors",
167
+ "model.layers.13.block_sparse_moe.experts.1.w2.weight": "model-00002-of-00003.safetensors",
168
+ "model.layers.13.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00003.safetensors",
169
+ "model.layers.13.block_sparse_moe.experts.2.w1.weight": "model-00002-of-00003.safetensors",
170
+ "model.layers.13.block_sparse_moe.experts.2.w2.weight": "model-00002-of-00003.safetensors",
171
+ "model.layers.13.block_sparse_moe.experts.2.w3.weight": "model-00002-of-00003.safetensors",
172
+ "model.layers.13.block_sparse_moe.experts.3.w1.weight": "model-00002-of-00003.safetensors",
173
+ "model.layers.13.block_sparse_moe.experts.3.w2.weight": "model-00002-of-00003.safetensors",
174
+ "model.layers.13.block_sparse_moe.experts.3.w3.weight": "model-00002-of-00003.safetensors",
175
+ "model.layers.13.block_sparse_moe.experts.4.w1.weight": "model-00002-of-00003.safetensors",
176
+ "model.layers.13.block_sparse_moe.experts.4.w2.weight": "model-00002-of-00003.safetensors",
177
+ "model.layers.13.block_sparse_moe.experts.4.w3.weight": "model-00002-of-00003.safetensors",
178
+ "model.layers.13.block_sparse_moe.experts.5.w1.weight": "model-00002-of-00003.safetensors",
179
+ "model.layers.13.block_sparse_moe.experts.5.w2.weight": "model-00002-of-00003.safetensors",
180
+ "model.layers.13.block_sparse_moe.experts.5.w3.weight": "model-00002-of-00003.safetensors",
181
+ "model.layers.13.block_sparse_moe.experts.6.w1.weight": "model-00002-of-00003.safetensors",
182
+ "model.layers.13.block_sparse_moe.experts.6.w2.weight": "model-00002-of-00003.safetensors",
183
+ "model.layers.13.block_sparse_moe.experts.6.w3.weight": "model-00002-of-00003.safetensors",
184
+ "model.layers.13.block_sparse_moe.experts.7.w1.weight": "model-00002-of-00003.safetensors",
185
+ "model.layers.13.block_sparse_moe.experts.7.w2.weight": "model-00002-of-00003.safetensors",
186
+ "model.layers.13.block_sparse_moe.experts.7.w3.weight": "model-00002-of-00003.safetensors",
187
+ "model.layers.13.block_sparse_moe.gate.weight": "model-00002-of-00003.safetensors",
188
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
189
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
190
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
191
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
192
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
193
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
194
+ "model.layers.14.block_sparse_moe.experts.0.w1.weight": "model-00002-of-00003.safetensors",
195
+ "model.layers.14.block_sparse_moe.experts.0.w2.weight": "model-00002-of-00003.safetensors",
196
+ "model.layers.14.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00003.safetensors",
197
+ "model.layers.14.block_sparse_moe.experts.1.w1.weight": "model-00002-of-00003.safetensors",
198
+ "model.layers.14.block_sparse_moe.experts.1.w2.weight": "model-00002-of-00003.safetensors",
199
+ "model.layers.14.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00003.safetensors",
200
+ "model.layers.14.block_sparse_moe.experts.2.w1.weight": "model-00002-of-00003.safetensors",
201
+ "model.layers.14.block_sparse_moe.experts.2.w2.weight": "model-00002-of-00003.safetensors",
202
+ "model.layers.14.block_sparse_moe.experts.2.w3.weight": "model-00002-of-00003.safetensors",
203
+ "model.layers.14.block_sparse_moe.experts.3.w1.weight": "model-00002-of-00003.safetensors",
204
+ "model.layers.14.block_sparse_moe.experts.3.w2.weight": "model-00002-of-00003.safetensors",
205
+ "model.layers.14.block_sparse_moe.experts.3.w3.weight": "model-00002-of-00003.safetensors",
206
+ "model.layers.14.block_sparse_moe.experts.4.w1.weight": "model-00002-of-00003.safetensors",
207
+ "model.layers.14.block_sparse_moe.experts.4.w2.weight": "model-00002-of-00003.safetensors",
208
+ "model.layers.14.block_sparse_moe.experts.4.w3.weight": "model-00002-of-00003.safetensors",
209
+ "model.layers.14.block_sparse_moe.experts.5.w1.weight": "model-00002-of-00003.safetensors",
210
+ "model.layers.14.block_sparse_moe.experts.5.w2.weight": "model-00002-of-00003.safetensors",
211
+ "model.layers.14.block_sparse_moe.experts.5.w3.weight": "model-00002-of-00003.safetensors",
212
+ "model.layers.14.block_sparse_moe.experts.6.w1.weight": "model-00002-of-00003.safetensors",
213
+ "model.layers.14.block_sparse_moe.experts.6.w2.weight": "model-00002-of-00003.safetensors",
214
+ "model.layers.14.block_sparse_moe.experts.6.w3.weight": "model-00002-of-00003.safetensors",
215
+ "model.layers.14.block_sparse_moe.experts.7.w1.weight": "model-00002-of-00003.safetensors",
216
+ "model.layers.14.block_sparse_moe.experts.7.w2.weight": "model-00002-of-00003.safetensors",
217
+ "model.layers.14.block_sparse_moe.experts.7.w3.weight": "model-00002-of-00003.safetensors",
218
+ "model.layers.14.block_sparse_moe.gate.weight": "model-00002-of-00003.safetensors",
219
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
220
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
221
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
222
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
223
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
224
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
225
+ "model.layers.15.block_sparse_moe.experts.0.w1.weight": "model-00002-of-00003.safetensors",
226
+ "model.layers.15.block_sparse_moe.experts.0.w2.weight": "model-00002-of-00003.safetensors",
227
+ "model.layers.15.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00003.safetensors",
228
+ "model.layers.15.block_sparse_moe.experts.1.w1.weight": "model-00002-of-00003.safetensors",
229
+ "model.layers.15.block_sparse_moe.experts.1.w2.weight": "model-00002-of-00003.safetensors",
230
+ "model.layers.15.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00003.safetensors",
231
+ "model.layers.15.block_sparse_moe.experts.2.w1.weight": "model-00002-of-00003.safetensors",
232
+ "model.layers.15.block_sparse_moe.experts.2.w2.weight": "model-00002-of-00003.safetensors",
233
+ "model.layers.15.block_sparse_moe.experts.2.w3.weight": "model-00002-of-00003.safetensors",
234
+ "model.layers.15.block_sparse_moe.experts.3.w1.weight": "model-00002-of-00003.safetensors",
235
+ "model.layers.15.block_sparse_moe.experts.3.w2.weight": "model-00002-of-00003.safetensors",
236
+ "model.layers.15.block_sparse_moe.experts.3.w3.weight": "model-00002-of-00003.safetensors",
237
+ "model.layers.15.block_sparse_moe.experts.4.w1.weight": "model-00002-of-00003.safetensors",
238
+ "model.layers.15.block_sparse_moe.experts.4.w2.weight": "model-00002-of-00003.safetensors",
239
+ "model.layers.15.block_sparse_moe.experts.4.w3.weight": "model-00002-of-00003.safetensors",
240
+ "model.layers.15.block_sparse_moe.experts.5.w1.weight": "model-00002-of-00003.safetensors",
241
+ "model.layers.15.block_sparse_moe.experts.5.w2.weight": "model-00002-of-00003.safetensors",
242
+ "model.layers.15.block_sparse_moe.experts.5.w3.weight": "model-00002-of-00003.safetensors",
243
+ "model.layers.15.block_sparse_moe.experts.6.w1.weight": "model-00002-of-00003.safetensors",
244
+ "model.layers.15.block_sparse_moe.experts.6.w2.weight": "model-00002-of-00003.safetensors",
245
+ "model.layers.15.block_sparse_moe.experts.6.w3.weight": "model-00002-of-00003.safetensors",
246
+ "model.layers.15.block_sparse_moe.experts.7.w1.weight": "model-00002-of-00003.safetensors",
247
+ "model.layers.15.block_sparse_moe.experts.7.w2.weight": "model-00002-of-00003.safetensors",
248
+ "model.layers.15.block_sparse_moe.experts.7.w3.weight": "model-00002-of-00003.safetensors",
249
+ "model.layers.15.block_sparse_moe.gate.weight": "model-00002-of-00003.safetensors",
250
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
251
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
252
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
253
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
254
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
255
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
256
+ "model.layers.16.block_sparse_moe.experts.0.w1.weight": "model-00002-of-00003.safetensors",
257
+ "model.layers.16.block_sparse_moe.experts.0.w2.weight": "model-00002-of-00003.safetensors",
258
+ "model.layers.16.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00003.safetensors",
259
+ "model.layers.16.block_sparse_moe.experts.1.w1.weight": "model-00002-of-00003.safetensors",
260
+ "model.layers.16.block_sparse_moe.experts.1.w2.weight": "model-00002-of-00003.safetensors",
261
+ "model.layers.16.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00003.safetensors",
262
+ "model.layers.16.block_sparse_moe.experts.2.w1.weight": "model-00002-of-00003.safetensors",
263
+ "model.layers.16.block_sparse_moe.experts.2.w2.weight": "model-00002-of-00003.safetensors",
264
+ "model.layers.16.block_sparse_moe.experts.2.w3.weight": "model-00002-of-00003.safetensors",
265
+ "model.layers.16.block_sparse_moe.experts.3.w1.weight": "model-00002-of-00003.safetensors",
266
+ "model.layers.16.block_sparse_moe.experts.3.w2.weight": "model-00002-of-00003.safetensors",
267
+ "model.layers.16.block_sparse_moe.experts.3.w3.weight": "model-00002-of-00003.safetensors",
268
+ "model.layers.16.block_sparse_moe.experts.4.w1.weight": "model-00002-of-00003.safetensors",
269
+ "model.layers.16.block_sparse_moe.experts.4.w2.weight": "model-00002-of-00003.safetensors",
270
+ "model.layers.16.block_sparse_moe.experts.4.w3.weight": "model-00002-of-00003.safetensors",
271
+ "model.layers.16.block_sparse_moe.experts.5.w1.weight": "model-00002-of-00003.safetensors",
272
+ "model.layers.16.block_sparse_moe.experts.5.w2.weight": "model-00002-of-00003.safetensors",
273
+ "model.layers.16.block_sparse_moe.experts.5.w3.weight": "model-00002-of-00003.safetensors",
274
+ "model.layers.16.block_sparse_moe.experts.6.w1.weight": "model-00002-of-00003.safetensors",
275
+ "model.layers.16.block_sparse_moe.experts.6.w2.weight": "model-00002-of-00003.safetensors",
276
+ "model.layers.16.block_sparse_moe.experts.6.w3.weight": "model-00002-of-00003.safetensors",
277
+ "model.layers.16.block_sparse_moe.experts.7.w1.weight": "model-00002-of-00003.safetensors",
278
+ "model.layers.16.block_sparse_moe.experts.7.w2.weight": "model-00002-of-00003.safetensors",
279
+ "model.layers.16.block_sparse_moe.experts.7.w3.weight": "model-00002-of-00003.safetensors",
280
+ "model.layers.16.block_sparse_moe.gate.weight": "model-00002-of-00003.safetensors",
281
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
282
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
283
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
284
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
285
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
286
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
287
+ "model.layers.17.block_sparse_moe.experts.0.w1.weight": "model-00002-of-00003.safetensors",
288
+ "model.layers.17.block_sparse_moe.experts.0.w2.weight": "model-00002-of-00003.safetensors",
289
+ "model.layers.17.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00003.safetensors",
290
+ "model.layers.17.block_sparse_moe.experts.1.w1.weight": "model-00002-of-00003.safetensors",
291
+ "model.layers.17.block_sparse_moe.experts.1.w2.weight": "model-00003-of-00003.safetensors",
292
+ "model.layers.17.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00003.safetensors",
293
+ "model.layers.17.block_sparse_moe.experts.2.w1.weight": "model-00003-of-00003.safetensors",
294
+ "model.layers.17.block_sparse_moe.experts.2.w2.weight": "model-00003-of-00003.safetensors",
295
+ "model.layers.17.block_sparse_moe.experts.2.w3.weight": "model-00003-of-00003.safetensors",
296
+ "model.layers.17.block_sparse_moe.experts.3.w1.weight": "model-00003-of-00003.safetensors",
297
+ "model.layers.17.block_sparse_moe.experts.3.w2.weight": "model-00003-of-00003.safetensors",
298
+ "model.layers.17.block_sparse_moe.experts.3.w3.weight": "model-00003-of-00003.safetensors",
299
+ "model.layers.17.block_sparse_moe.experts.4.w1.weight": "model-00003-of-00003.safetensors",
300
+ "model.layers.17.block_sparse_moe.experts.4.w2.weight": "model-00003-of-00003.safetensors",
301
+ "model.layers.17.block_sparse_moe.experts.4.w3.weight": "model-00003-of-00003.safetensors",
302
+ "model.layers.17.block_sparse_moe.experts.5.w1.weight": "model-00003-of-00003.safetensors",
303
+ "model.layers.17.block_sparse_moe.experts.5.w2.weight": "model-00003-of-00003.safetensors",
304
+ "model.layers.17.block_sparse_moe.experts.5.w3.weight": "model-00003-of-00003.safetensors",
305
+ "model.layers.17.block_sparse_moe.experts.6.w1.weight": "model-00003-of-00003.safetensors",
306
+ "model.layers.17.block_sparse_moe.experts.6.w2.weight": "model-00003-of-00003.safetensors",
307
+ "model.layers.17.block_sparse_moe.experts.6.w3.weight": "model-00003-of-00003.safetensors",
308
+ "model.layers.17.block_sparse_moe.experts.7.w1.weight": "model-00003-of-00003.safetensors",
309
+ "model.layers.17.block_sparse_moe.experts.7.w2.weight": "model-00003-of-00003.safetensors",
310
+ "model.layers.17.block_sparse_moe.experts.7.w3.weight": "model-00003-of-00003.safetensors",
311
+ "model.layers.17.block_sparse_moe.gate.weight": "model-00002-of-00003.safetensors",
312
+ "model.layers.17.input_layernorm.weight": "model-00003-of-00003.safetensors",
313
+ "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
314
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
315
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
316
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
317
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
318
+ "model.layers.18.block_sparse_moe.experts.0.w1.weight": "model-00003-of-00003.safetensors",
319
+ "model.layers.18.block_sparse_moe.experts.0.w2.weight": "model-00003-of-00003.safetensors",
320
+ "model.layers.18.block_sparse_moe.experts.0.w3.weight": "model-00003-of-00003.safetensors",
321
+ "model.layers.18.block_sparse_moe.experts.1.w1.weight": "model-00003-of-00003.safetensors",
322
+ "model.layers.18.block_sparse_moe.experts.1.w2.weight": "model-00003-of-00003.safetensors",
323
+ "model.layers.18.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00003.safetensors",
324
+ "model.layers.18.block_sparse_moe.experts.2.w1.weight": "model-00003-of-00003.safetensors",
325
+ "model.layers.18.block_sparse_moe.experts.2.w2.weight": "model-00003-of-00003.safetensors",
326
+ "model.layers.18.block_sparse_moe.experts.2.w3.weight": "model-00003-of-00003.safetensors",
327
+ "model.layers.18.block_sparse_moe.experts.3.w1.weight": "model-00003-of-00003.safetensors",
328
+ "model.layers.18.block_sparse_moe.experts.3.w2.weight": "model-00003-of-00003.safetensors",
329
+ "model.layers.18.block_sparse_moe.experts.3.w3.weight": "model-00003-of-00003.safetensors",
330
+ "model.layers.18.block_sparse_moe.experts.4.w1.weight": "model-00003-of-00003.safetensors",
331
+ "model.layers.18.block_sparse_moe.experts.4.w2.weight": "model-00003-of-00003.safetensors",
332
+ "model.layers.18.block_sparse_moe.experts.4.w3.weight": "model-00003-of-00003.safetensors",
333
+ "model.layers.18.block_sparse_moe.experts.5.w1.weight": "model-00003-of-00003.safetensors",
334
+ "model.layers.18.block_sparse_moe.experts.5.w2.weight": "model-00003-of-00003.safetensors",
335
+ "model.layers.18.block_sparse_moe.experts.5.w3.weight": "model-00003-of-00003.safetensors",
336
+ "model.layers.18.block_sparse_moe.experts.6.w1.weight": "model-00003-of-00003.safetensors",
337
+ "model.layers.18.block_sparse_moe.experts.6.w2.weight": "model-00003-of-00003.safetensors",
338
+ "model.layers.18.block_sparse_moe.experts.6.w3.weight": "model-00003-of-00003.safetensors",
339
+ "model.layers.18.block_sparse_moe.experts.7.w1.weight": "model-00003-of-00003.safetensors",
340
+ "model.layers.18.block_sparse_moe.experts.7.w2.weight": "model-00003-of-00003.safetensors",
341
+ "model.layers.18.block_sparse_moe.experts.7.w3.weight": "model-00003-of-00003.safetensors",
342
+ "model.layers.18.block_sparse_moe.gate.weight": "model-00003-of-00003.safetensors",
343
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00003.safetensors",
344
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
345
+ "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
346
+ "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
347
+ "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
348
+ "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
349
+ "model.layers.19.block_sparse_moe.experts.0.w1.weight": "model-00003-of-00003.safetensors",
350
+ "model.layers.19.block_sparse_moe.experts.0.w2.weight": "model-00003-of-00003.safetensors",
351
+ "model.layers.19.block_sparse_moe.experts.0.w3.weight": "model-00003-of-00003.safetensors",
352
+ "model.layers.19.block_sparse_moe.experts.1.w1.weight": "model-00003-of-00003.safetensors",
353
+ "model.layers.19.block_sparse_moe.experts.1.w2.weight": "model-00003-of-00003.safetensors",
354
+ "model.layers.19.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00003.safetensors",
355
+ "model.layers.19.block_sparse_moe.experts.2.w1.weight": "model-00003-of-00003.safetensors",
356
+ "model.layers.19.block_sparse_moe.experts.2.w2.weight": "model-00003-of-00003.safetensors",
357
+ "model.layers.19.block_sparse_moe.experts.2.w3.weight": "model-00003-of-00003.safetensors",
358
+ "model.layers.19.block_sparse_moe.experts.3.w1.weight": "model-00003-of-00003.safetensors",
359
+ "model.layers.19.block_sparse_moe.experts.3.w2.weight": "model-00003-of-00003.safetensors",
360
+ "model.layers.19.block_sparse_moe.experts.3.w3.weight": "model-00003-of-00003.safetensors",
361
+ "model.layers.19.block_sparse_moe.experts.4.w1.weight": "model-00003-of-00003.safetensors",
362
+ "model.layers.19.block_sparse_moe.experts.4.w2.weight": "model-00003-of-00003.safetensors",
363
+ "model.layers.19.block_sparse_moe.experts.4.w3.weight": "model-00003-of-00003.safetensors",
364
+ "model.layers.19.block_sparse_moe.experts.5.w1.weight": "model-00003-of-00003.safetensors",
365
+ "model.layers.19.block_sparse_moe.experts.5.w2.weight": "model-00003-of-00003.safetensors",
366
+ "model.layers.19.block_sparse_moe.experts.5.w3.weight": "model-00003-of-00003.safetensors",
367
+ "model.layers.19.block_sparse_moe.experts.6.w1.weight": "model-00003-of-00003.safetensors",
368
+ "model.layers.19.block_sparse_moe.experts.6.w2.weight": "model-00003-of-00003.safetensors",
369
+ "model.layers.19.block_sparse_moe.experts.6.w3.weight": "model-00003-of-00003.safetensors",
370
+ "model.layers.19.block_sparse_moe.experts.7.w1.weight": "model-00003-of-00003.safetensors",
371
+ "model.layers.19.block_sparse_moe.experts.7.w2.weight": "model-00003-of-00003.safetensors",
372
+ "model.layers.19.block_sparse_moe.experts.7.w3.weight": "model-00003-of-00003.safetensors",
373
+ "model.layers.19.block_sparse_moe.gate.weight": "model-00003-of-00003.safetensors",
374
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00003.safetensors",
375
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
376
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
377
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
378
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
379
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
380
+ "model.layers.2.block_sparse_moe.experts.0.w1.weight": "model-00001-of-00003.safetensors",
381
+ "model.layers.2.block_sparse_moe.experts.0.w2.weight": "model-00001-of-00003.safetensors",
382
+ "model.layers.2.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00003.safetensors",
383
+ "model.layers.2.block_sparse_moe.experts.1.w1.weight": "model-00001-of-00003.safetensors",
384
+ "model.layers.2.block_sparse_moe.experts.1.w2.weight": "model-00001-of-00003.safetensors",
385
+ "model.layers.2.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00003.safetensors",
386
+ "model.layers.2.block_sparse_moe.experts.2.w1.weight": "model-00001-of-00003.safetensors",
387
+ "model.layers.2.block_sparse_moe.experts.2.w2.weight": "model-00001-of-00003.safetensors",
388
+ "model.layers.2.block_sparse_moe.experts.2.w3.weight": "model-00001-of-00003.safetensors",
389
+ "model.layers.2.block_sparse_moe.experts.3.w1.weight": "model-00001-of-00003.safetensors",
390
+ "model.layers.2.block_sparse_moe.experts.3.w2.weight": "model-00001-of-00003.safetensors",
391
+ "model.layers.2.block_sparse_moe.experts.3.w3.weight": "model-00001-of-00003.safetensors",
392
+ "model.layers.2.block_sparse_moe.experts.4.w1.weight": "model-00001-of-00003.safetensors",
393
+ "model.layers.2.block_sparse_moe.experts.4.w2.weight": "model-00001-of-00003.safetensors",
394
+ "model.layers.2.block_sparse_moe.experts.4.w3.weight": "model-00001-of-00003.safetensors",
395
+ "model.layers.2.block_sparse_moe.experts.5.w1.weight": "model-00001-of-00003.safetensors",
396
+ "model.layers.2.block_sparse_moe.experts.5.w2.weight": "model-00001-of-00003.safetensors",
397
+ "model.layers.2.block_sparse_moe.experts.5.w3.weight": "model-00001-of-00003.safetensors",
398
+ "model.layers.2.block_sparse_moe.experts.6.w1.weight": "model-00001-of-00003.safetensors",
399
+ "model.layers.2.block_sparse_moe.experts.6.w2.weight": "model-00001-of-00003.safetensors",
400
+ "model.layers.2.block_sparse_moe.experts.6.w3.weight": "model-00001-of-00003.safetensors",
401
+ "model.layers.2.block_sparse_moe.experts.7.w1.weight": "model-00001-of-00003.safetensors",
402
+ "model.layers.2.block_sparse_moe.experts.7.w2.weight": "model-00001-of-00003.safetensors",
403
+ "model.layers.2.block_sparse_moe.experts.7.w3.weight": "model-00001-of-00003.safetensors",
404
+ "model.layers.2.block_sparse_moe.gate.weight": "model-00001-of-00003.safetensors",
405
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
406
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
407
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
408
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
409
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
410
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
411
+ "model.layers.20.block_sparse_moe.experts.0.w1.weight": "model-00003-of-00003.safetensors",
412
+ "model.layers.20.block_sparse_moe.experts.0.w2.weight": "model-00003-of-00003.safetensors",
413
+ "model.layers.20.block_sparse_moe.experts.0.w3.weight": "model-00003-of-00003.safetensors",
414
+ "model.layers.20.block_sparse_moe.experts.1.w1.weight": "model-00003-of-00003.safetensors",
415
+ "model.layers.20.block_sparse_moe.experts.1.w2.weight": "model-00003-of-00003.safetensors",
416
+ "model.layers.20.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00003.safetensors",
417
+ "model.layers.20.block_sparse_moe.experts.2.w1.weight": "model-00003-of-00003.safetensors",
418
+ "model.layers.20.block_sparse_moe.experts.2.w2.weight": "model-00003-of-00003.safetensors",
419
+ "model.layers.20.block_sparse_moe.experts.2.w3.weight": "model-00003-of-00003.safetensors",
420
+ "model.layers.20.block_sparse_moe.experts.3.w1.weight": "model-00003-of-00003.safetensors",
421
+ "model.layers.20.block_sparse_moe.experts.3.w2.weight": "model-00003-of-00003.safetensors",
422
+ "model.layers.20.block_sparse_moe.experts.3.w3.weight": "model-00003-of-00003.safetensors",
423
+ "model.layers.20.block_sparse_moe.experts.4.w1.weight": "model-00003-of-00003.safetensors",
424
+ "model.layers.20.block_sparse_moe.experts.4.w2.weight": "model-00003-of-00003.safetensors",
425
+ "model.layers.20.block_sparse_moe.experts.4.w3.weight": "model-00003-of-00003.safetensors",
426
+ "model.layers.20.block_sparse_moe.experts.5.w1.weight": "model-00003-of-00003.safetensors",
427
+ "model.layers.20.block_sparse_moe.experts.5.w2.weight": "model-00003-of-00003.safetensors",
428
+ "model.layers.20.block_sparse_moe.experts.5.w3.weight": "model-00003-of-00003.safetensors",
429
+ "model.layers.20.block_sparse_moe.experts.6.w1.weight": "model-00003-of-00003.safetensors",
430
+ "model.layers.20.block_sparse_moe.experts.6.w2.weight": "model-00003-of-00003.safetensors",
431
+ "model.layers.20.block_sparse_moe.experts.6.w3.weight": "model-00003-of-00003.safetensors",
432
+ "model.layers.20.block_sparse_moe.experts.7.w1.weight": "model-00003-of-00003.safetensors",
433
+ "model.layers.20.block_sparse_moe.experts.7.w2.weight": "model-00003-of-00003.safetensors",
434
+ "model.layers.20.block_sparse_moe.experts.7.w3.weight": "model-00003-of-00003.safetensors",
435
+ "model.layers.20.block_sparse_moe.gate.weight": "model-00003-of-00003.safetensors",
436
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00003.safetensors",
437
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
438
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
439
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
440
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
441
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
442
+ "model.layers.21.block_sparse_moe.experts.0.w1.weight": "model-00003-of-00003.safetensors",
443
+ "model.layers.21.block_sparse_moe.experts.0.w2.weight": "model-00003-of-00003.safetensors",
444
+ "model.layers.21.block_sparse_moe.experts.0.w3.weight": "model-00003-of-00003.safetensors",
445
+ "model.layers.21.block_sparse_moe.experts.1.w1.weight": "model-00003-of-00003.safetensors",
446
+ "model.layers.21.block_sparse_moe.experts.1.w2.weight": "model-00003-of-00003.safetensors",
447
+ "model.layers.21.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00003.safetensors",
448
+ "model.layers.21.block_sparse_moe.experts.2.w1.weight": "model-00003-of-00003.safetensors",
449
+ "model.layers.21.block_sparse_moe.experts.2.w2.weight": "model-00003-of-00003.safetensors",
450
+ "model.layers.21.block_sparse_moe.experts.2.w3.weight": "model-00003-of-00003.safetensors",
451
+ "model.layers.21.block_sparse_moe.experts.3.w1.weight": "model-00003-of-00003.safetensors",
452
+ "model.layers.21.block_sparse_moe.experts.3.w2.weight": "model-00003-of-00003.safetensors",
453
+ "model.layers.21.block_sparse_moe.experts.3.w3.weight": "model-00003-of-00003.safetensors",
454
+ "model.layers.21.block_sparse_moe.experts.4.w1.weight": "model-00003-of-00003.safetensors",
455
+ "model.layers.21.block_sparse_moe.experts.4.w2.weight": "model-00003-of-00003.safetensors",
456
+ "model.layers.21.block_sparse_moe.experts.4.w3.weight": "model-00003-of-00003.safetensors",
457
+ "model.layers.21.block_sparse_moe.experts.5.w1.weight": "model-00003-of-00003.safetensors",
458
+ "model.layers.21.block_sparse_moe.experts.5.w2.weight": "model-00003-of-00003.safetensors",
459
+ "model.layers.21.block_sparse_moe.experts.5.w3.weight": "model-00003-of-00003.safetensors",
460
+ "model.layers.21.block_sparse_moe.experts.6.w1.weight": "model-00003-of-00003.safetensors",
461
+ "model.layers.21.block_sparse_moe.experts.6.w2.weight": "model-00003-of-00003.safetensors",
462
+ "model.layers.21.block_sparse_moe.experts.6.w3.weight": "model-00003-of-00003.safetensors",
463
+ "model.layers.21.block_sparse_moe.experts.7.w1.weight": "model-00003-of-00003.safetensors",
464
+ "model.layers.21.block_sparse_moe.experts.7.w2.weight": "model-00003-of-00003.safetensors",
465
+ "model.layers.21.block_sparse_moe.experts.7.w3.weight": "model-00003-of-00003.safetensors",
466
+ "model.layers.21.block_sparse_moe.gate.weight": "model-00003-of-00003.safetensors",
467
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00003.safetensors",
468
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
469
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
470
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
471
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
472
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
473
+ "model.layers.3.block_sparse_moe.experts.0.w1.weight": "model-00001-of-00003.safetensors",
474
+ "model.layers.3.block_sparse_moe.experts.0.w2.weight": "model-00001-of-00003.safetensors",
475
+ "model.layers.3.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00003.safetensors",
476
+ "model.layers.3.block_sparse_moe.experts.1.w1.weight": "model-00001-of-00003.safetensors",
477
+ "model.layers.3.block_sparse_moe.experts.1.w2.weight": "model-00001-of-00003.safetensors",
478
+ "model.layers.3.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00003.safetensors",
479
+ "model.layers.3.block_sparse_moe.experts.2.w1.weight": "model-00001-of-00003.safetensors",
480
+ "model.layers.3.block_sparse_moe.experts.2.w2.weight": "model-00001-of-00003.safetensors",
481
+ "model.layers.3.block_sparse_moe.experts.2.w3.weight": "model-00001-of-00003.safetensors",
482
+ "model.layers.3.block_sparse_moe.experts.3.w1.weight": "model-00001-of-00003.safetensors",
483
+ "model.layers.3.block_sparse_moe.experts.3.w2.weight": "model-00001-of-00003.safetensors",
484
+ "model.layers.3.block_sparse_moe.experts.3.w3.weight": "model-00001-of-00003.safetensors",
485
+ "model.layers.3.block_sparse_moe.experts.4.w1.weight": "model-00001-of-00003.safetensors",
486
+ "model.layers.3.block_sparse_moe.experts.4.w2.weight": "model-00001-of-00003.safetensors",
487
+ "model.layers.3.block_sparse_moe.experts.4.w3.weight": "model-00001-of-00003.safetensors",
488
+ "model.layers.3.block_sparse_moe.experts.5.w1.weight": "model-00001-of-00003.safetensors",
489
+ "model.layers.3.block_sparse_moe.experts.5.w2.weight": "model-00001-of-00003.safetensors",
490
+ "model.layers.3.block_sparse_moe.experts.5.w3.weight": "model-00001-of-00003.safetensors",
491
+ "model.layers.3.block_sparse_moe.experts.6.w1.weight": "model-00001-of-00003.safetensors",
492
+ "model.layers.3.block_sparse_moe.experts.6.w2.weight": "model-00001-of-00003.safetensors",
493
+ "model.layers.3.block_sparse_moe.experts.6.w3.weight": "model-00001-of-00003.safetensors",
494
+ "model.layers.3.block_sparse_moe.experts.7.w1.weight": "model-00001-of-00003.safetensors",
495
+ "model.layers.3.block_sparse_moe.experts.7.w2.weight": "model-00001-of-00003.safetensors",
496
+ "model.layers.3.block_sparse_moe.experts.7.w3.weight": "model-00001-of-00003.safetensors",
497
+ "model.layers.3.block_sparse_moe.gate.weight": "model-00001-of-00003.safetensors",
498
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
499
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
500
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
501
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
502
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
503
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
504
+ "model.layers.4.block_sparse_moe.experts.0.w1.weight": "model-00001-of-00003.safetensors",
505
+ "model.layers.4.block_sparse_moe.experts.0.w2.weight": "model-00001-of-00003.safetensors",
506
+ "model.layers.4.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00003.safetensors",
507
+ "model.layers.4.block_sparse_moe.experts.1.w1.weight": "model-00001-of-00003.safetensors",
508
+ "model.layers.4.block_sparse_moe.experts.1.w2.weight": "model-00001-of-00003.safetensors",
509
+ "model.layers.4.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00003.safetensors",
510
+ "model.layers.4.block_sparse_moe.experts.2.w1.weight": "model-00001-of-00003.safetensors",
511
+ "model.layers.4.block_sparse_moe.experts.2.w2.weight": "model-00001-of-00003.safetensors",
512
+ "model.layers.4.block_sparse_moe.experts.2.w3.weight": "model-00001-of-00003.safetensors",
513
+ "model.layers.4.block_sparse_moe.experts.3.w1.weight": "model-00001-of-00003.safetensors",
514
+ "model.layers.4.block_sparse_moe.experts.3.w2.weight": "model-00001-of-00003.safetensors",
515
+ "model.layers.4.block_sparse_moe.experts.3.w3.weight": "model-00001-of-00003.safetensors",
516
+ "model.layers.4.block_sparse_moe.experts.4.w1.weight": "model-00001-of-00003.safetensors",
517
+ "model.layers.4.block_sparse_moe.experts.4.w2.weight": "model-00001-of-00003.safetensors",
518
+ "model.layers.4.block_sparse_moe.experts.4.w3.weight": "model-00001-of-00003.safetensors",
519
+ "model.layers.4.block_sparse_moe.experts.5.w1.weight": "model-00001-of-00003.safetensors",
520
+ "model.layers.4.block_sparse_moe.experts.5.w2.weight": "model-00001-of-00003.safetensors",
521
+ "model.layers.4.block_sparse_moe.experts.5.w3.weight": "model-00001-of-00003.safetensors",
522
+ "model.layers.4.block_sparse_moe.experts.6.w1.weight": "model-00001-of-00003.safetensors",
523
+ "model.layers.4.block_sparse_moe.experts.6.w2.weight": "model-00001-of-00003.safetensors",
524
+ "model.layers.4.block_sparse_moe.experts.6.w3.weight": "model-00001-of-00003.safetensors",
525
+ "model.layers.4.block_sparse_moe.experts.7.w1.weight": "model-00001-of-00003.safetensors",
526
+ "model.layers.4.block_sparse_moe.experts.7.w2.weight": "model-00001-of-00003.safetensors",
527
+ "model.layers.4.block_sparse_moe.experts.7.w3.weight": "model-00001-of-00003.safetensors",
528
+ "model.layers.4.block_sparse_moe.gate.weight": "model-00001-of-00003.safetensors",
529
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
530
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
531
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
532
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
533
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
534
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
535
+ "model.layers.5.block_sparse_moe.experts.0.w1.weight": "model-00001-of-00003.safetensors",
536
+ "model.layers.5.block_sparse_moe.experts.0.w2.weight": "model-00001-of-00003.safetensors",
537
+ "model.layers.5.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00003.safetensors",
538
+ "model.layers.5.block_sparse_moe.experts.1.w1.weight": "model-00001-of-00003.safetensors",
539
+ "model.layers.5.block_sparse_moe.experts.1.w2.weight": "model-00001-of-00003.safetensors",
540
+ "model.layers.5.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00003.safetensors",
541
+ "model.layers.5.block_sparse_moe.experts.2.w1.weight": "model-00001-of-00003.safetensors",
542
+ "model.layers.5.block_sparse_moe.experts.2.w2.weight": "model-00001-of-00003.safetensors",
543
+ "model.layers.5.block_sparse_moe.experts.2.w3.weight": "model-00001-of-00003.safetensors",
544
+ "model.layers.5.block_sparse_moe.experts.3.w1.weight": "model-00001-of-00003.safetensors",
545
+ "model.layers.5.block_sparse_moe.experts.3.w2.weight": "model-00001-of-00003.safetensors",
546
+ "model.layers.5.block_sparse_moe.experts.3.w3.weight": "model-00001-of-00003.safetensors",
547
+ "model.layers.5.block_sparse_moe.experts.4.w1.weight": "model-00001-of-00003.safetensors",
548
+ "model.layers.5.block_sparse_moe.experts.4.w2.weight": "model-00001-of-00003.safetensors",
549
+ "model.layers.5.block_sparse_moe.experts.4.w3.weight": "model-00001-of-00003.safetensors",
550
+ "model.layers.5.block_sparse_moe.experts.5.w1.weight": "model-00001-of-00003.safetensors",
551
+ "model.layers.5.block_sparse_moe.experts.5.w2.weight": "model-00001-of-00003.safetensors",
552
+ "model.layers.5.block_sparse_moe.experts.5.w3.weight": "model-00001-of-00003.safetensors",
553
+ "model.layers.5.block_sparse_moe.experts.6.w1.weight": "model-00001-of-00003.safetensors",
554
+ "model.layers.5.block_sparse_moe.experts.6.w2.weight": "model-00001-of-00003.safetensors",
555
+ "model.layers.5.block_sparse_moe.experts.6.w3.weight": "model-00001-of-00003.safetensors",
556
+ "model.layers.5.block_sparse_moe.experts.7.w1.weight": "model-00001-of-00003.safetensors",
557
+ "model.layers.5.block_sparse_moe.experts.7.w2.weight": "model-00001-of-00003.safetensors",
558
+ "model.layers.5.block_sparse_moe.experts.7.w3.weight": "model-00001-of-00003.safetensors",
559
+ "model.layers.5.block_sparse_moe.gate.weight": "model-00001-of-00003.safetensors",
560
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
561
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
562
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
563
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
564
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
565
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
566
+ "model.layers.6.block_sparse_moe.experts.0.w1.weight": "model-00001-of-00003.safetensors",
567
+ "model.layers.6.block_sparse_moe.experts.0.w2.weight": "model-00001-of-00003.safetensors",
568
+ "model.layers.6.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00003.safetensors",
569
+ "model.layers.6.block_sparse_moe.experts.1.w1.weight": "model-00001-of-00003.safetensors",
570
+ "model.layers.6.block_sparse_moe.experts.1.w2.weight": "model-00001-of-00003.safetensors",
571
+ "model.layers.6.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00003.safetensors",
572
+ "model.layers.6.block_sparse_moe.experts.2.w1.weight": "model-00001-of-00003.safetensors",
573
+ "model.layers.6.block_sparse_moe.experts.2.w2.weight": "model-00001-of-00003.safetensors",
574
+ "model.layers.6.block_sparse_moe.experts.2.w3.weight": "model-00001-of-00003.safetensors",
575
+ "model.layers.6.block_sparse_moe.experts.3.w1.weight": "model-00001-of-00003.safetensors",
576
+ "model.layers.6.block_sparse_moe.experts.3.w2.weight": "model-00001-of-00003.safetensors",
577
+ "model.layers.6.block_sparse_moe.experts.3.w3.weight": "model-00001-of-00003.safetensors",
578
+ "model.layers.6.block_sparse_moe.experts.4.w1.weight": "model-00001-of-00003.safetensors",
579
+ "model.layers.6.block_sparse_moe.experts.4.w2.weight": "model-00001-of-00003.safetensors",
580
+ "model.layers.6.block_sparse_moe.experts.4.w3.weight": "model-00001-of-00003.safetensors",
581
+ "model.layers.6.block_sparse_moe.experts.5.w1.weight": "model-00001-of-00003.safetensors",
582
+ "model.layers.6.block_sparse_moe.experts.5.w2.weight": "model-00001-of-00003.safetensors",
583
+ "model.layers.6.block_sparse_moe.experts.5.w3.weight": "model-00001-of-00003.safetensors",
584
+ "model.layers.6.block_sparse_moe.experts.6.w1.weight": "model-00001-of-00003.safetensors",
585
+ "model.layers.6.block_sparse_moe.experts.6.w2.weight": "model-00001-of-00003.safetensors",
586
+ "model.layers.6.block_sparse_moe.experts.6.w3.weight": "model-00001-of-00003.safetensors",
587
+ "model.layers.6.block_sparse_moe.experts.7.w1.weight": "model-00001-of-00003.safetensors",
588
+ "model.layers.6.block_sparse_moe.experts.7.w2.weight": "model-00001-of-00003.safetensors",
589
+ "model.layers.6.block_sparse_moe.experts.7.w3.weight": "model-00001-of-00003.safetensors",
590
+ "model.layers.6.block_sparse_moe.gate.weight": "model-00001-of-00003.safetensors",
591
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
592
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
593
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
594
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
595
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
596
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
597
+ "model.layers.7.block_sparse_moe.experts.0.w1.weight": "model-00001-of-00003.safetensors",
598
+ "model.layers.7.block_sparse_moe.experts.0.w2.weight": "model-00001-of-00003.safetensors",
599
+ "model.layers.7.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00003.safetensors",
600
+ "model.layers.7.block_sparse_moe.experts.1.w1.weight": "model-00001-of-00003.safetensors",
601
+ "model.layers.7.block_sparse_moe.experts.1.w2.weight": "model-00001-of-00003.safetensors",
602
+ "model.layers.7.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00003.safetensors",
603
+ "model.layers.7.block_sparse_moe.experts.2.w1.weight": "model-00001-of-00003.safetensors",
604
+ "model.layers.7.block_sparse_moe.experts.2.w2.weight": "model-00001-of-00003.safetensors",
605
+ "model.layers.7.block_sparse_moe.experts.2.w3.weight": "model-00001-of-00003.safetensors",
606
+ "model.layers.7.block_sparse_moe.experts.3.w1.weight": "model-00001-of-00003.safetensors",
607
+ "model.layers.7.block_sparse_moe.experts.3.w2.weight": "model-00001-of-00003.safetensors",
608
+ "model.layers.7.block_sparse_moe.experts.3.w3.weight": "model-00001-of-00003.safetensors",
609
+ "model.layers.7.block_sparse_moe.experts.4.w1.weight": "model-00001-of-00003.safetensors",
610
+ "model.layers.7.block_sparse_moe.experts.4.w2.weight": "model-00001-of-00003.safetensors",
611
+ "model.layers.7.block_sparse_moe.experts.4.w3.weight": "model-00001-of-00003.safetensors",
612
+ "model.layers.7.block_sparse_moe.experts.5.w1.weight": "model-00001-of-00003.safetensors",
613
+ "model.layers.7.block_sparse_moe.experts.5.w2.weight": "model-00001-of-00003.safetensors",
614
+ "model.layers.7.block_sparse_moe.experts.5.w3.weight": "model-00001-of-00003.safetensors",
615
+ "model.layers.7.block_sparse_moe.experts.6.w1.weight": "model-00001-of-00003.safetensors",
616
+ "model.layers.7.block_sparse_moe.experts.6.w2.weight": "model-00001-of-00003.safetensors",
617
+ "model.layers.7.block_sparse_moe.experts.6.w3.weight": "model-00001-of-00003.safetensors",
618
+ "model.layers.7.block_sparse_moe.experts.7.w1.weight": "model-00001-of-00003.safetensors",
619
+ "model.layers.7.block_sparse_moe.experts.7.w2.weight": "model-00001-of-00003.safetensors",
620
+ "model.layers.7.block_sparse_moe.experts.7.w3.weight": "model-00001-of-00003.safetensors",
621
+ "model.layers.7.block_sparse_moe.gate.weight": "model-00001-of-00003.safetensors",
622
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
623
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
624
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
625
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
626
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
627
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
628
+ "model.layers.8.block_sparse_moe.experts.0.w1.weight": "model-00001-of-00003.safetensors",
629
+ "model.layers.8.block_sparse_moe.experts.0.w2.weight": "model-00001-of-00003.safetensors",
630
+ "model.layers.8.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00003.safetensors",
631
+ "model.layers.8.block_sparse_moe.experts.1.w1.weight": "model-00001-of-00003.safetensors",
632
+ "model.layers.8.block_sparse_moe.experts.1.w2.weight": "model-00001-of-00003.safetensors",
633
+ "model.layers.8.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00003.safetensors",
634
+ "model.layers.8.block_sparse_moe.experts.2.w1.weight": "model-00001-of-00003.safetensors",
635
+ "model.layers.8.block_sparse_moe.experts.2.w2.weight": "model-00001-of-00003.safetensors",
636
+ "model.layers.8.block_sparse_moe.experts.2.w3.weight": "model-00001-of-00003.safetensors",
637
+ "model.layers.8.block_sparse_moe.experts.3.w1.weight": "model-00001-of-00003.safetensors",
638
+ "model.layers.8.block_sparse_moe.experts.3.w2.weight": "model-00001-of-00003.safetensors",
639
+ "model.layers.8.block_sparse_moe.experts.3.w3.weight": "model-00002-of-00003.safetensors",
640
+ "model.layers.8.block_sparse_moe.experts.4.w1.weight": "model-00002-of-00003.safetensors",
641
+ "model.layers.8.block_sparse_moe.experts.4.w2.weight": "model-00002-of-00003.safetensors",
642
+ "model.layers.8.block_sparse_moe.experts.4.w3.weight": "model-00002-of-00003.safetensors",
643
+ "model.layers.8.block_sparse_moe.experts.5.w1.weight": "model-00002-of-00003.safetensors",
644
+ "model.layers.8.block_sparse_moe.experts.5.w2.weight": "model-00002-of-00003.safetensors",
645
+ "model.layers.8.block_sparse_moe.experts.5.w3.weight": "model-00002-of-00003.safetensors",
646
+ "model.layers.8.block_sparse_moe.experts.6.w1.weight": "model-00002-of-00003.safetensors",
647
+ "model.layers.8.block_sparse_moe.experts.6.w2.weight": "model-00002-of-00003.safetensors",
648
+ "model.layers.8.block_sparse_moe.experts.6.w3.weight": "model-00002-of-00003.safetensors",
649
+ "model.layers.8.block_sparse_moe.experts.7.w1.weight": "model-00002-of-00003.safetensors",
650
+ "model.layers.8.block_sparse_moe.experts.7.w2.weight": "model-00002-of-00003.safetensors",
651
+ "model.layers.8.block_sparse_moe.experts.7.w3.weight": "model-00002-of-00003.safetensors",
652
+ "model.layers.8.block_sparse_moe.gate.weight": "model-00001-of-00003.safetensors",
653
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00003.safetensors",
654
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
655
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
656
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
657
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
658
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
659
+ "model.layers.9.block_sparse_moe.experts.0.w1.weight": "model-00002-of-00003.safetensors",
660
+ "model.layers.9.block_sparse_moe.experts.0.w2.weight": "model-00002-of-00003.safetensors",
661
+ "model.layers.9.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00003.safetensors",
662
+ "model.layers.9.block_sparse_moe.experts.1.w1.weight": "model-00002-of-00003.safetensors",
663
+ "model.layers.9.block_sparse_moe.experts.1.w2.weight": "model-00002-of-00003.safetensors",
664
+ "model.layers.9.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00003.safetensors",
665
+ "model.layers.9.block_sparse_moe.experts.2.w1.weight": "model-00002-of-00003.safetensors",
666
+ "model.layers.9.block_sparse_moe.experts.2.w2.weight": "model-00002-of-00003.safetensors",
667
+ "model.layers.9.block_sparse_moe.experts.2.w3.weight": "model-00002-of-00003.safetensors",
668
+ "model.layers.9.block_sparse_moe.experts.3.w1.weight": "model-00002-of-00003.safetensors",
669
+ "model.layers.9.block_sparse_moe.experts.3.w2.weight": "model-00002-of-00003.safetensors",
670
+ "model.layers.9.block_sparse_moe.experts.3.w3.weight": "model-00002-of-00003.safetensors",
671
+ "model.layers.9.block_sparse_moe.experts.4.w1.weight": "model-00002-of-00003.safetensors",
672
+ "model.layers.9.block_sparse_moe.experts.4.w2.weight": "model-00002-of-00003.safetensors",
673
+ "model.layers.9.block_sparse_moe.experts.4.w3.weight": "model-00002-of-00003.safetensors",
674
+ "model.layers.9.block_sparse_moe.experts.5.w1.weight": "model-00002-of-00003.safetensors",
675
+ "model.layers.9.block_sparse_moe.experts.5.w2.weight": "model-00002-of-00003.safetensors",
676
+ "model.layers.9.block_sparse_moe.experts.5.w3.weight": "model-00002-of-00003.safetensors",
677
+ "model.layers.9.block_sparse_moe.experts.6.w1.weight": "model-00002-of-00003.safetensors",
678
+ "model.layers.9.block_sparse_moe.experts.6.w2.weight": "model-00002-of-00003.safetensors",
679
+ "model.layers.9.block_sparse_moe.experts.6.w3.weight": "model-00002-of-00003.safetensors",
680
+ "model.layers.9.block_sparse_moe.experts.7.w1.weight": "model-00002-of-00003.safetensors",
681
+ "model.layers.9.block_sparse_moe.experts.7.w2.weight": "model-00002-of-00003.safetensors",
682
+ "model.layers.9.block_sparse_moe.experts.7.w3.weight": "model-00002-of-00003.safetensors",
683
+ "model.layers.9.block_sparse_moe.gate.weight": "model-00002-of-00003.safetensors",
684
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00003.safetensors",
685
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
686
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
687
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
688
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
689
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
690
+ "model.norm.weight": "model-00003-of-00003.safetensors"
691
+ }
692
+ }
runs/Jan17_21-13-19_main1/events.out.tfevents.1705526089.main1.11993.0 CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d0150bb65644574cd94662863c2eab1bc5ec04846368036c9a8af771add5fded
3
- size 120785
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7c70132a0f5763eb38ae3af3b8f80cd7a78744e86c7992bd31834eafd9069d66
3
+ size 121767
runs/Jan17_21-13-19_main1/events.out.tfevents.1705578011.main1.11993.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cefd06a18ab91b3c763a7c07b11cdf25883385e1467da3aee8e38ac13eb65e5a
3
+ size 359
train_results.json ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 3.0,
3
+ "train_loss": 1.1024342694896863,
4
+ "train_runtime": 51497.7783,
5
+ "train_samples": 207865,
6
+ "train_samples_per_second": 8.508,
7
+ "train_steps_per_second": 0.066
8
+ }
trainer_state.json ADDED
@@ -0,0 +1,4412 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": 1.1177691221237183,
3
+ "best_model_checkpoint": "data/tinyllama_moe_sft_ultrachat200k_v2_epochs3/checkpoint-2200",
4
+ "epoch": 2.9986859395532193,
5
+ "eval_steps": 100,
6
+ "global_step": 3423,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.0,
13
+ "learning_rate": 1.7391304347826088e-07,
14
+ "loss": 2.7477,
15
+ "step": 1
16
+ },
17
+ {
18
+ "epoch": 0.0,
19
+ "learning_rate": 8.695652173913044e-07,
20
+ "loss": 2.6989,
21
+ "step": 5
22
+ },
23
+ {
24
+ "epoch": 0.01,
25
+ "learning_rate": 1.7391304347826088e-06,
26
+ "loss": 2.7026,
27
+ "step": 10
28
+ },
29
+ {
30
+ "epoch": 0.01,
31
+ "learning_rate": 2.6086956521739132e-06,
32
+ "loss": 2.5994,
33
+ "step": 15
34
+ },
35
+ {
36
+ "epoch": 0.02,
37
+ "learning_rate": 3.4782608695652175e-06,
38
+ "loss": 2.4352,
39
+ "step": 20
40
+ },
41
+ {
42
+ "epoch": 0.02,
43
+ "learning_rate": 4.347826086956522e-06,
44
+ "loss": 2.0772,
45
+ "step": 25
46
+ },
47
+ {
48
+ "epoch": 0.03,
49
+ "learning_rate": 5.2173913043478265e-06,
50
+ "loss": 1.8706,
51
+ "step": 30
52
+ },
53
+ {
54
+ "epoch": 0.03,
55
+ "learning_rate": 6.086956521739132e-06,
56
+ "loss": 1.7352,
57
+ "step": 35
58
+ },
59
+ {
60
+ "epoch": 0.04,
61
+ "learning_rate": 6.956521739130435e-06,
62
+ "loss": 1.6574,
63
+ "step": 40
64
+ },
65
+ {
66
+ "epoch": 0.04,
67
+ "learning_rate": 7.82608695652174e-06,
68
+ "loss": 1.5879,
69
+ "step": 45
70
+ },
71
+ {
72
+ "epoch": 0.04,
73
+ "learning_rate": 8.695652173913044e-06,
74
+ "loss": 1.5429,
75
+ "step": 50
76
+ },
77
+ {
78
+ "epoch": 0.05,
79
+ "learning_rate": 9.565217391304349e-06,
80
+ "loss": 1.4805,
81
+ "step": 55
82
+ },
83
+ {
84
+ "epoch": 0.05,
85
+ "learning_rate": 1.0434782608695653e-05,
86
+ "loss": 1.4633,
87
+ "step": 60
88
+ },
89
+ {
90
+ "epoch": 0.06,
91
+ "learning_rate": 1.1304347826086957e-05,
92
+ "loss": 1.4199,
93
+ "step": 65
94
+ },
95
+ {
96
+ "epoch": 0.06,
97
+ "learning_rate": 1.2173913043478263e-05,
98
+ "loss": 1.4117,
99
+ "step": 70
100
+ },
101
+ {
102
+ "epoch": 0.07,
103
+ "learning_rate": 1.3043478260869566e-05,
104
+ "loss": 1.3668,
105
+ "step": 75
106
+ },
107
+ {
108
+ "epoch": 0.07,
109
+ "learning_rate": 1.391304347826087e-05,
110
+ "loss": 1.359,
111
+ "step": 80
112
+ },
113
+ {
114
+ "epoch": 0.07,
115
+ "learning_rate": 1.4782608695652174e-05,
116
+ "loss": 1.3563,
117
+ "step": 85
118
+ },
119
+ {
120
+ "epoch": 0.08,
121
+ "learning_rate": 1.565217391304348e-05,
122
+ "loss": 1.3425,
123
+ "step": 90
124
+ },
125
+ {
126
+ "epoch": 0.08,
127
+ "learning_rate": 1.6521739130434785e-05,
128
+ "loss": 1.3117,
129
+ "step": 95
130
+ },
131
+ {
132
+ "epoch": 0.09,
133
+ "learning_rate": 1.739130434782609e-05,
134
+ "loss": 1.336,
135
+ "step": 100
136
+ },
137
+ {
138
+ "epoch": 0.09,
139
+ "eval_loss": 1.312857747077942,
140
+ "eval_runtime": 427.5314,
141
+ "eval_samples_per_second": 37.812,
142
+ "eval_steps_per_second": 1.184,
143
+ "step": 100
144
+ },
145
+ {
146
+ "epoch": 0.09,
147
+ "learning_rate": 1.8260869565217393e-05,
148
+ "loss": 1.3086,
149
+ "step": 105
150
+ },
151
+ {
152
+ "epoch": 0.1,
153
+ "learning_rate": 1.9130434782608697e-05,
154
+ "loss": 1.2992,
155
+ "step": 110
156
+ },
157
+ {
158
+ "epoch": 0.1,
159
+ "learning_rate": 2e-05,
160
+ "loss": 1.2999,
161
+ "step": 115
162
+ },
163
+ {
164
+ "epoch": 0.11,
165
+ "learning_rate": 1.9999887260023335e-05,
166
+ "loss": 1.2738,
167
+ "step": 120
168
+ },
169
+ {
170
+ "epoch": 0.11,
171
+ "learning_rate": 1.99995490426354e-05,
172
+ "loss": 1.277,
173
+ "step": 125
174
+ },
175
+ {
176
+ "epoch": 0.11,
177
+ "learning_rate": 1.9998985355462318e-05,
178
+ "loss": 1.255,
179
+ "step": 130
180
+ },
181
+ {
182
+ "epoch": 0.12,
183
+ "learning_rate": 1.999819621121411e-05,
184
+ "loss": 1.271,
185
+ "step": 135
186
+ },
187
+ {
188
+ "epoch": 0.12,
189
+ "learning_rate": 1.999718162768439e-05,
190
+ "loss": 1.282,
191
+ "step": 140
192
+ },
193
+ {
194
+ "epoch": 0.13,
195
+ "learning_rate": 1.9995941627749983e-05,
196
+ "loss": 1.2597,
197
+ "step": 145
198
+ },
199
+ {
200
+ "epoch": 0.13,
201
+ "learning_rate": 1.9994476239370407e-05,
202
+ "loss": 1.2623,
203
+ "step": 150
204
+ },
205
+ {
206
+ "epoch": 0.14,
207
+ "learning_rate": 1.9992785495587226e-05,
208
+ "loss": 1.2541,
209
+ "step": 155
210
+ },
211
+ {
212
+ "epoch": 0.14,
213
+ "learning_rate": 1.999086943452333e-05,
214
+ "loss": 1.248,
215
+ "step": 160
216
+ },
217
+ {
218
+ "epoch": 0.14,
219
+ "learning_rate": 1.998872809938205e-05,
220
+ "loss": 1.2444,
221
+ "step": 165
222
+ },
223
+ {
224
+ "epoch": 0.15,
225
+ "learning_rate": 1.9986361538446202e-05,
226
+ "loss": 1.2482,
227
+ "step": 170
228
+ },
229
+ {
230
+ "epoch": 0.15,
231
+ "learning_rate": 1.998376980507699e-05,
232
+ "loss": 1.2575,
233
+ "step": 175
234
+ },
235
+ {
236
+ "epoch": 0.16,
237
+ "learning_rate": 1.998095295771281e-05,
238
+ "loss": 1.2307,
239
+ "step": 180
240
+ },
241
+ {
242
+ "epoch": 0.16,
243
+ "learning_rate": 1.9977911059867917e-05,
244
+ "loss": 1.2328,
245
+ "step": 185
246
+ },
247
+ {
248
+ "epoch": 0.17,
249
+ "learning_rate": 1.9974644180131016e-05,
250
+ "loss": 1.2423,
251
+ "step": 190
252
+ },
253
+ {
254
+ "epoch": 0.17,
255
+ "learning_rate": 1.997115239216369e-05,
256
+ "loss": 1.2272,
257
+ "step": 195
258
+ },
259
+ {
260
+ "epoch": 0.18,
261
+ "learning_rate": 1.996743577469876e-05,
262
+ "loss": 1.2424,
263
+ "step": 200
264
+ },
265
+ {
266
+ "epoch": 0.18,
267
+ "eval_loss": 1.236301302909851,
268
+ "eval_runtime": 425.2862,
269
+ "eval_samples_per_second": 38.012,
270
+ "eval_steps_per_second": 1.19,
271
+ "step": 200
272
+ },
273
+ {
274
+ "epoch": 0.18,
275
+ "learning_rate": 1.99634944115385e-05,
276
+ "loss": 1.2403,
277
+ "step": 205
278
+ },
279
+ {
280
+ "epoch": 0.18,
281
+ "learning_rate": 1.995932839155275e-05,
282
+ "loss": 1.2136,
283
+ "step": 210
284
+ },
285
+ {
286
+ "epoch": 0.19,
287
+ "learning_rate": 1.9954937808676906e-05,
288
+ "loss": 1.2461,
289
+ "step": 215
290
+ },
291
+ {
292
+ "epoch": 0.19,
293
+ "learning_rate": 1.995032276190981e-05,
294
+ "loss": 1.2197,
295
+ "step": 220
296
+ },
297
+ {
298
+ "epoch": 0.2,
299
+ "learning_rate": 1.9945483355311515e-05,
300
+ "loss": 1.2035,
301
+ "step": 225
302
+ },
303
+ {
304
+ "epoch": 0.2,
305
+ "learning_rate": 1.9940419698000944e-05,
306
+ "loss": 1.2222,
307
+ "step": 230
308
+ },
309
+ {
310
+ "epoch": 0.21,
311
+ "learning_rate": 1.993513190415341e-05,
312
+ "loss": 1.2323,
313
+ "step": 235
314
+ },
315
+ {
316
+ "epoch": 0.21,
317
+ "learning_rate": 1.992962009299807e-05,
318
+ "loss": 1.2048,
319
+ "step": 240
320
+ },
321
+ {
322
+ "epoch": 0.21,
323
+ "learning_rate": 1.992388438881521e-05,
324
+ "loss": 1.2339,
325
+ "step": 245
326
+ },
327
+ {
328
+ "epoch": 0.22,
329
+ "learning_rate": 1.9917924920933472e-05,
330
+ "loss": 1.2085,
331
+ "step": 250
332
+ },
333
+ {
334
+ "epoch": 0.22,
335
+ "learning_rate": 1.99117418237269e-05,
336
+ "loss": 1.2169,
337
+ "step": 255
338
+ },
339
+ {
340
+ "epoch": 0.23,
341
+ "learning_rate": 1.990533523661194e-05,
342
+ "loss": 1.2209,
343
+ "step": 260
344
+ },
345
+ {
346
+ "epoch": 0.23,
347
+ "learning_rate": 1.9898705304044303e-05,
348
+ "loss": 1.2016,
349
+ "step": 265
350
+ },
351
+ {
352
+ "epoch": 0.24,
353
+ "learning_rate": 1.989185217551566e-05,
354
+ "loss": 1.2028,
355
+ "step": 270
356
+ },
357
+ {
358
+ "epoch": 0.24,
359
+ "learning_rate": 1.9884776005550326e-05,
360
+ "loss": 1.2171,
361
+ "step": 275
362
+ },
363
+ {
364
+ "epoch": 0.25,
365
+ "learning_rate": 1.9877476953701756e-05,
366
+ "loss": 1.1922,
367
+ "step": 280
368
+ },
369
+ {
370
+ "epoch": 0.25,
371
+ "learning_rate": 1.986995518454893e-05,
372
+ "loss": 1.224,
373
+ "step": 285
374
+ },
375
+ {
376
+ "epoch": 0.25,
377
+ "learning_rate": 1.9862210867692665e-05,
378
+ "loss": 1.1925,
379
+ "step": 290
380
+ },
381
+ {
382
+ "epoch": 0.26,
383
+ "learning_rate": 1.9854244177751782e-05,
384
+ "loss": 1.2121,
385
+ "step": 295
386
+ },
387
+ {
388
+ "epoch": 0.26,
389
+ "learning_rate": 1.984605529435917e-05,
390
+ "loss": 1.2079,
391
+ "step": 300
392
+ },
393
+ {
394
+ "epoch": 0.26,
395
+ "eval_loss": 1.208355188369751,
396
+ "eval_runtime": 425.2694,
397
+ "eval_samples_per_second": 38.014,
398
+ "eval_steps_per_second": 1.19,
399
+ "step": 300
400
+ },
401
+ {
402
+ "epoch": 0.27,
403
+ "learning_rate": 1.9837644402157726e-05,
404
+ "loss": 1.2104,
405
+ "step": 305
406
+ },
407
+ {
408
+ "epoch": 0.27,
409
+ "learning_rate": 1.9829011690796222e-05,
410
+ "loss": 1.1775,
411
+ "step": 310
412
+ },
413
+ {
414
+ "epoch": 0.28,
415
+ "learning_rate": 1.982015735492498e-05,
416
+ "loss": 1.2005,
417
+ "step": 315
418
+ },
419
+ {
420
+ "epoch": 0.28,
421
+ "learning_rate": 1.981108159419153e-05,
422
+ "loss": 1.1946,
423
+ "step": 320
424
+ },
425
+ {
426
+ "epoch": 0.28,
427
+ "learning_rate": 1.9801784613236082e-05,
428
+ "loss": 1.1911,
429
+ "step": 325
430
+ },
431
+ {
432
+ "epoch": 0.29,
433
+ "learning_rate": 1.9792266621686924e-05,
434
+ "loss": 1.1859,
435
+ "step": 330
436
+ },
437
+ {
438
+ "epoch": 0.29,
439
+ "learning_rate": 1.9782527834155674e-05,
440
+ "loss": 1.2373,
441
+ "step": 335
442
+ },
443
+ {
444
+ "epoch": 0.3,
445
+ "learning_rate": 1.977256847023248e-05,
446
+ "loss": 1.1896,
447
+ "step": 340
448
+ },
449
+ {
450
+ "epoch": 0.3,
451
+ "learning_rate": 1.9762388754481024e-05,
452
+ "loss": 1.1971,
453
+ "step": 345
454
+ },
455
+ {
456
+ "epoch": 0.31,
457
+ "learning_rate": 1.9751988916433494e-05,
458
+ "loss": 1.1909,
459
+ "step": 350
460
+ },
461
+ {
462
+ "epoch": 0.31,
463
+ "learning_rate": 1.9741369190585383e-05,
464
+ "loss": 1.19,
465
+ "step": 355
466
+ },
467
+ {
468
+ "epoch": 0.32,
469
+ "learning_rate": 1.9730529816390232e-05,
470
+ "loss": 1.1955,
471
+ "step": 360
472
+ },
473
+ {
474
+ "epoch": 0.32,
475
+ "learning_rate": 1.971947103825419e-05,
476
+ "loss": 1.1921,
477
+ "step": 365
478
+ },
479
+ {
480
+ "epoch": 0.32,
481
+ "learning_rate": 1.9708193105530537e-05,
482
+ "loss": 1.1863,
483
+ "step": 370
484
+ },
485
+ {
486
+ "epoch": 0.33,
487
+ "learning_rate": 1.969669627251405e-05,
488
+ "loss": 1.1778,
489
+ "step": 375
490
+ },
491
+ {
492
+ "epoch": 0.33,
493
+ "learning_rate": 1.9684980798435266e-05,
494
+ "loss": 1.1933,
495
+ "step": 380
496
+ },
497
+ {
498
+ "epoch": 0.34,
499
+ "learning_rate": 1.9673046947454634e-05,
500
+ "loss": 1.1831,
501
+ "step": 385
502
+ },
503
+ {
504
+ "epoch": 0.34,
505
+ "learning_rate": 1.9660894988656575e-05,
506
+ "loss": 1.1763,
507
+ "step": 390
508
+ },
509
+ {
510
+ "epoch": 0.35,
511
+ "learning_rate": 1.9648525196043402e-05,
512
+ "loss": 1.1778,
513
+ "step": 395
514
+ },
515
+ {
516
+ "epoch": 0.35,
517
+ "learning_rate": 1.9635937848529133e-05,
518
+ "loss": 1.185,
519
+ "step": 400
520
+ },
521
+ {
522
+ "epoch": 0.35,
523
+ "eval_loss": 1.191055178642273,
524
+ "eval_runtime": 424.901,
525
+ "eval_samples_per_second": 38.047,
526
+ "eval_steps_per_second": 1.191,
527
+ "step": 400
528
+ },
529
+ {
530
+ "epoch": 0.35,
531
+ "learning_rate": 1.962313322993323e-05,
532
+ "loss": 1.2061,
533
+ "step": 405
534
+ },
535
+ {
536
+ "epoch": 0.36,
537
+ "learning_rate": 1.961011162897417e-05,
538
+ "loss": 1.1976,
539
+ "step": 410
540
+ },
541
+ {
542
+ "epoch": 0.36,
543
+ "learning_rate": 1.9596873339262948e-05,
544
+ "loss": 1.184,
545
+ "step": 415
546
+ },
547
+ {
548
+ "epoch": 0.37,
549
+ "learning_rate": 1.9583418659296462e-05,
550
+ "loss": 1.1855,
551
+ "step": 420
552
+ },
553
+ {
554
+ "epoch": 0.37,
555
+ "learning_rate": 1.9569747892450768e-05,
556
+ "loss": 1.1769,
557
+ "step": 425
558
+ },
559
+ {
560
+ "epoch": 0.38,
561
+ "learning_rate": 1.955586134697426e-05,
562
+ "loss": 1.1843,
563
+ "step": 430
564
+ },
565
+ {
566
+ "epoch": 0.38,
567
+ "learning_rate": 1.9541759335980695e-05,
568
+ "loss": 1.1709,
569
+ "step": 435
570
+ },
571
+ {
572
+ "epoch": 0.39,
573
+ "learning_rate": 1.9527442177442154e-05,
574
+ "loss": 1.1852,
575
+ "step": 440
576
+ },
577
+ {
578
+ "epoch": 0.39,
579
+ "learning_rate": 1.951291019418186e-05,
580
+ "loss": 1.1749,
581
+ "step": 445
582
+ },
583
+ {
584
+ "epoch": 0.39,
585
+ "learning_rate": 1.94981637138669e-05,
586
+ "loss": 1.1996,
587
+ "step": 450
588
+ },
589
+ {
590
+ "epoch": 0.4,
591
+ "learning_rate": 1.9483203069000854e-05,
592
+ "loss": 1.1702,
593
+ "step": 455
594
+ },
595
+ {
596
+ "epoch": 0.4,
597
+ "learning_rate": 1.9468028596916265e-05,
598
+ "loss": 1.1689,
599
+ "step": 460
600
+ },
601
+ {
602
+ "epoch": 0.41,
603
+ "learning_rate": 1.9452640639767058e-05,
604
+ "loss": 1.1794,
605
+ "step": 465
606
+ },
607
+ {
608
+ "epoch": 0.41,
609
+ "learning_rate": 1.943703954452082e-05,
610
+ "loss": 1.1833,
611
+ "step": 470
612
+ },
613
+ {
614
+ "epoch": 0.42,
615
+ "learning_rate": 1.9421225662950975e-05,
616
+ "loss": 1.1773,
617
+ "step": 475
618
+ },
619
+ {
620
+ "epoch": 0.42,
621
+ "learning_rate": 1.940519935162885e-05,
622
+ "loss": 1.1847,
623
+ "step": 480
624
+ },
625
+ {
626
+ "epoch": 0.42,
627
+ "learning_rate": 1.9388960971915637e-05,
628
+ "loss": 1.151,
629
+ "step": 485
630
+ },
631
+ {
632
+ "epoch": 0.43,
633
+ "learning_rate": 1.9372510889954247e-05,
634
+ "loss": 1.1838,
635
+ "step": 490
636
+ },
637
+ {
638
+ "epoch": 0.43,
639
+ "learning_rate": 1.9355849476661055e-05,
640
+ "loss": 1.1672,
641
+ "step": 495
642
+ },
643
+ {
644
+ "epoch": 0.44,
645
+ "learning_rate": 1.933897710771752e-05,
646
+ "loss": 1.1546,
647
+ "step": 500
648
+ },
649
+ {
650
+ "epoch": 0.44,
651
+ "eval_loss": 1.1787223815917969,
652
+ "eval_runtime": 427.1249,
653
+ "eval_samples_per_second": 37.848,
654
+ "eval_steps_per_second": 1.185,
655
+ "step": 500
656
+ },
657
+ {
658
+ "epoch": 0.44,
659
+ "learning_rate": 1.9321894163561753e-05,
660
+ "loss": 1.1683,
661
+ "step": 505
662
+ },
663
+ {
664
+ "epoch": 0.45,
665
+ "learning_rate": 1.9304601029379884e-05,
666
+ "loss": 1.1716,
667
+ "step": 510
668
+ },
669
+ {
670
+ "epoch": 0.45,
671
+ "learning_rate": 1.9287098095097434e-05,
672
+ "loss": 1.1793,
673
+ "step": 515
674
+ },
675
+ {
676
+ "epoch": 0.46,
677
+ "learning_rate": 1.926938575537048e-05,
678
+ "loss": 1.1494,
679
+ "step": 520
680
+ },
681
+ {
682
+ "epoch": 0.46,
683
+ "learning_rate": 1.9251464409576766e-05,
684
+ "loss": 1.169,
685
+ "step": 525
686
+ },
687
+ {
688
+ "epoch": 0.46,
689
+ "learning_rate": 1.9233334461806724e-05,
690
+ "loss": 1.1595,
691
+ "step": 530
692
+ },
693
+ {
694
+ "epoch": 0.47,
695
+ "learning_rate": 1.921499632085433e-05,
696
+ "loss": 1.1558,
697
+ "step": 535
698
+ },
699
+ {
700
+ "epoch": 0.47,
701
+ "learning_rate": 1.91964504002079e-05,
702
+ "loss": 1.1704,
703
+ "step": 540
704
+ },
705
+ {
706
+ "epoch": 0.48,
707
+ "learning_rate": 1.9177697118040763e-05,
708
+ "loss": 1.1593,
709
+ "step": 545
710
+ },
711
+ {
712
+ "epoch": 0.48,
713
+ "learning_rate": 1.9158736897201843e-05,
714
+ "loss": 1.1715,
715
+ "step": 550
716
+ },
717
+ {
718
+ "epoch": 0.49,
719
+ "learning_rate": 1.9139570165206106e-05,
720
+ "loss": 1.1723,
721
+ "step": 555
722
+ },
723
+ {
724
+ "epoch": 0.49,
725
+ "learning_rate": 1.912019735422494e-05,
726
+ "loss": 1.179,
727
+ "step": 560
728
+ },
729
+ {
730
+ "epoch": 0.49,
731
+ "learning_rate": 1.910061890107639e-05,
732
+ "loss": 1.1761,
733
+ "step": 565
734
+ },
735
+ {
736
+ "epoch": 0.5,
737
+ "learning_rate": 1.9080835247215335e-05,
738
+ "loss": 1.1675,
739
+ "step": 570
740
+ },
741
+ {
742
+ "epoch": 0.5,
743
+ "learning_rate": 1.9060846838723505e-05,
744
+ "loss": 1.1686,
745
+ "step": 575
746
+ },
747
+ {
748
+ "epoch": 0.51,
749
+ "learning_rate": 1.9040654126299443e-05,
750
+ "loss": 1.153,
751
+ "step": 580
752
+ },
753
+ {
754
+ "epoch": 0.51,
755
+ "learning_rate": 1.902025756524833e-05,
756
+ "loss": 1.1687,
757
+ "step": 585
758
+ },
759
+ {
760
+ "epoch": 0.52,
761
+ "learning_rate": 1.8999657615471737e-05,
762
+ "loss": 1.1599,
763
+ "step": 590
764
+ },
765
+ {
766
+ "epoch": 0.52,
767
+ "learning_rate": 1.897885474145723e-05,
768
+ "loss": 1.1848,
769
+ "step": 595
770
+ },
771
+ {
772
+ "epoch": 0.53,
773
+ "learning_rate": 1.8957849412267914e-05,
774
+ "loss": 1.1741,
775
+ "step": 600
776
+ },
777
+ {
778
+ "epoch": 0.53,
779
+ "eval_loss": 1.1691807508468628,
780
+ "eval_runtime": 426.8549,
781
+ "eval_samples_per_second": 37.872,
782
+ "eval_steps_per_second": 1.185,
783
+ "step": 600
784
+ },
785
+ {
786
+ "epoch": 0.53,
787
+ "learning_rate": 1.893664210153186e-05,
788
+ "loss": 1.1447,
789
+ "step": 605
790
+ },
791
+ {
792
+ "epoch": 0.53,
793
+ "learning_rate": 1.8915233287431403e-05,
794
+ "loss": 1.1569,
795
+ "step": 610
796
+ },
797
+ {
798
+ "epoch": 0.54,
799
+ "learning_rate": 1.8893623452692392e-05,
800
+ "loss": 1.1776,
801
+ "step": 615
802
+ },
803
+ {
804
+ "epoch": 0.54,
805
+ "learning_rate": 1.8871813084573275e-05,
806
+ "loss": 1.1536,
807
+ "step": 620
808
+ },
809
+ {
810
+ "epoch": 0.55,
811
+ "learning_rate": 1.884980267485413e-05,
812
+ "loss": 1.1573,
813
+ "step": 625
814
+ },
815
+ {
816
+ "epoch": 0.55,
817
+ "learning_rate": 1.8827592719825577e-05,
818
+ "loss": 1.1575,
819
+ "step": 630
820
+ },
821
+ {
822
+ "epoch": 0.56,
823
+ "learning_rate": 1.880518372027757e-05,
824
+ "loss": 1.177,
825
+ "step": 635
826
+ },
827
+ {
828
+ "epoch": 0.56,
829
+ "learning_rate": 1.8782576181488135e-05,
830
+ "loss": 1.168,
831
+ "step": 640
832
+ },
833
+ {
834
+ "epoch": 0.57,
835
+ "learning_rate": 1.8759770613211946e-05,
836
+ "loss": 1.1651,
837
+ "step": 645
838
+ },
839
+ {
840
+ "epoch": 0.57,
841
+ "learning_rate": 1.8736767529668855e-05,
842
+ "loss": 1.1548,
843
+ "step": 650
844
+ },
845
+ {
846
+ "epoch": 0.57,
847
+ "learning_rate": 1.8713567449532277e-05,
848
+ "loss": 1.1605,
849
+ "step": 655
850
+ },
851
+ {
852
+ "epoch": 0.58,
853
+ "learning_rate": 1.8690170895917513e-05,
854
+ "loss": 1.1651,
855
+ "step": 660
856
+ },
857
+ {
858
+ "epoch": 0.58,
859
+ "learning_rate": 1.8666578396369947e-05,
860
+ "loss": 1.1562,
861
+ "step": 665
862
+ },
863
+ {
864
+ "epoch": 0.59,
865
+ "learning_rate": 1.8642790482853144e-05,
866
+ "loss": 1.1434,
867
+ "step": 670
868
+ },
869
+ {
870
+ "epoch": 0.59,
871
+ "learning_rate": 1.8618807691736867e-05,
872
+ "loss": 1.1667,
873
+ "step": 675
874
+ },
875
+ {
876
+ "epoch": 0.6,
877
+ "learning_rate": 1.8594630563784983e-05,
878
+ "loss": 1.1737,
879
+ "step": 680
880
+ },
881
+ {
882
+ "epoch": 0.6,
883
+ "learning_rate": 1.8570259644143256e-05,
884
+ "loss": 1.1543,
885
+ "step": 685
886
+ },
887
+ {
888
+ "epoch": 0.6,
889
+ "learning_rate": 1.854569548232707e-05,
890
+ "loss": 1.1558,
891
+ "step": 690
892
+ },
893
+ {
894
+ "epoch": 0.61,
895
+ "learning_rate": 1.8520938632209032e-05,
896
+ "loss": 1.1488,
897
+ "step": 695
898
+ },
899
+ {
900
+ "epoch": 0.61,
901
+ "learning_rate": 1.8495989652006485e-05,
902
+ "loss": 1.1612,
903
+ "step": 700
904
+ },
905
+ {
906
+ "epoch": 0.61,
907
+ "eval_loss": 1.1612565517425537,
908
+ "eval_runtime": 426.665,
909
+ "eval_samples_per_second": 37.889,
910
+ "eval_steps_per_second": 1.186,
911
+ "step": 700
912
+ },
913
+ {
914
+ "epoch": 0.62,
915
+ "learning_rate": 1.847084910426891e-05,
916
+ "loss": 1.1588,
917
+ "step": 705
918
+ },
919
+ {
920
+ "epoch": 0.62,
921
+ "learning_rate": 1.8445517555865265e-05,
922
+ "loss": 1.1313,
923
+ "step": 710
924
+ },
925
+ {
926
+ "epoch": 0.63,
927
+ "learning_rate": 1.8419995577971186e-05,
928
+ "loss": 1.1387,
929
+ "step": 715
930
+ },
931
+ {
932
+ "epoch": 0.63,
933
+ "learning_rate": 1.839428374605611e-05,
934
+ "loss": 1.1686,
935
+ "step": 720
936
+ },
937
+ {
938
+ "epoch": 0.64,
939
+ "learning_rate": 1.8368382639870307e-05,
940
+ "loss": 1.1371,
941
+ "step": 725
942
+ },
943
+ {
944
+ "epoch": 0.64,
945
+ "learning_rate": 1.8342292843431794e-05,
946
+ "loss": 1.1781,
947
+ "step": 730
948
+ },
949
+ {
950
+ "epoch": 0.64,
951
+ "learning_rate": 1.831601494501318e-05,
952
+ "loss": 1.1627,
953
+ "step": 735
954
+ },
955
+ {
956
+ "epoch": 0.65,
957
+ "learning_rate": 1.8289549537128395e-05,
958
+ "loss": 1.1508,
959
+ "step": 740
960
+ },
961
+ {
962
+ "epoch": 0.65,
963
+ "learning_rate": 1.8262897216519334e-05,
964
+ "loss": 1.1641,
965
+ "step": 745
966
+ },
967
+ {
968
+ "epoch": 0.66,
969
+ "learning_rate": 1.82360585841424e-05,
970
+ "loss": 1.1495,
971
+ "step": 750
972
+ },
973
+ {
974
+ "epoch": 0.66,
975
+ "learning_rate": 1.8209034245154944e-05,
976
+ "loss": 1.1637,
977
+ "step": 755
978
+ },
979
+ {
980
+ "epoch": 0.67,
981
+ "learning_rate": 1.818182480890164e-05,
982
+ "loss": 1.1523,
983
+ "step": 760
984
+ },
985
+ {
986
+ "epoch": 0.67,
987
+ "learning_rate": 1.8154430888900728e-05,
988
+ "loss": 1.1368,
989
+ "step": 765
990
+ },
991
+ {
992
+ "epoch": 0.67,
993
+ "learning_rate": 1.8126853102830195e-05,
994
+ "loss": 1.1374,
995
+ "step": 770
996
+ },
997
+ {
998
+ "epoch": 0.68,
999
+ "learning_rate": 1.8099092072513826e-05,
1000
+ "loss": 1.1433,
1001
+ "step": 775
1002
+ },
1003
+ {
1004
+ "epoch": 0.68,
1005
+ "learning_rate": 1.8071148423907198e-05,
1006
+ "loss": 1.1357,
1007
+ "step": 780
1008
+ },
1009
+ {
1010
+ "epoch": 0.69,
1011
+ "learning_rate": 1.804302278708358e-05,
1012
+ "loss": 1.1536,
1013
+ "step": 785
1014
+ },
1015
+ {
1016
+ "epoch": 0.69,
1017
+ "learning_rate": 1.8014715796219698e-05,
1018
+ "loss": 1.1308,
1019
+ "step": 790
1020
+ },
1021
+ {
1022
+ "epoch": 0.7,
1023
+ "learning_rate": 1.7986228089581443e-05,
1024
+ "loss": 1.1589,
1025
+ "step": 795
1026
+ },
1027
+ {
1028
+ "epoch": 0.7,
1029
+ "learning_rate": 1.7957560309509498e-05,
1030
+ "loss": 1.1453,
1031
+ "step": 800
1032
+ },
1033
+ {
1034
+ "epoch": 0.7,
1035
+ "eval_loss": 1.1546913385391235,
1036
+ "eval_runtime": 427.2728,
1037
+ "eval_samples_per_second": 37.835,
1038
+ "eval_steps_per_second": 1.184,
1039
+ "step": 800
1040
+ },
1041
+ {
1042
+ "epoch": 0.71,
1043
+ "learning_rate": 1.7928713102404825e-05,
1044
+ "loss": 1.1298,
1045
+ "step": 805
1046
+ },
1047
+ {
1048
+ "epoch": 0.71,
1049
+ "learning_rate": 1.789968711871413e-05,
1050
+ "loss": 1.1348,
1051
+ "step": 810
1052
+ },
1053
+ {
1054
+ "epoch": 0.71,
1055
+ "learning_rate": 1.7870483012915146e-05,
1056
+ "loss": 1.1322,
1057
+ "step": 815
1058
+ },
1059
+ {
1060
+ "epoch": 0.72,
1061
+ "learning_rate": 1.784110144350192e-05,
1062
+ "loss": 1.1252,
1063
+ "step": 820
1064
+ },
1065
+ {
1066
+ "epoch": 0.72,
1067
+ "learning_rate": 1.7811543072969938e-05,
1068
+ "loss": 1.1476,
1069
+ "step": 825
1070
+ },
1071
+ {
1072
+ "epoch": 0.73,
1073
+ "learning_rate": 1.7781808567801206e-05,
1074
+ "loss": 1.1424,
1075
+ "step": 830
1076
+ },
1077
+ {
1078
+ "epoch": 0.73,
1079
+ "learning_rate": 1.77518985984492e-05,
1080
+ "loss": 1.167,
1081
+ "step": 835
1082
+ },
1083
+ {
1084
+ "epoch": 0.74,
1085
+ "learning_rate": 1.7721813839323778e-05,
1086
+ "loss": 1.1445,
1087
+ "step": 840
1088
+ },
1089
+ {
1090
+ "epoch": 0.74,
1091
+ "learning_rate": 1.7691554968775942e-05,
1092
+ "loss": 1.1596,
1093
+ "step": 845
1094
+ },
1095
+ {
1096
+ "epoch": 0.74,
1097
+ "learning_rate": 1.7661122669082566e-05,
1098
+ "loss": 1.1383,
1099
+ "step": 850
1100
+ },
1101
+ {
1102
+ "epoch": 0.75,
1103
+ "learning_rate": 1.7630517626431002e-05,
1104
+ "loss": 1.1507,
1105
+ "step": 855
1106
+ },
1107
+ {
1108
+ "epoch": 0.75,
1109
+ "learning_rate": 1.759974053090361e-05,
1110
+ "loss": 1.1455,
1111
+ "step": 860
1112
+ },
1113
+ {
1114
+ "epoch": 0.76,
1115
+ "learning_rate": 1.7568792076462194e-05,
1116
+ "loss": 1.1606,
1117
+ "step": 865
1118
+ },
1119
+ {
1120
+ "epoch": 0.76,
1121
+ "learning_rate": 1.7537672960932363e-05,
1122
+ "loss": 1.152,
1123
+ "step": 870
1124
+ },
1125
+ {
1126
+ "epoch": 0.77,
1127
+ "learning_rate": 1.750638388598778e-05,
1128
+ "loss": 1.1334,
1129
+ "step": 875
1130
+ },
1131
+ {
1132
+ "epoch": 0.77,
1133
+ "learning_rate": 1.7474925557134375e-05,
1134
+ "loss": 1.1466,
1135
+ "step": 880
1136
+ },
1137
+ {
1138
+ "epoch": 0.78,
1139
+ "learning_rate": 1.744329868369439e-05,
1140
+ "loss": 1.1321,
1141
+ "step": 885
1142
+ },
1143
+ {
1144
+ "epoch": 0.78,
1145
+ "learning_rate": 1.741150397879042e-05,
1146
+ "loss": 1.1321,
1147
+ "step": 890
1148
+ },
1149
+ {
1150
+ "epoch": 0.78,
1151
+ "learning_rate": 1.7379542159329334e-05,
1152
+ "loss": 1.1377,
1153
+ "step": 895
1154
+ },
1155
+ {
1156
+ "epoch": 0.79,
1157
+ "learning_rate": 1.734741394598607e-05,
1158
+ "loss": 1.141,
1159
+ "step": 900
1160
+ },
1161
+ {
1162
+ "epoch": 0.79,
1163
+ "eval_loss": 1.1488687992095947,
1164
+ "eval_runtime": 426.9549,
1165
+ "eval_samples_per_second": 37.863,
1166
+ "eval_steps_per_second": 1.185,
1167
+ "step": 900
1168
+ },
1169
+ {
1170
+ "epoch": 0.79,
1171
+ "learning_rate": 1.7315120063187446e-05,
1172
+ "loss": 1.1465,
1173
+ "step": 905
1174
+ },
1175
+ {
1176
+ "epoch": 0.8,
1177
+ "learning_rate": 1.7282661239095774e-05,
1178
+ "loss": 1.1435,
1179
+ "step": 910
1180
+ },
1181
+ {
1182
+ "epoch": 0.8,
1183
+ "learning_rate": 1.7250038205592474e-05,
1184
+ "loss": 1.1562,
1185
+ "step": 915
1186
+ },
1187
+ {
1188
+ "epoch": 0.81,
1189
+ "learning_rate": 1.721725169826155e-05,
1190
+ "loss": 1.1324,
1191
+ "step": 920
1192
+ },
1193
+ {
1194
+ "epoch": 0.81,
1195
+ "learning_rate": 1.7184302456373013e-05,
1196
+ "loss": 1.1414,
1197
+ "step": 925
1198
+ },
1199
+ {
1200
+ "epoch": 0.81,
1201
+ "learning_rate": 1.7151191222866225e-05,
1202
+ "loss": 1.1318,
1203
+ "step": 930
1204
+ },
1205
+ {
1206
+ "epoch": 0.82,
1207
+ "learning_rate": 1.711791874433311e-05,
1208
+ "loss": 1.155,
1209
+ "step": 935
1210
+ },
1211
+ {
1212
+ "epoch": 0.82,
1213
+ "learning_rate": 1.7084485771001374e-05,
1214
+ "loss": 1.1459,
1215
+ "step": 940
1216
+ },
1217
+ {
1218
+ "epoch": 0.83,
1219
+ "learning_rate": 1.705089305671753e-05,
1220
+ "loss": 1.1466,
1221
+ "step": 945
1222
+ },
1223
+ {
1224
+ "epoch": 0.83,
1225
+ "learning_rate": 1.7017141358929953e-05,
1226
+ "loss": 1.156,
1227
+ "step": 950
1228
+ },
1229
+ {
1230
+ "epoch": 0.84,
1231
+ "learning_rate": 1.6983231438671762e-05,
1232
+ "loss": 1.1527,
1233
+ "step": 955
1234
+ },
1235
+ {
1236
+ "epoch": 0.84,
1237
+ "learning_rate": 1.6949164060543684e-05,
1238
+ "loss": 1.1354,
1239
+ "step": 960
1240
+ },
1241
+ {
1242
+ "epoch": 0.85,
1243
+ "learning_rate": 1.69149399926968e-05,
1244
+ "loss": 1.1307,
1245
+ "step": 965
1246
+ },
1247
+ {
1248
+ "epoch": 0.85,
1249
+ "learning_rate": 1.688056000681523e-05,
1250
+ "loss": 1.1388,
1251
+ "step": 970
1252
+ },
1253
+ {
1254
+ "epoch": 0.85,
1255
+ "learning_rate": 1.6846024878098738e-05,
1256
+ "loss": 1.1526,
1257
+ "step": 975
1258
+ },
1259
+ {
1260
+ "epoch": 0.86,
1261
+ "learning_rate": 1.6811335385245247e-05,
1262
+ "loss": 1.1474,
1263
+ "step": 980
1264
+ },
1265
+ {
1266
+ "epoch": 0.86,
1267
+ "learning_rate": 1.6776492310433278e-05,
1268
+ "loss": 1.131,
1269
+ "step": 985
1270
+ },
1271
+ {
1272
+ "epoch": 0.87,
1273
+ "learning_rate": 1.6741496439304315e-05,
1274
+ "loss": 1.1343,
1275
+ "step": 990
1276
+ },
1277
+ {
1278
+ "epoch": 0.87,
1279
+ "learning_rate": 1.6706348560945107e-05,
1280
+ "loss": 1.1503,
1281
+ "step": 995
1282
+ },
1283
+ {
1284
+ "epoch": 0.88,
1285
+ "learning_rate": 1.6671049467869844e-05,
1286
+ "loss": 1.1247,
1287
+ "step": 1000
1288
+ },
1289
+ {
1290
+ "epoch": 0.88,
1291
+ "eval_loss": 1.1437697410583496,
1292
+ "eval_runtime": 426.4692,
1293
+ "eval_samples_per_second": 37.907,
1294
+ "eval_steps_per_second": 1.186,
1295
+ "step": 1000
1296
+ },
1297
+ {
1298
+ "epoch": 0.88,
1299
+ "learning_rate": 1.6635599956002307e-05,
1300
+ "loss": 1.1257,
1301
+ "step": 1005
1302
+ },
1303
+ {
1304
+ "epoch": 0.88,
1305
+ "learning_rate": 1.6600000824657935e-05,
1306
+ "loss": 1.1317,
1307
+ "step": 1010
1308
+ },
1309
+ {
1310
+ "epoch": 0.89,
1311
+ "learning_rate": 1.6564252876525766e-05,
1312
+ "loss": 1.1264,
1313
+ "step": 1015
1314
+ },
1315
+ {
1316
+ "epoch": 0.89,
1317
+ "learning_rate": 1.6528356917650378e-05,
1318
+ "loss": 1.1238,
1319
+ "step": 1020
1320
+ },
1321
+ {
1322
+ "epoch": 0.9,
1323
+ "learning_rate": 1.6492313757413673e-05,
1324
+ "loss": 1.1424,
1325
+ "step": 1025
1326
+ },
1327
+ {
1328
+ "epoch": 0.9,
1329
+ "learning_rate": 1.645612420851667e-05,
1330
+ "loss": 1.1222,
1331
+ "step": 1030
1332
+ },
1333
+ {
1334
+ "epoch": 0.91,
1335
+ "learning_rate": 1.641978908696114e-05,
1336
+ "loss": 1.133,
1337
+ "step": 1035
1338
+ },
1339
+ {
1340
+ "epoch": 0.91,
1341
+ "learning_rate": 1.638330921203124e-05,
1342
+ "loss": 1.1423,
1343
+ "step": 1040
1344
+ },
1345
+ {
1346
+ "epoch": 0.92,
1347
+ "learning_rate": 1.634668540627502e-05,
1348
+ "loss": 1.1403,
1349
+ "step": 1045
1350
+ },
1351
+ {
1352
+ "epoch": 0.92,
1353
+ "learning_rate": 1.6309918495485875e-05,
1354
+ "loss": 1.1327,
1355
+ "step": 1050
1356
+ },
1357
+ {
1358
+ "epoch": 0.92,
1359
+ "learning_rate": 1.6273009308683945e-05,
1360
+ "loss": 1.1269,
1361
+ "step": 1055
1362
+ },
1363
+ {
1364
+ "epoch": 0.93,
1365
+ "learning_rate": 1.6235958678097396e-05,
1366
+ "loss": 1.1708,
1367
+ "step": 1060
1368
+ },
1369
+ {
1370
+ "epoch": 0.93,
1371
+ "learning_rate": 1.6198767439143677e-05,
1372
+ "loss": 1.1402,
1373
+ "step": 1065
1374
+ },
1375
+ {
1376
+ "epoch": 0.94,
1377
+ "learning_rate": 1.616143643041067e-05,
1378
+ "loss": 1.13,
1379
+ "step": 1070
1380
+ },
1381
+ {
1382
+ "epoch": 0.94,
1383
+ "learning_rate": 1.6123966493637785e-05,
1384
+ "loss": 1.125,
1385
+ "step": 1075
1386
+ },
1387
+ {
1388
+ "epoch": 0.95,
1389
+ "learning_rate": 1.608635847369698e-05,
1390
+ "loss": 1.1341,
1391
+ "step": 1080
1392
+ },
1393
+ {
1394
+ "epoch": 0.95,
1395
+ "learning_rate": 1.6048613218573718e-05,
1396
+ "loss": 1.12,
1397
+ "step": 1085
1398
+ },
1399
+ {
1400
+ "epoch": 0.95,
1401
+ "learning_rate": 1.6010731579347833e-05,
1402
+ "loss": 1.1188,
1403
+ "step": 1090
1404
+ },
1405
+ {
1406
+ "epoch": 0.96,
1407
+ "learning_rate": 1.5972714410174345e-05,
1408
+ "loss": 1.1362,
1409
+ "step": 1095
1410
+ },
1411
+ {
1412
+ "epoch": 0.96,
1413
+ "learning_rate": 1.5934562568264214e-05,
1414
+ "loss": 1.1485,
1415
+ "step": 1100
1416
+ },
1417
+ {
1418
+ "epoch": 0.96,
1419
+ "eval_loss": 1.1392369270324707,
1420
+ "eval_runtime": 427.0259,
1421
+ "eval_samples_per_second": 37.857,
1422
+ "eval_steps_per_second": 1.185,
1423
+ "step": 1100
1424
+ },
1425
+ {
1426
+ "epoch": 0.97,
1427
+ "learning_rate": 1.5896276913864985e-05,
1428
+ "loss": 1.1422,
1429
+ "step": 1105
1430
+ },
1431
+ {
1432
+ "epoch": 0.97,
1433
+ "learning_rate": 1.5857858310241424e-05,
1434
+ "loss": 1.1151,
1435
+ "step": 1110
1436
+ },
1437
+ {
1438
+ "epoch": 0.98,
1439
+ "learning_rate": 1.5819307623656022e-05,
1440
+ "loss": 1.1098,
1441
+ "step": 1115
1442
+ },
1443
+ {
1444
+ "epoch": 0.98,
1445
+ "learning_rate": 1.578062572334948e-05,
1446
+ "loss": 1.1257,
1447
+ "step": 1120
1448
+ },
1449
+ {
1450
+ "epoch": 0.99,
1451
+ "learning_rate": 1.5741813481521108e-05,
1452
+ "loss": 1.1198,
1453
+ "step": 1125
1454
+ },
1455
+ {
1456
+ "epoch": 0.99,
1457
+ "learning_rate": 1.5702871773309147e-05,
1458
+ "loss": 1.1381,
1459
+ "step": 1130
1460
+ },
1461
+ {
1462
+ "epoch": 0.99,
1463
+ "learning_rate": 1.5663801476771058e-05,
1464
+ "loss": 1.1263,
1465
+ "step": 1135
1466
+ },
1467
+ {
1468
+ "epoch": 1.0,
1469
+ "learning_rate": 1.5624603472863703e-05,
1470
+ "loss": 1.137,
1471
+ "step": 1140
1472
+ },
1473
+ {
1474
+ "epoch": 1.0,
1475
+ "learning_rate": 1.558527864542349e-05,
1476
+ "loss": 1.1037,
1477
+ "step": 1145
1478
+ },
1479
+ {
1480
+ "epoch": 1.01,
1481
+ "learning_rate": 1.554582788114645e-05,
1482
+ "loss": 1.0475,
1483
+ "step": 1150
1484
+ },
1485
+ {
1486
+ "epoch": 1.01,
1487
+ "learning_rate": 1.5506252069568225e-05,
1488
+ "loss": 1.0786,
1489
+ "step": 1155
1490
+ },
1491
+ {
1492
+ "epoch": 1.02,
1493
+ "learning_rate": 1.5466552103044035e-05,
1494
+ "loss": 1.0866,
1495
+ "step": 1160
1496
+ },
1497
+ {
1498
+ "epoch": 1.02,
1499
+ "learning_rate": 1.542672887672854e-05,
1500
+ "loss": 1.0706,
1501
+ "step": 1165
1502
+ },
1503
+ {
1504
+ "epoch": 1.02,
1505
+ "learning_rate": 1.5386783288555655e-05,
1506
+ "loss": 1.0856,
1507
+ "step": 1170
1508
+ },
1509
+ {
1510
+ "epoch": 1.03,
1511
+ "learning_rate": 1.534671623921832e-05,
1512
+ "loss": 1.0812,
1513
+ "step": 1175
1514
+ },
1515
+ {
1516
+ "epoch": 1.03,
1517
+ "learning_rate": 1.530652863214818e-05,
1518
+ "loss": 1.0815,
1519
+ "step": 1180
1520
+ },
1521
+ {
1522
+ "epoch": 1.04,
1523
+ "learning_rate": 1.52662213734952e-05,
1524
+ "loss": 1.0796,
1525
+ "step": 1185
1526
+ },
1527
+ {
1528
+ "epoch": 1.04,
1529
+ "learning_rate": 1.5225795372107275e-05,
1530
+ "loss": 1.0812,
1531
+ "step": 1190
1532
+ },
1533
+ {
1534
+ "epoch": 1.05,
1535
+ "learning_rate": 1.5185251539509685e-05,
1536
+ "loss": 1.0921,
1537
+ "step": 1195
1538
+ },
1539
+ {
1540
+ "epoch": 1.05,
1541
+ "learning_rate": 1.5144590789884581e-05,
1542
+ "loss": 1.067,
1543
+ "step": 1200
1544
+ },
1545
+ {
1546
+ "epoch": 1.05,
1547
+ "eval_loss": 1.138688564300537,
1548
+ "eval_runtime": 427.7454,
1549
+ "eval_samples_per_second": 37.794,
1550
+ "eval_steps_per_second": 1.183,
1551
+ "step": 1200
1552
+ },
1553
+ {
1554
+ "epoch": 1.06,
1555
+ "learning_rate": 1.5103814040050358e-05,
1556
+ "loss": 1.0931,
1557
+ "step": 1205
1558
+ },
1559
+ {
1560
+ "epoch": 1.06,
1561
+ "learning_rate": 1.5062922209440976e-05,
1562
+ "loss": 1.0611,
1563
+ "step": 1210
1564
+ },
1565
+ {
1566
+ "epoch": 1.06,
1567
+ "learning_rate": 1.5021916220085246e-05,
1568
+ "loss": 1.0814,
1569
+ "step": 1215
1570
+ },
1571
+ {
1572
+ "epoch": 1.07,
1573
+ "learning_rate": 1.4980796996586024e-05,
1574
+ "loss": 1.0766,
1575
+ "step": 1220
1576
+ },
1577
+ {
1578
+ "epoch": 1.07,
1579
+ "learning_rate": 1.4939565466099365e-05,
1580
+ "loss": 1.0847,
1581
+ "step": 1225
1582
+ },
1583
+ {
1584
+ "epoch": 1.08,
1585
+ "learning_rate": 1.4898222558313633e-05,
1586
+ "loss": 1.068,
1587
+ "step": 1230
1588
+ },
1589
+ {
1590
+ "epoch": 1.08,
1591
+ "learning_rate": 1.4856769205428514e-05,
1592
+ "loss": 1.066,
1593
+ "step": 1235
1594
+ },
1595
+ {
1596
+ "epoch": 1.09,
1597
+ "learning_rate": 1.4815206342134017e-05,
1598
+ "loss": 1.0786,
1599
+ "step": 1240
1600
+ },
1601
+ {
1602
+ "epoch": 1.09,
1603
+ "learning_rate": 1.4773534905589388e-05,
1604
+ "loss": 1.0753,
1605
+ "step": 1245
1606
+ },
1607
+ {
1608
+ "epoch": 1.1,
1609
+ "learning_rate": 1.473175583540199e-05,
1610
+ "loss": 1.0793,
1611
+ "step": 1250
1612
+ },
1613
+ {
1614
+ "epoch": 1.1,
1615
+ "learning_rate": 1.4689870073606093e-05,
1616
+ "loss": 1.0931,
1617
+ "step": 1255
1618
+ },
1619
+ {
1620
+ "epoch": 1.1,
1621
+ "learning_rate": 1.464787856464167e-05,
1622
+ "loss": 1.0829,
1623
+ "step": 1260
1624
+ },
1625
+ {
1626
+ "epoch": 1.11,
1627
+ "learning_rate": 1.4605782255333058e-05,
1628
+ "loss": 1.0664,
1629
+ "step": 1265
1630
+ },
1631
+ {
1632
+ "epoch": 1.11,
1633
+ "learning_rate": 1.4563582094867649e-05,
1634
+ "loss": 1.0578,
1635
+ "step": 1270
1636
+ },
1637
+ {
1638
+ "epoch": 1.12,
1639
+ "learning_rate": 1.4521279034774465e-05,
1640
+ "loss": 1.0741,
1641
+ "step": 1275
1642
+ },
1643
+ {
1644
+ "epoch": 1.12,
1645
+ "learning_rate": 1.4478874028902702e-05,
1646
+ "loss": 1.0875,
1647
+ "step": 1280
1648
+ },
1649
+ {
1650
+ "epoch": 1.13,
1651
+ "learning_rate": 1.443636803340024e-05,
1652
+ "loss": 1.0798,
1653
+ "step": 1285
1654
+ },
1655
+ {
1656
+ "epoch": 1.13,
1657
+ "learning_rate": 1.4393762006692065e-05,
1658
+ "loss": 1.0807,
1659
+ "step": 1290
1660
+ },
1661
+ {
1662
+ "epoch": 1.13,
1663
+ "learning_rate": 1.435105690945867e-05,
1664
+ "loss": 1.0673,
1665
+ "step": 1295
1666
+ },
1667
+ {
1668
+ "epoch": 1.14,
1669
+ "learning_rate": 1.4308253704614386e-05,
1670
+ "loss": 1.0694,
1671
+ "step": 1300
1672
+ },
1673
+ {
1674
+ "epoch": 1.14,
1675
+ "eval_loss": 1.1368060111999512,
1676
+ "eval_runtime": 425.836,
1677
+ "eval_samples_per_second": 37.963,
1678
+ "eval_steps_per_second": 1.188,
1679
+ "step": 1300
1680
+ },
1681
+ {
1682
+ "epoch": 1.14,
1683
+ "learning_rate": 1.4265353357285675e-05,
1684
+ "loss": 1.0752,
1685
+ "step": 1305
1686
+ },
1687
+ {
1688
+ "epoch": 1.15,
1689
+ "learning_rate": 1.422235683478937e-05,
1690
+ "loss": 1.0621,
1691
+ "step": 1310
1692
+ },
1693
+ {
1694
+ "epoch": 1.15,
1695
+ "learning_rate": 1.4179265106610863e-05,
1696
+ "loss": 1.0819,
1697
+ "step": 1315
1698
+ },
1699
+ {
1700
+ "epoch": 1.16,
1701
+ "learning_rate": 1.4136079144382234e-05,
1702
+ "loss": 1.0662,
1703
+ "step": 1320
1704
+ },
1705
+ {
1706
+ "epoch": 1.16,
1707
+ "learning_rate": 1.4092799921860364e-05,
1708
+ "loss": 1.0783,
1709
+ "step": 1325
1710
+ },
1711
+ {
1712
+ "epoch": 1.17,
1713
+ "learning_rate": 1.404942841490495e-05,
1714
+ "loss": 1.0638,
1715
+ "step": 1330
1716
+ },
1717
+ {
1718
+ "epoch": 1.17,
1719
+ "learning_rate": 1.400596560145654e-05,
1720
+ "loss": 1.0746,
1721
+ "step": 1335
1722
+ },
1723
+ {
1724
+ "epoch": 1.17,
1725
+ "learning_rate": 1.3962412461514446e-05,
1726
+ "loss": 1.0756,
1727
+ "step": 1340
1728
+ },
1729
+ {
1730
+ "epoch": 1.18,
1731
+ "learning_rate": 1.3918769977114655e-05,
1732
+ "loss": 1.0994,
1733
+ "step": 1345
1734
+ },
1735
+ {
1736
+ "epoch": 1.18,
1737
+ "learning_rate": 1.3875039132307712e-05,
1738
+ "loss": 1.0576,
1739
+ "step": 1350
1740
+ },
1741
+ {
1742
+ "epoch": 1.19,
1743
+ "learning_rate": 1.3831220913136503e-05,
1744
+ "loss": 1.0816,
1745
+ "step": 1355
1746
+ },
1747
+ {
1748
+ "epoch": 1.19,
1749
+ "learning_rate": 1.3787316307614021e-05,
1750
+ "loss": 1.0631,
1751
+ "step": 1360
1752
+ },
1753
+ {
1754
+ "epoch": 1.2,
1755
+ "learning_rate": 1.3743326305701108e-05,
1756
+ "loss": 1.0889,
1757
+ "step": 1365
1758
+ },
1759
+ {
1760
+ "epoch": 1.2,
1761
+ "learning_rate": 1.3699251899284127e-05,
1762
+ "loss": 1.0756,
1763
+ "step": 1370
1764
+ },
1765
+ {
1766
+ "epoch": 1.2,
1767
+ "learning_rate": 1.3655094082152585e-05,
1768
+ "loss": 1.0798,
1769
+ "step": 1375
1770
+ },
1771
+ {
1772
+ "epoch": 1.21,
1773
+ "learning_rate": 1.3610853849976736e-05,
1774
+ "loss": 1.0778,
1775
+ "step": 1380
1776
+ },
1777
+ {
1778
+ "epoch": 1.21,
1779
+ "learning_rate": 1.356653220028513e-05,
1780
+ "loss": 1.0706,
1781
+ "step": 1385
1782
+ },
1783
+ {
1784
+ "epoch": 1.22,
1785
+ "learning_rate": 1.3522130132442115e-05,
1786
+ "loss": 1.0733,
1787
+ "step": 1390
1788
+ },
1789
+ {
1790
+ "epoch": 1.22,
1791
+ "learning_rate": 1.3477648647625312e-05,
1792
+ "loss": 1.0749,
1793
+ "step": 1395
1794
+ },
1795
+ {
1796
+ "epoch": 1.23,
1797
+ "learning_rate": 1.3433088748803033e-05,
1798
+ "loss": 1.0814,
1799
+ "step": 1400
1800
+ },
1801
+ {
1802
+ "epoch": 1.23,
1803
+ "eval_loss": 1.1340500116348267,
1804
+ "eval_runtime": 427.2423,
1805
+ "eval_samples_per_second": 37.838,
1806
+ "eval_steps_per_second": 1.184,
1807
+ "step": 1400
1808
+ },
1809
+ {
1810
+ "epoch": 1.23,
1811
+ "learning_rate": 1.3388451440711671e-05,
1812
+ "loss": 1.0715,
1813
+ "step": 1405
1814
+ },
1815
+ {
1816
+ "epoch": 1.24,
1817
+ "learning_rate": 1.3343737729833032e-05,
1818
+ "loss": 1.1067,
1819
+ "step": 1410
1820
+ },
1821
+ {
1822
+ "epoch": 1.24,
1823
+ "learning_rate": 1.3298948624371669e-05,
1824
+ "loss": 1.0794,
1825
+ "step": 1415
1826
+ },
1827
+ {
1828
+ "epoch": 1.24,
1829
+ "learning_rate": 1.325408513423212e-05,
1830
+ "loss": 1.079,
1831
+ "step": 1420
1832
+ },
1833
+ {
1834
+ "epoch": 1.25,
1835
+ "learning_rate": 1.320914827099615e-05,
1836
+ "loss": 1.0811,
1837
+ "step": 1425
1838
+ },
1839
+ {
1840
+ "epoch": 1.25,
1841
+ "learning_rate": 1.3164139047899942e-05,
1842
+ "loss": 1.0649,
1843
+ "step": 1430
1844
+ },
1845
+ {
1846
+ "epoch": 1.26,
1847
+ "learning_rate": 1.3119058479811254e-05,
1848
+ "loss": 1.083,
1849
+ "step": 1435
1850
+ },
1851
+ {
1852
+ "epoch": 1.26,
1853
+ "learning_rate": 1.3073907583206514e-05,
1854
+ "loss": 1.0655,
1855
+ "step": 1440
1856
+ },
1857
+ {
1858
+ "epoch": 1.27,
1859
+ "learning_rate": 1.302868737614793e-05,
1860
+ "loss": 1.0796,
1861
+ "step": 1445
1862
+ },
1863
+ {
1864
+ "epoch": 1.27,
1865
+ "learning_rate": 1.298339887826053e-05,
1866
+ "loss": 1.0576,
1867
+ "step": 1450
1868
+ },
1869
+ {
1870
+ "epoch": 1.27,
1871
+ "learning_rate": 1.2938043110709146e-05,
1872
+ "loss": 1.0742,
1873
+ "step": 1455
1874
+ },
1875
+ {
1876
+ "epoch": 1.28,
1877
+ "learning_rate": 1.2892621096175412e-05,
1878
+ "loss": 1.0828,
1879
+ "step": 1460
1880
+ },
1881
+ {
1882
+ "epoch": 1.28,
1883
+ "learning_rate": 1.2847133858834701e-05,
1884
+ "loss": 1.0714,
1885
+ "step": 1465
1886
+ },
1887
+ {
1888
+ "epoch": 1.29,
1889
+ "learning_rate": 1.280158242433303e-05,
1890
+ "loss": 1.0664,
1891
+ "step": 1470
1892
+ },
1893
+ {
1894
+ "epoch": 1.29,
1895
+ "learning_rate": 1.2755967819763929e-05,
1896
+ "loss": 1.0777,
1897
+ "step": 1475
1898
+ },
1899
+ {
1900
+ "epoch": 1.3,
1901
+ "learning_rate": 1.271029107364529e-05,
1902
+ "loss": 1.0622,
1903
+ "step": 1480
1904
+ },
1905
+ {
1906
+ "epoch": 1.3,
1907
+ "learning_rate": 1.266455321589617e-05,
1908
+ "loss": 1.0545,
1909
+ "step": 1485
1910
+ },
1911
+ {
1912
+ "epoch": 1.31,
1913
+ "learning_rate": 1.2618755277813575e-05,
1914
+ "loss": 1.0691,
1915
+ "step": 1490
1916
+ },
1917
+ {
1918
+ "epoch": 1.31,
1919
+ "learning_rate": 1.2572898292049195e-05,
1920
+ "loss": 1.0776,
1921
+ "step": 1495
1922
+ },
1923
+ {
1924
+ "epoch": 1.31,
1925
+ "learning_rate": 1.2526983292586136e-05,
1926
+ "loss": 1.0727,
1927
+ "step": 1500
1928
+ },
1929
+ {
1930
+ "epoch": 1.31,
1931
+ "eval_loss": 1.131594181060791,
1932
+ "eval_runtime": 426.5085,
1933
+ "eval_samples_per_second": 37.903,
1934
+ "eval_steps_per_second": 1.186,
1935
+ "step": 1500
1936
+ },
1937
+ {
1938
+ "epoch": 1.32,
1939
+ "learning_rate": 1.248101131471559e-05,
1940
+ "loss": 1.0636,
1941
+ "step": 1505
1942
+ },
1943
+ {
1944
+ "epoch": 1.32,
1945
+ "learning_rate": 1.2434983395013495e-05,
1946
+ "loss": 1.0831,
1947
+ "step": 1510
1948
+ },
1949
+ {
1950
+ "epoch": 1.33,
1951
+ "learning_rate": 1.2388900571317175e-05,
1952
+ "loss": 1.085,
1953
+ "step": 1515
1954
+ },
1955
+ {
1956
+ "epoch": 1.33,
1957
+ "learning_rate": 1.2342763882701922e-05,
1958
+ "loss": 1.0965,
1959
+ "step": 1520
1960
+ },
1961
+ {
1962
+ "epoch": 1.34,
1963
+ "learning_rate": 1.2296574369457575e-05,
1964
+ "loss": 1.0723,
1965
+ "step": 1525
1966
+ },
1967
+ {
1968
+ "epoch": 1.34,
1969
+ "learning_rate": 1.2250333073065066e-05,
1970
+ "loss": 1.076,
1971
+ "step": 1530
1972
+ },
1973
+ {
1974
+ "epoch": 1.34,
1975
+ "learning_rate": 1.2204041036172924e-05,
1976
+ "loss": 1.0774,
1977
+ "step": 1535
1978
+ },
1979
+ {
1980
+ "epoch": 1.35,
1981
+ "learning_rate": 1.2157699302573787e-05,
1982
+ "loss": 1.0683,
1983
+ "step": 1540
1984
+ },
1985
+ {
1986
+ "epoch": 1.35,
1987
+ "learning_rate": 1.2111308917180844e-05,
1988
+ "loss": 1.0596,
1989
+ "step": 1545
1990
+ },
1991
+ {
1992
+ "epoch": 1.36,
1993
+ "learning_rate": 1.206487092600429e-05,
1994
+ "loss": 1.0696,
1995
+ "step": 1550
1996
+ },
1997
+ {
1998
+ "epoch": 1.36,
1999
+ "learning_rate": 1.2018386376127733e-05,
2000
+ "loss": 1.0732,
2001
+ "step": 1555
2002
+ },
2003
+ {
2004
+ "epoch": 1.37,
2005
+ "learning_rate": 1.197185631568459e-05,
2006
+ "loss": 1.0894,
2007
+ "step": 1560
2008
+ },
2009
+ {
2010
+ "epoch": 1.37,
2011
+ "learning_rate": 1.192528179383444e-05,
2012
+ "loss": 1.0723,
2013
+ "step": 1565
2014
+ },
2015
+ {
2016
+ "epoch": 1.38,
2017
+ "learning_rate": 1.1878663860739388e-05,
2018
+ "loss": 1.0746,
2019
+ "step": 1570
2020
+ },
2021
+ {
2022
+ "epoch": 1.38,
2023
+ "learning_rate": 1.1832003567540372e-05,
2024
+ "loss": 1.0837,
2025
+ "step": 1575
2026
+ },
2027
+ {
2028
+ "epoch": 1.38,
2029
+ "learning_rate": 1.1785301966333465e-05,
2030
+ "loss": 1.0849,
2031
+ "step": 1580
2032
+ },
2033
+ {
2034
+ "epoch": 1.39,
2035
+ "learning_rate": 1.1738560110146154e-05,
2036
+ "loss": 1.0658,
2037
+ "step": 1585
2038
+ },
2039
+ {
2040
+ "epoch": 1.39,
2041
+ "learning_rate": 1.1691779052913592e-05,
2042
+ "loss": 1.0834,
2043
+ "step": 1590
2044
+ },
2045
+ {
2046
+ "epoch": 1.4,
2047
+ "learning_rate": 1.1644959849454842e-05,
2048
+ "loss": 1.0743,
2049
+ "step": 1595
2050
+ },
2051
+ {
2052
+ "epoch": 1.4,
2053
+ "learning_rate": 1.159810355544908e-05,
2054
+ "loss": 1.0769,
2055
+ "step": 1600
2056
+ },
2057
+ {
2058
+ "epoch": 1.4,
2059
+ "eval_loss": 1.129186749458313,
2060
+ "eval_runtime": 426.3787,
2061
+ "eval_samples_per_second": 37.915,
2062
+ "eval_steps_per_second": 1.187,
2063
+ "step": 1600
2064
+ },
2065
+ {
2066
+ "epoch": 1.41,
2067
+ "learning_rate": 1.1551211227411811e-05,
2068
+ "loss": 1.0804,
2069
+ "step": 1605
2070
+ },
2071
+ {
2072
+ "epoch": 1.41,
2073
+ "learning_rate": 1.1504283922671027e-05,
2074
+ "loss": 1.0684,
2075
+ "step": 1610
2076
+ },
2077
+ {
2078
+ "epoch": 1.41,
2079
+ "learning_rate": 1.1457322699343371e-05,
2080
+ "loss": 1.0753,
2081
+ "step": 1615
2082
+ },
2083
+ {
2084
+ "epoch": 1.42,
2085
+ "learning_rate": 1.1410328616310293e-05,
2086
+ "loss": 1.0641,
2087
+ "step": 1620
2088
+ },
2089
+ {
2090
+ "epoch": 1.42,
2091
+ "learning_rate": 1.136330273319416e-05,
2092
+ "loss": 1.0814,
2093
+ "step": 1625
2094
+ },
2095
+ {
2096
+ "epoch": 1.43,
2097
+ "learning_rate": 1.1316246110334357e-05,
2098
+ "loss": 1.0738,
2099
+ "step": 1630
2100
+ },
2101
+ {
2102
+ "epoch": 1.43,
2103
+ "learning_rate": 1.12691598087634e-05,
2104
+ "loss": 1.0674,
2105
+ "step": 1635
2106
+ },
2107
+ {
2108
+ "epoch": 1.44,
2109
+ "learning_rate": 1.1222044890183003e-05,
2110
+ "loss": 1.0688,
2111
+ "step": 1640
2112
+ },
2113
+ {
2114
+ "epoch": 1.44,
2115
+ "learning_rate": 1.117490241694012e-05,
2116
+ "loss": 1.0685,
2117
+ "step": 1645
2118
+ },
2119
+ {
2120
+ "epoch": 1.45,
2121
+ "learning_rate": 1.1127733452003027e-05,
2122
+ "loss": 1.0736,
2123
+ "step": 1650
2124
+ },
2125
+ {
2126
+ "epoch": 1.45,
2127
+ "learning_rate": 1.1080539058937323e-05,
2128
+ "loss": 1.0623,
2129
+ "step": 1655
2130
+ },
2131
+ {
2132
+ "epoch": 1.45,
2133
+ "learning_rate": 1.1033320301881958e-05,
2134
+ "loss": 1.0653,
2135
+ "step": 1660
2136
+ },
2137
+ {
2138
+ "epoch": 1.46,
2139
+ "learning_rate": 1.0986078245525249e-05,
2140
+ "loss": 1.0869,
2141
+ "step": 1665
2142
+ },
2143
+ {
2144
+ "epoch": 1.46,
2145
+ "learning_rate": 1.093881395508086e-05,
2146
+ "loss": 1.0709,
2147
+ "step": 1670
2148
+ },
2149
+ {
2150
+ "epoch": 1.47,
2151
+ "learning_rate": 1.0891528496263797e-05,
2152
+ "loss": 1.0689,
2153
+ "step": 1675
2154
+ },
2155
+ {
2156
+ "epoch": 1.47,
2157
+ "learning_rate": 1.0844222935266356e-05,
2158
+ "loss": 1.0763,
2159
+ "step": 1680
2160
+ },
2161
+ {
2162
+ "epoch": 1.48,
2163
+ "learning_rate": 1.0796898338734116e-05,
2164
+ "loss": 1.0542,
2165
+ "step": 1685
2166
+ },
2167
+ {
2168
+ "epoch": 1.48,
2169
+ "learning_rate": 1.0749555773741851e-05,
2170
+ "loss": 1.0675,
2171
+ "step": 1690
2172
+ },
2173
+ {
2174
+ "epoch": 1.48,
2175
+ "learning_rate": 1.0702196307769497e-05,
2176
+ "loss": 1.0559,
2177
+ "step": 1695
2178
+ },
2179
+ {
2180
+ "epoch": 1.49,
2181
+ "learning_rate": 1.0654821008678072e-05,
2182
+ "loss": 1.0728,
2183
+ "step": 1700
2184
+ },
2185
+ {
2186
+ "epoch": 1.49,
2187
+ "eval_loss": 1.1270054578781128,
2188
+ "eval_runtime": 425.1702,
2189
+ "eval_samples_per_second": 38.022,
2190
+ "eval_steps_per_second": 1.19,
2191
+ "step": 1700
2192
+ },
2193
+ {
2194
+ "epoch": 1.49,
2195
+ "learning_rate": 1.0607430944685602e-05,
2196
+ "loss": 1.0686,
2197
+ "step": 1705
2198
+ },
2199
+ {
2200
+ "epoch": 1.5,
2201
+ "learning_rate": 1.0560027184343024e-05,
2202
+ "loss": 1.072,
2203
+ "step": 1710
2204
+ },
2205
+ {
2206
+ "epoch": 1.5,
2207
+ "learning_rate": 1.0512610796510106e-05,
2208
+ "loss": 1.0572,
2209
+ "step": 1715
2210
+ },
2211
+ {
2212
+ "epoch": 1.51,
2213
+ "learning_rate": 1.0465182850331343e-05,
2214
+ "loss": 1.0861,
2215
+ "step": 1720
2216
+ },
2217
+ {
2218
+ "epoch": 1.51,
2219
+ "learning_rate": 1.0417744415211841e-05,
2220
+ "loss": 1.0797,
2221
+ "step": 1725
2222
+ },
2223
+ {
2224
+ "epoch": 1.52,
2225
+ "learning_rate": 1.0370296560793213e-05,
2226
+ "loss": 1.0759,
2227
+ "step": 1730
2228
+ },
2229
+ {
2230
+ "epoch": 1.52,
2231
+ "learning_rate": 1.0322840356929464e-05,
2232
+ "loss": 1.0598,
2233
+ "step": 1735
2234
+ },
2235
+ {
2236
+ "epoch": 1.52,
2237
+ "learning_rate": 1.0275376873662853e-05,
2238
+ "loss": 1.0584,
2239
+ "step": 1740
2240
+ },
2241
+ {
2242
+ "epoch": 1.53,
2243
+ "learning_rate": 1.0227907181199777e-05,
2244
+ "loss": 1.0831,
2245
+ "step": 1745
2246
+ },
2247
+ {
2248
+ "epoch": 1.53,
2249
+ "learning_rate": 1.0180432349886645e-05,
2250
+ "loss": 1.0547,
2251
+ "step": 1750
2252
+ },
2253
+ {
2254
+ "epoch": 1.54,
2255
+ "learning_rate": 1.0132953450185727e-05,
2256
+ "loss": 1.0726,
2257
+ "step": 1755
2258
+ },
2259
+ {
2260
+ "epoch": 1.54,
2261
+ "learning_rate": 1.0085471552651037e-05,
2262
+ "loss": 1.083,
2263
+ "step": 1760
2264
+ },
2265
+ {
2266
+ "epoch": 1.55,
2267
+ "learning_rate": 1.0037987727904178e-05,
2268
+ "loss": 1.0723,
2269
+ "step": 1765
2270
+ },
2271
+ {
2272
+ "epoch": 1.55,
2273
+ "learning_rate": 9.990503046610206e-06,
2274
+ "loss": 1.056,
2275
+ "step": 1770
2276
+ },
2277
+ {
2278
+ "epoch": 1.55,
2279
+ "learning_rate": 9.943018579453491e-06,
2280
+ "loss": 1.0764,
2281
+ "step": 1775
2282
+ },
2283
+ {
2284
+ "epoch": 1.56,
2285
+ "learning_rate": 9.895535397113585e-06,
2286
+ "loss": 1.0795,
2287
+ "step": 1780
2288
+ },
2289
+ {
2290
+ "epoch": 1.56,
2291
+ "learning_rate": 9.848054570241058e-06,
2292
+ "loss": 1.0741,
2293
+ "step": 1785
2294
+ },
2295
+ {
2296
+ "epoch": 1.57,
2297
+ "learning_rate": 9.800577169433372e-06,
2298
+ "loss": 1.0629,
2299
+ "step": 1790
2300
+ },
2301
+ {
2302
+ "epoch": 1.57,
2303
+ "learning_rate": 9.753104265210735e-06,
2304
+ "loss": 1.0681,
2305
+ "step": 1795
2306
+ },
2307
+ {
2308
+ "epoch": 1.58,
2309
+ "learning_rate": 9.705636927991976e-06,
2310
+ "loss": 1.0558,
2311
+ "step": 1800
2312
+ },
2313
+ {
2314
+ "epoch": 1.58,
2315
+ "eval_loss": 1.124674916267395,
2316
+ "eval_runtime": 425.5493,
2317
+ "eval_samples_per_second": 37.989,
2318
+ "eval_steps_per_second": 1.189,
2319
+ "step": 1800
2320
+ },
2321
+ {
2322
+ "epoch": 1.58,
2323
+ "learning_rate": 9.658176228070393e-06,
2324
+ "loss": 1.0708,
2325
+ "step": 1805
2326
+ },
2327
+ {
2328
+ "epoch": 1.59,
2329
+ "learning_rate": 9.610723235589621e-06,
2330
+ "loss": 1.053,
2331
+ "step": 1810
2332
+ },
2333
+ {
2334
+ "epoch": 1.59,
2335
+ "learning_rate": 9.56327902051952e-06,
2336
+ "loss": 1.0571,
2337
+ "step": 1815
2338
+ },
2339
+ {
2340
+ "epoch": 1.59,
2341
+ "learning_rate": 9.515844652632021e-06,
2342
+ "loss": 1.0861,
2343
+ "step": 1820
2344
+ },
2345
+ {
2346
+ "epoch": 1.6,
2347
+ "learning_rate": 9.468421201477039e-06,
2348
+ "loss": 1.0576,
2349
+ "step": 1825
2350
+ },
2351
+ {
2352
+ "epoch": 1.6,
2353
+ "learning_rate": 9.421009736358324e-06,
2354
+ "loss": 1.053,
2355
+ "step": 1830
2356
+ },
2357
+ {
2358
+ "epoch": 1.61,
2359
+ "learning_rate": 9.373611326309368e-06,
2360
+ "loss": 1.0871,
2361
+ "step": 1835
2362
+ },
2363
+ {
2364
+ "epoch": 1.61,
2365
+ "learning_rate": 9.32622704006931e-06,
2366
+ "loss": 1.0644,
2367
+ "step": 1840
2368
+ },
2369
+ {
2370
+ "epoch": 1.62,
2371
+ "learning_rate": 9.278857946058804e-06,
2372
+ "loss": 1.0786,
2373
+ "step": 1845
2374
+ },
2375
+ {
2376
+ "epoch": 1.62,
2377
+ "learning_rate": 9.231505112355966e-06,
2378
+ "loss": 1.0498,
2379
+ "step": 1850
2380
+ },
2381
+ {
2382
+ "epoch": 1.63,
2383
+ "learning_rate": 9.184169606672266e-06,
2384
+ "loss": 1.0578,
2385
+ "step": 1855
2386
+ },
2387
+ {
2388
+ "epoch": 1.63,
2389
+ "learning_rate": 9.136852496328469e-06,
2390
+ "loss": 1.0573,
2391
+ "step": 1860
2392
+ },
2393
+ {
2394
+ "epoch": 1.63,
2395
+ "learning_rate": 9.089554848230556e-06,
2396
+ "loss": 1.0679,
2397
+ "step": 1865
2398
+ },
2399
+ {
2400
+ "epoch": 1.64,
2401
+ "learning_rate": 9.042277728845673e-06,
2402
+ "loss": 1.0688,
2403
+ "step": 1870
2404
+ },
2405
+ {
2406
+ "epoch": 1.64,
2407
+ "learning_rate": 8.995022204178096e-06,
2408
+ "loss": 1.0545,
2409
+ "step": 1875
2410
+ },
2411
+ {
2412
+ "epoch": 1.65,
2413
+ "learning_rate": 8.947789339745165e-06,
2414
+ "loss": 1.0703,
2415
+ "step": 1880
2416
+ },
2417
+ {
2418
+ "epoch": 1.65,
2419
+ "learning_rate": 8.900580200553295e-06,
2420
+ "loss": 1.0487,
2421
+ "step": 1885
2422
+ },
2423
+ {
2424
+ "epoch": 1.66,
2425
+ "learning_rate": 8.85339585107393e-06,
2426
+ "loss": 1.067,
2427
+ "step": 1890
2428
+ },
2429
+ {
2430
+ "epoch": 1.66,
2431
+ "learning_rate": 8.806237355219563e-06,
2432
+ "loss": 1.0533,
2433
+ "step": 1895
2434
+ },
2435
+ {
2436
+ "epoch": 1.66,
2437
+ "learning_rate": 8.759105776319737e-06,
2438
+ "loss": 1.0753,
2439
+ "step": 1900
2440
+ },
2441
+ {
2442
+ "epoch": 1.66,
2443
+ "eval_loss": 1.1228857040405273,
2444
+ "eval_runtime": 426.9078,
2445
+ "eval_samples_per_second": 37.868,
2446
+ "eval_steps_per_second": 1.185,
2447
+ "step": 1900
2448
+ },
2449
+ {
2450
+ "epoch": 1.67,
2451
+ "learning_rate": 8.712002177097079e-06,
2452
+ "loss": 1.0754,
2453
+ "step": 1905
2454
+ },
2455
+ {
2456
+ "epoch": 1.67,
2457
+ "learning_rate": 8.66492761964332e-06,
2458
+ "loss": 1.0426,
2459
+ "step": 1910
2460
+ },
2461
+ {
2462
+ "epoch": 1.68,
2463
+ "learning_rate": 8.61788316539536e-06,
2464
+ "loss": 1.0496,
2465
+ "step": 1915
2466
+ },
2467
+ {
2468
+ "epoch": 1.68,
2469
+ "learning_rate": 8.57086987511134e-06,
2470
+ "loss": 1.0545,
2471
+ "step": 1920
2472
+ },
2473
+ {
2474
+ "epoch": 1.69,
2475
+ "learning_rate": 8.523888808846705e-06,
2476
+ "loss": 1.069,
2477
+ "step": 1925
2478
+ },
2479
+ {
2480
+ "epoch": 1.69,
2481
+ "learning_rate": 8.47694102593032e-06,
2482
+ "loss": 1.0474,
2483
+ "step": 1930
2484
+ },
2485
+ {
2486
+ "epoch": 1.7,
2487
+ "learning_rate": 8.430027584940572e-06,
2488
+ "loss": 1.0719,
2489
+ "step": 1935
2490
+ },
2491
+ {
2492
+ "epoch": 1.7,
2493
+ "learning_rate": 8.383149543681511e-06,
2494
+ "loss": 1.0891,
2495
+ "step": 1940
2496
+ },
2497
+ {
2498
+ "epoch": 1.7,
2499
+ "learning_rate": 8.336307959158997e-06,
2500
+ "loss": 1.066,
2501
+ "step": 1945
2502
+ },
2503
+ {
2504
+ "epoch": 1.71,
2505
+ "learning_rate": 8.28950388755685e-06,
2506
+ "loss": 1.0524,
2507
+ "step": 1950
2508
+ },
2509
+ {
2510
+ "epoch": 1.71,
2511
+ "learning_rate": 8.242738384213071e-06,
2512
+ "loss": 1.0692,
2513
+ "step": 1955
2514
+ },
2515
+ {
2516
+ "epoch": 1.72,
2517
+ "learning_rate": 8.196012503596e-06,
2518
+ "loss": 1.0545,
2519
+ "step": 1960
2520
+ },
2521
+ {
2522
+ "epoch": 1.72,
2523
+ "learning_rate": 8.149327299280576e-06,
2524
+ "loss": 1.0739,
2525
+ "step": 1965
2526
+ },
2527
+ {
2528
+ "epoch": 1.73,
2529
+ "learning_rate": 8.102683823924575e-06,
2530
+ "loss": 1.0435,
2531
+ "step": 1970
2532
+ },
2533
+ {
2534
+ "epoch": 1.73,
2535
+ "learning_rate": 8.056083129244857e-06,
2536
+ "loss": 1.0817,
2537
+ "step": 1975
2538
+ },
2539
+ {
2540
+ "epoch": 1.73,
2541
+ "learning_rate": 8.00952626599367e-06,
2542
+ "loss": 1.0556,
2543
+ "step": 1980
2544
+ },
2545
+ {
2546
+ "epoch": 1.74,
2547
+ "learning_rate": 7.963014283934944e-06,
2548
+ "loss": 1.0693,
2549
+ "step": 1985
2550
+ },
2551
+ {
2552
+ "epoch": 1.74,
2553
+ "learning_rate": 7.916548231820642e-06,
2554
+ "loss": 1.0732,
2555
+ "step": 1990
2556
+ },
2557
+ {
2558
+ "epoch": 1.75,
2559
+ "learning_rate": 7.870129157367079e-06,
2560
+ "loss": 1.0674,
2561
+ "step": 1995
2562
+ },
2563
+ {
2564
+ "epoch": 1.75,
2565
+ "learning_rate": 7.823758107231338e-06,
2566
+ "loss": 1.0799,
2567
+ "step": 2000
2568
+ },
2569
+ {
2570
+ "epoch": 1.75,
2571
+ "eval_loss": 1.1209245920181274,
2572
+ "eval_runtime": 427.1001,
2573
+ "eval_samples_per_second": 37.851,
2574
+ "eval_steps_per_second": 1.185,
2575
+ "step": 2000
2576
+ },
2577
+ {
2578
+ "epoch": 1.76,
2579
+ "learning_rate": 7.777436126987642e-06,
2580
+ "loss": 1.0638,
2581
+ "step": 2005
2582
+ },
2583
+ {
2584
+ "epoch": 1.76,
2585
+ "learning_rate": 7.731164261103777e-06,
2586
+ "loss": 1.0617,
2587
+ "step": 2010
2588
+ },
2589
+ {
2590
+ "epoch": 1.77,
2591
+ "learning_rate": 7.684943552917569e-06,
2592
+ "loss": 1.0698,
2593
+ "step": 2015
2594
+ },
2595
+ {
2596
+ "epoch": 1.77,
2597
+ "learning_rate": 7.638775044613324e-06,
2598
+ "loss": 1.072,
2599
+ "step": 2020
2600
+ },
2601
+ {
2602
+ "epoch": 1.77,
2603
+ "learning_rate": 7.592659777198354e-06,
2604
+ "loss": 1.0673,
2605
+ "step": 2025
2606
+ },
2607
+ {
2608
+ "epoch": 1.78,
2609
+ "learning_rate": 7.546598790479489e-06,
2610
+ "loss": 1.0734,
2611
+ "step": 2030
2612
+ },
2613
+ {
2614
+ "epoch": 1.78,
2615
+ "learning_rate": 7.500593123039649e-06,
2616
+ "loss": 1.067,
2617
+ "step": 2035
2618
+ },
2619
+ {
2620
+ "epoch": 1.79,
2621
+ "learning_rate": 7.4546438122144085e-06,
2622
+ "loss": 1.0486,
2623
+ "step": 2040
2624
+ },
2625
+ {
2626
+ "epoch": 1.79,
2627
+ "learning_rate": 7.408751894068607e-06,
2628
+ "loss": 1.063,
2629
+ "step": 2045
2630
+ },
2631
+ {
2632
+ "epoch": 1.8,
2633
+ "learning_rate": 7.362918403373007e-06,
2634
+ "loss": 1.0486,
2635
+ "step": 2050
2636
+ },
2637
+ {
2638
+ "epoch": 1.8,
2639
+ "learning_rate": 7.31714437358094e-06,
2640
+ "loss": 1.0702,
2641
+ "step": 2055
2642
+ },
2643
+ {
2644
+ "epoch": 1.8,
2645
+ "learning_rate": 7.271430836805018e-06,
2646
+ "loss": 1.0682,
2647
+ "step": 2060
2648
+ },
2649
+ {
2650
+ "epoch": 1.81,
2651
+ "learning_rate": 7.225778823793853e-06,
2652
+ "loss": 1.0414,
2653
+ "step": 2065
2654
+ },
2655
+ {
2656
+ "epoch": 1.81,
2657
+ "learning_rate": 7.180189363908818e-06,
2658
+ "loss": 1.0621,
2659
+ "step": 2070
2660
+ },
2661
+ {
2662
+ "epoch": 1.82,
2663
+ "learning_rate": 7.134663485100852e-06,
2664
+ "loss": 1.0796,
2665
+ "step": 2075
2666
+ },
2667
+ {
2668
+ "epoch": 1.82,
2669
+ "learning_rate": 7.089202213887248e-06,
2670
+ "loss": 1.0749,
2671
+ "step": 2080
2672
+ },
2673
+ {
2674
+ "epoch": 1.83,
2675
+ "learning_rate": 7.043806575328542e-06,
2676
+ "loss": 1.078,
2677
+ "step": 2085
2678
+ },
2679
+ {
2680
+ "epoch": 1.83,
2681
+ "learning_rate": 6.998477593005377e-06,
2682
+ "loss": 1.0667,
2683
+ "step": 2090
2684
+ },
2685
+ {
2686
+ "epoch": 1.84,
2687
+ "learning_rate": 6.95321628899544e-06,
2688
+ "loss": 1.0593,
2689
+ "step": 2095
2690
+ },
2691
+ {
2692
+ "epoch": 1.84,
2693
+ "learning_rate": 6.908023683850398e-06,
2694
+ "loss": 1.066,
2695
+ "step": 2100
2696
+ },
2697
+ {
2698
+ "epoch": 1.84,
2699
+ "eval_loss": 1.1192409992218018,
2700
+ "eval_runtime": 426.8527,
2701
+ "eval_samples_per_second": 37.873,
2702
+ "eval_steps_per_second": 1.185,
2703
+ "step": 2100
2704
+ },
2705
+ {
2706
+ "epoch": 1.84,
2707
+ "learning_rate": 6.8629007965729e-06,
2708
+ "loss": 1.0725,
2709
+ "step": 2105
2710
+ },
2711
+ {
2712
+ "epoch": 1.85,
2713
+ "learning_rate": 6.817848644593603e-06,
2714
+ "loss": 1.0578,
2715
+ "step": 2110
2716
+ },
2717
+ {
2718
+ "epoch": 1.85,
2719
+ "learning_rate": 6.7728682437482165e-06,
2720
+ "loss": 1.0892,
2721
+ "step": 2115
2722
+ },
2723
+ {
2724
+ "epoch": 1.86,
2725
+ "learning_rate": 6.727960608254611e-06,
2726
+ "loss": 1.0551,
2727
+ "step": 2120
2728
+ },
2729
+ {
2730
+ "epoch": 1.86,
2731
+ "learning_rate": 6.683126750689934e-06,
2732
+ "loss": 1.0516,
2733
+ "step": 2125
2734
+ },
2735
+ {
2736
+ "epoch": 1.87,
2737
+ "learning_rate": 6.63836768196781e-06,
2738
+ "loss": 1.0651,
2739
+ "step": 2130
2740
+ },
2741
+ {
2742
+ "epoch": 1.87,
2743
+ "learning_rate": 6.5936844113155e-06,
2744
+ "loss": 1.0722,
2745
+ "step": 2135
2746
+ },
2747
+ {
2748
+ "epoch": 1.87,
2749
+ "learning_rate": 6.549077946251186e-06,
2750
+ "loss": 1.0621,
2751
+ "step": 2140
2752
+ },
2753
+ {
2754
+ "epoch": 1.88,
2755
+ "learning_rate": 6.50454929256124e-06,
2756
+ "loss": 1.0523,
2757
+ "step": 2145
2758
+ },
2759
+ {
2760
+ "epoch": 1.88,
2761
+ "learning_rate": 6.460099454277525e-06,
2762
+ "loss": 1.0682,
2763
+ "step": 2150
2764
+ },
2765
+ {
2766
+ "epoch": 1.89,
2767
+ "learning_rate": 6.4157294336548e-06,
2768
+ "loss": 1.0714,
2769
+ "step": 2155
2770
+ },
2771
+ {
2772
+ "epoch": 1.89,
2773
+ "learning_rate": 6.371440231148075e-06,
2774
+ "loss": 1.047,
2775
+ "step": 2160
2776
+ },
2777
+ {
2778
+ "epoch": 1.9,
2779
+ "learning_rate": 6.3272328453900856e-06,
2780
+ "loss": 1.0659,
2781
+ "step": 2165
2782
+ },
2783
+ {
2784
+ "epoch": 1.9,
2785
+ "learning_rate": 6.283108273168753e-06,
2786
+ "loss": 1.0567,
2787
+ "step": 2170
2788
+ },
2789
+ {
2790
+ "epoch": 1.91,
2791
+ "learning_rate": 6.2390675094047326e-06,
2792
+ "loss": 1.082,
2793
+ "step": 2175
2794
+ },
2795
+ {
2796
+ "epoch": 1.91,
2797
+ "learning_rate": 6.1951115471289615e-06,
2798
+ "loss": 1.0704,
2799
+ "step": 2180
2800
+ },
2801
+ {
2802
+ "epoch": 1.91,
2803
+ "learning_rate": 6.1512413774602634e-06,
2804
+ "loss": 1.0697,
2805
+ "step": 2185
2806
+ },
2807
+ {
2808
+ "epoch": 1.92,
2809
+ "learning_rate": 6.1074579895830275e-06,
2810
+ "loss": 1.0588,
2811
+ "step": 2190
2812
+ },
2813
+ {
2814
+ "epoch": 1.92,
2815
+ "learning_rate": 6.063762370724876e-06,
2816
+ "loss": 1.0697,
2817
+ "step": 2195
2818
+ },
2819
+ {
2820
+ "epoch": 1.93,
2821
+ "learning_rate": 6.020155506134416e-06,
2822
+ "loss": 1.0406,
2823
+ "step": 2200
2824
+ },
2825
+ {
2826
+ "epoch": 1.93,
2827
+ "eval_loss": 1.1177691221237183,
2828
+ "eval_runtime": 425.6505,
2829
+ "eval_samples_per_second": 37.98,
2830
+ "eval_steps_per_second": 1.189,
2831
+ "step": 2200
2832
+ },
2833
+ {
2834
+ "epoch": 1.93,
2835
+ "learning_rate": 5.976638379059034e-06,
2836
+ "loss": 1.0813,
2837
+ "step": 2205
2838
+ },
2839
+ {
2840
+ "epoch": 1.94,
2841
+ "learning_rate": 5.933211970722703e-06,
2842
+ "loss": 1.0589,
2843
+ "step": 2210
2844
+ },
2845
+ {
2846
+ "epoch": 1.94,
2847
+ "learning_rate": 5.8898772603038775e-06,
2848
+ "loss": 1.0653,
2849
+ "step": 2215
2850
+ },
2851
+ {
2852
+ "epoch": 1.94,
2853
+ "learning_rate": 5.846635224913402e-06,
2854
+ "loss": 1.076,
2855
+ "step": 2220
2856
+ },
2857
+ {
2858
+ "epoch": 1.95,
2859
+ "learning_rate": 5.803486839572497e-06,
2860
+ "loss": 1.0569,
2861
+ "step": 2225
2862
+ },
2863
+ {
2864
+ "epoch": 1.95,
2865
+ "learning_rate": 5.760433077190741e-06,
2866
+ "loss": 1.0627,
2867
+ "step": 2230
2868
+ },
2869
+ {
2870
+ "epoch": 1.96,
2871
+ "learning_rate": 5.717474908544176e-06,
2872
+ "loss": 1.0598,
2873
+ "step": 2235
2874
+ },
2875
+ {
2876
+ "epoch": 1.96,
2877
+ "learning_rate": 5.674613302253391e-06,
2878
+ "loss": 1.085,
2879
+ "step": 2240
2880
+ },
2881
+ {
2882
+ "epoch": 1.97,
2883
+ "learning_rate": 5.631849224761681e-06,
2884
+ "loss": 1.07,
2885
+ "step": 2245
2886
+ },
2887
+ {
2888
+ "epoch": 1.97,
2889
+ "learning_rate": 5.589183640313264e-06,
2890
+ "loss": 1.0633,
2891
+ "step": 2250
2892
+ },
2893
+ {
2894
+ "epoch": 1.98,
2895
+ "learning_rate": 5.546617510931535e-06,
2896
+ "loss": 1.0765,
2897
+ "step": 2255
2898
+ },
2899
+ {
2900
+ "epoch": 1.98,
2901
+ "learning_rate": 5.504151796397392e-06,
2902
+ "loss": 1.0606,
2903
+ "step": 2260
2904
+ },
2905
+ {
2906
+ "epoch": 1.98,
2907
+ "learning_rate": 5.461787454227558e-06,
2908
+ "loss": 1.0817,
2909
+ "step": 2265
2910
+ },
2911
+ {
2912
+ "epoch": 1.99,
2913
+ "learning_rate": 5.419525439653025e-06,
2914
+ "loss": 1.0665,
2915
+ "step": 2270
2916
+ },
2917
+ {
2918
+ "epoch": 1.99,
2919
+ "learning_rate": 5.3773667055975066e-06,
2920
+ "loss": 1.0536,
2921
+ "step": 2275
2922
+ },
2923
+ {
2924
+ "epoch": 2.0,
2925
+ "learning_rate": 5.335312202655936e-06,
2926
+ "loss": 1.0574,
2927
+ "step": 2280
2928
+ },
2929
+ {
2930
+ "epoch": 2.0,
2931
+ "learning_rate": 5.29336287907305e-06,
2932
+ "loss": 1.0435,
2933
+ "step": 2285
2934
+ },
2935
+ {
2936
+ "epoch": 2.01,
2937
+ "learning_rate": 5.251519680721999e-06,
2938
+ "loss": 1.0025,
2939
+ "step": 2290
2940
+ },
2941
+ {
2942
+ "epoch": 2.01,
2943
+ "learning_rate": 5.209783551083028e-06,
2944
+ "loss": 0.9986,
2945
+ "step": 2295
2946
+ },
2947
+ {
2948
+ "epoch": 2.01,
2949
+ "learning_rate": 5.168155431222188e-06,
2950
+ "loss": 1.0193,
2951
+ "step": 2300
2952
+ },
2953
+ {
2954
+ "epoch": 2.01,
2955
+ "eval_loss": 1.1221781969070435,
2956
+ "eval_runtime": 426.9687,
2957
+ "eval_samples_per_second": 37.862,
2958
+ "eval_steps_per_second": 1.185,
2959
+ "step": 2300
2960
+ },
2961
+ {
2962
+ "epoch": 2.02,
2963
+ "learning_rate": 5.126636259770135e-06,
2964
+ "loss": 1.0106,
2965
+ "step": 2305
2966
+ },
2967
+ {
2968
+ "epoch": 2.02,
2969
+ "learning_rate": 5.085226972900961e-06,
2970
+ "loss": 1.0173,
2971
+ "step": 2310
2972
+ },
2973
+ {
2974
+ "epoch": 2.03,
2975
+ "learning_rate": 5.043928504311058e-06,
2976
+ "loss": 1.0212,
2977
+ "step": 2315
2978
+ },
2979
+ {
2980
+ "epoch": 2.03,
2981
+ "learning_rate": 5.002741785198111e-06,
2982
+ "loss": 1.0217,
2983
+ "step": 2320
2984
+ },
2985
+ {
2986
+ "epoch": 2.04,
2987
+ "learning_rate": 4.961667744240069e-06,
2988
+ "loss": 1.0155,
2989
+ "step": 2325
2990
+ },
2991
+ {
2992
+ "epoch": 2.04,
2993
+ "learning_rate": 4.920707307574218e-06,
2994
+ "loss": 1.0196,
2995
+ "step": 2330
2996
+ },
2997
+ {
2998
+ "epoch": 2.05,
2999
+ "learning_rate": 4.879861398776287e-06,
3000
+ "loss": 1.0074,
3001
+ "step": 2335
3002
+ },
3003
+ {
3004
+ "epoch": 2.05,
3005
+ "learning_rate": 4.839130938839645e-06,
3006
+ "loss": 1.0258,
3007
+ "step": 2340
3008
+ },
3009
+ {
3010
+ "epoch": 2.05,
3011
+ "learning_rate": 4.798516846154508e-06,
3012
+ "loss": 1.0171,
3013
+ "step": 2345
3014
+ },
3015
+ {
3016
+ "epoch": 2.06,
3017
+ "learning_rate": 4.758020036487247e-06,
3018
+ "loss": 1.0156,
3019
+ "step": 2350
3020
+ },
3021
+ {
3022
+ "epoch": 2.06,
3023
+ "learning_rate": 4.717641422959741e-06,
3024
+ "loss": 1.0213,
3025
+ "step": 2355
3026
+ },
3027
+ {
3028
+ "epoch": 2.07,
3029
+ "learning_rate": 4.677381916028775e-06,
3030
+ "loss": 1.0167,
3031
+ "step": 2360
3032
+ },
3033
+ {
3034
+ "epoch": 2.07,
3035
+ "learning_rate": 4.637242423465529e-06,
3036
+ "loss": 1.0171,
3037
+ "step": 2365
3038
+ },
3039
+ {
3040
+ "epoch": 2.08,
3041
+ "learning_rate": 4.597223850335088e-06,
3042
+ "loss": 1.0137,
3043
+ "step": 2370
3044
+ },
3045
+ {
3046
+ "epoch": 2.08,
3047
+ "learning_rate": 4.557327098976059e-06,
3048
+ "loss": 1.0048,
3049
+ "step": 2375
3050
+ },
3051
+ {
3052
+ "epoch": 2.08,
3053
+ "learning_rate": 4.5175530689802e-06,
3054
+ "loss": 1.0229,
3055
+ "step": 2380
3056
+ },
3057
+ {
3058
+ "epoch": 2.09,
3059
+ "learning_rate": 4.477902657172153e-06,
3060
+ "loss": 1.027,
3061
+ "step": 2385
3062
+ },
3063
+ {
3064
+ "epoch": 2.09,
3065
+ "learning_rate": 4.438376757589223e-06,
3066
+ "loss": 1.0161,
3067
+ "step": 2390
3068
+ },
3069
+ {
3070
+ "epoch": 2.1,
3071
+ "learning_rate": 4.398976261461205e-06,
3072
+ "loss": 1.0052,
3073
+ "step": 2395
3074
+ },
3075
+ {
3076
+ "epoch": 2.1,
3077
+ "learning_rate": 4.359702057190307e-06,
3078
+ "loss": 1.0276,
3079
+ "step": 2400
3080
+ },
3081
+ {
3082
+ "epoch": 2.1,
3083
+ "eval_loss": 1.1220041513442993,
3084
+ "eval_runtime": 426.2423,
3085
+ "eval_samples_per_second": 37.927,
3086
+ "eval_steps_per_second": 1.187,
3087
+ "step": 2400
3088
+ },
3089
+ {
3090
+ "epoch": 2.11,
3091
+ "learning_rate": 4.3205550303310996e-06,
3092
+ "loss": 1.0106,
3093
+ "step": 2405
3094
+ },
3095
+ {
3096
+ "epoch": 2.11,
3097
+ "learning_rate": 4.28153606357056e-06,
3098
+ "loss": 1.0026,
3099
+ "step": 2410
3100
+ },
3101
+ {
3102
+ "epoch": 2.12,
3103
+ "learning_rate": 4.242646036708174e-06,
3104
+ "loss": 1.0218,
3105
+ "step": 2415
3106
+ },
3107
+ {
3108
+ "epoch": 2.12,
3109
+ "learning_rate": 4.203885826636081e-06,
3110
+ "loss": 1.0281,
3111
+ "step": 2420
3112
+ },
3113
+ {
3114
+ "epoch": 2.12,
3115
+ "learning_rate": 4.165256307319321e-06,
3116
+ "loss": 1.0198,
3117
+ "step": 2425
3118
+ },
3119
+ {
3120
+ "epoch": 2.13,
3121
+ "learning_rate": 4.126758349776109e-06,
3122
+ "loss": 1.0209,
3123
+ "step": 2430
3124
+ },
3125
+ {
3126
+ "epoch": 2.13,
3127
+ "learning_rate": 4.08839282205822e-06,
3128
+ "loss": 1.0044,
3129
+ "step": 2435
3130
+ },
3131
+ {
3132
+ "epoch": 2.14,
3133
+ "learning_rate": 4.0501605892313915e-06,
3134
+ "loss": 1.0208,
3135
+ "step": 2440
3136
+ },
3137
+ {
3138
+ "epoch": 2.14,
3139
+ "learning_rate": 4.012062513355824e-06,
3140
+ "loss": 1.0162,
3141
+ "step": 2445
3142
+ },
3143
+ {
3144
+ "epoch": 2.15,
3145
+ "learning_rate": 3.9740994534667645e-06,
3146
+ "loss": 1.0009,
3147
+ "step": 2450
3148
+ },
3149
+ {
3150
+ "epoch": 2.15,
3151
+ "learning_rate": 3.936272265555101e-06,
3152
+ "loss": 1.0148,
3153
+ "step": 2455
3154
+ },
3155
+ {
3156
+ "epoch": 2.16,
3157
+ "learning_rate": 3.898581802548099e-06,
3158
+ "loss": 1.0043,
3159
+ "step": 2460
3160
+ },
3161
+ {
3162
+ "epoch": 2.16,
3163
+ "learning_rate": 3.861028914290135e-06,
3164
+ "loss": 1.0332,
3165
+ "step": 2465
3166
+ },
3167
+ {
3168
+ "epoch": 2.16,
3169
+ "learning_rate": 3.8236144475235635e-06,
3170
+ "loss": 1.0126,
3171
+ "step": 2470
3172
+ },
3173
+ {
3174
+ "epoch": 2.17,
3175
+ "learning_rate": 3.786339245869606e-06,
3176
+ "loss": 1.0256,
3177
+ "step": 2475
3178
+ },
3179
+ {
3180
+ "epoch": 2.17,
3181
+ "learning_rate": 3.749204149809329e-06,
3182
+ "loss": 1.0061,
3183
+ "step": 2480
3184
+ },
3185
+ {
3186
+ "epoch": 2.18,
3187
+ "learning_rate": 3.712209996664713e-06,
3188
+ "loss": 1.0244,
3189
+ "step": 2485
3190
+ },
3191
+ {
3192
+ "epoch": 2.18,
3193
+ "learning_rate": 3.675357620579745e-06,
3194
+ "loss": 0.9962,
3195
+ "step": 2490
3196
+ },
3197
+ {
3198
+ "epoch": 2.19,
3199
+ "learning_rate": 3.6386478525016346e-06,
3200
+ "loss": 1.013,
3201
+ "step": 2495
3202
+ },
3203
+ {
3204
+ "epoch": 2.19,
3205
+ "learning_rate": 3.6020815201620574e-06,
3206
+ "loss": 1.0171,
3207
+ "step": 2500
3208
+ },
3209
+ {
3210
+ "epoch": 2.19,
3211
+ "eval_loss": 1.121544361114502,
3212
+ "eval_runtime": 425.7468,
3213
+ "eval_samples_per_second": 37.971,
3214
+ "eval_steps_per_second": 1.188,
3215
+ "step": 2500
3216
+ },
3217
+ {
3218
+ "epoch": 2.19,
3219
+ "learning_rate": 3.5656594480585015e-06,
3220
+ "loss": 1.0092,
3221
+ "step": 2505
3222
+ },
3223
+ {
3224
+ "epoch": 2.2,
3225
+ "learning_rate": 3.529382457435686e-06,
3226
+ "loss": 1.0214,
3227
+ "step": 2510
3228
+ },
3229
+ {
3230
+ "epoch": 2.2,
3231
+ "learning_rate": 3.4932513662670175e-06,
3232
+ "loss": 1.0146,
3233
+ "step": 2515
3234
+ },
3235
+ {
3236
+ "epoch": 2.21,
3237
+ "learning_rate": 3.457266989236181e-06,
3238
+ "loss": 1.0051,
3239
+ "step": 2520
3240
+ },
3241
+ {
3242
+ "epoch": 2.21,
3243
+ "learning_rate": 3.421430137718732e-06,
3244
+ "loss": 1.0251,
3245
+ "step": 2525
3246
+ },
3247
+ {
3248
+ "epoch": 2.22,
3249
+ "learning_rate": 3.3857416197638394e-06,
3250
+ "loss": 1.0321,
3251
+ "step": 2530
3252
+ },
3253
+ {
3254
+ "epoch": 2.22,
3255
+ "learning_rate": 3.350202240076035e-06,
3256
+ "loss": 1.0146,
3257
+ "step": 2535
3258
+ },
3259
+ {
3260
+ "epoch": 2.23,
3261
+ "learning_rate": 3.314812799997085e-06,
3262
+ "loss": 1.0124,
3263
+ "step": 2540
3264
+ },
3265
+ {
3266
+ "epoch": 2.23,
3267
+ "learning_rate": 3.279574097487923e-06,
3268
+ "loss": 1.0024,
3269
+ "step": 2545
3270
+ },
3271
+ {
3272
+ "epoch": 2.23,
3273
+ "learning_rate": 3.244486927110645e-06,
3274
+ "loss": 1.0147,
3275
+ "step": 2550
3276
+ },
3277
+ {
3278
+ "epoch": 2.24,
3279
+ "learning_rate": 3.2095520800106095e-06,
3280
+ "loss": 1.0273,
3281
+ "step": 2555
3282
+ },
3283
+ {
3284
+ "epoch": 2.24,
3285
+ "learning_rate": 3.174770343898581e-06,
3286
+ "loss": 0.9955,
3287
+ "step": 2560
3288
+ },
3289
+ {
3290
+ "epoch": 2.25,
3291
+ "learning_rate": 3.14014250303299e-06,
3292
+ "loss": 1.0268,
3293
+ "step": 2565
3294
+ },
3295
+ {
3296
+ "epoch": 2.25,
3297
+ "learning_rate": 3.1056693382022208e-06,
3298
+ "loss": 1.0115,
3299
+ "step": 2570
3300
+ },
3301
+ {
3302
+ "epoch": 2.26,
3303
+ "learning_rate": 3.0713516267070375e-06,
3304
+ "loss": 1.0007,
3305
+ "step": 2575
3306
+ },
3307
+ {
3308
+ "epoch": 2.26,
3309
+ "learning_rate": 3.037190142343044e-06,
3310
+ "loss": 1.0164,
3311
+ "step": 2580
3312
+ },
3313
+ {
3314
+ "epoch": 2.26,
3315
+ "learning_rate": 3.003185655383222e-06,
3316
+ "loss": 1.0246,
3317
+ "step": 2585
3318
+ },
3319
+ {
3320
+ "epoch": 2.27,
3321
+ "learning_rate": 2.9693389325605904e-06,
3322
+ "loss": 1.0153,
3323
+ "step": 2590
3324
+ },
3325
+ {
3326
+ "epoch": 2.27,
3327
+ "learning_rate": 2.935650737050897e-06,
3328
+ "loss": 0.9947,
3329
+ "step": 2595
3330
+ },
3331
+ {
3332
+ "epoch": 2.28,
3333
+ "learning_rate": 2.9021218284554154e-06,
3334
+ "loss": 1.0112,
3335
+ "step": 2600
3336
+ },
3337
+ {
3338
+ "epoch": 2.28,
3339
+ "eval_loss": 1.1211416721343994,
3340
+ "eval_runtime": 426.0338,
3341
+ "eval_samples_per_second": 37.945,
3342
+ "eval_steps_per_second": 1.188,
3343
+ "step": 2600
3344
+ },
3345
+ {
3346
+ "epoch": 2.28,
3347
+ "learning_rate": 2.8687529627838174e-06,
3348
+ "loss": 1.0227,
3349
+ "step": 2605
3350
+ },
3351
+ {
3352
+ "epoch": 2.29,
3353
+ "learning_rate": 2.835544892437133e-06,
3354
+ "loss": 1.0153,
3355
+ "step": 2610
3356
+ },
3357
+ {
3358
+ "epoch": 2.29,
3359
+ "learning_rate": 2.8024983661907803e-06,
3360
+ "loss": 1.0114,
3361
+ "step": 2615
3362
+ },
3363
+ {
3364
+ "epoch": 2.3,
3365
+ "learning_rate": 2.7696141291776737e-06,
3366
+ "loss": 1.0165,
3367
+ "step": 2620
3368
+ },
3369
+ {
3370
+ "epoch": 2.3,
3371
+ "learning_rate": 2.7368929228714423e-06,
3372
+ "loss": 0.9873,
3373
+ "step": 2625
3374
+ },
3375
+ {
3376
+ "epoch": 2.3,
3377
+ "learning_rate": 2.704335485069688e-06,
3378
+ "loss": 1.0195,
3379
+ "step": 2630
3380
+ },
3381
+ {
3382
+ "epoch": 2.31,
3383
+ "learning_rate": 2.671942549877369e-06,
3384
+ "loss": 1.0156,
3385
+ "step": 2635
3386
+ },
3387
+ {
3388
+ "epoch": 2.31,
3389
+ "learning_rate": 2.6397148476902324e-06,
3390
+ "loss": 1.017,
3391
+ "step": 2640
3392
+ },
3393
+ {
3394
+ "epoch": 2.32,
3395
+ "learning_rate": 2.6076531051783594e-06,
3396
+ "loss": 1.0145,
3397
+ "step": 2645
3398
+ },
3399
+ {
3400
+ "epoch": 2.32,
3401
+ "learning_rate": 2.575758045269775e-06,
3402
+ "loss": 1.009,
3403
+ "step": 2650
3404
+ },
3405
+ {
3406
+ "epoch": 2.33,
3407
+ "learning_rate": 2.5440303871341365e-06,
3408
+ "loss": 1.0121,
3409
+ "step": 2655
3410
+ },
3411
+ {
3412
+ "epoch": 2.33,
3413
+ "learning_rate": 2.5124708461665326e-06,
3414
+ "loss": 1.0255,
3415
+ "step": 2660
3416
+ },
3417
+ {
3418
+ "epoch": 2.33,
3419
+ "learning_rate": 2.481080133971342e-06,
3420
+ "loss": 1.0191,
3421
+ "step": 2665
3422
+ },
3423
+ {
3424
+ "epoch": 2.34,
3425
+ "learning_rate": 2.4498589583461998e-06,
3426
+ "loss": 1.0386,
3427
+ "step": 2670
3428
+ },
3429
+ {
3430
+ "epoch": 2.34,
3431
+ "learning_rate": 2.4188080232660327e-06,
3432
+ "loss": 1.009,
3433
+ "step": 2675
3434
+ },
3435
+ {
3436
+ "epoch": 2.35,
3437
+ "learning_rate": 2.3879280288671725e-06,
3438
+ "loss": 1.0187,
3439
+ "step": 2680
3440
+ },
3441
+ {
3442
+ "epoch": 2.35,
3443
+ "learning_rate": 2.357219671431594e-06,
3444
+ "loss": 1.0017,
3445
+ "step": 2685
3446
+ },
3447
+ {
3448
+ "epoch": 2.36,
3449
+ "learning_rate": 2.326683643371197e-06,
3450
+ "loss": 1.012,
3451
+ "step": 2690
3452
+ },
3453
+ {
3454
+ "epoch": 2.36,
3455
+ "learning_rate": 2.296320633212198e-06,
3456
+ "loss": 1.022,
3457
+ "step": 2695
3458
+ },
3459
+ {
3460
+ "epoch": 2.37,
3461
+ "learning_rate": 2.266131325579606e-06,
3462
+ "loss": 1.0087,
3463
+ "step": 2700
3464
+ },
3465
+ {
3466
+ "epoch": 2.37,
3467
+ "eval_loss": 1.1207467317581177,
3468
+ "eval_runtime": 426.3134,
3469
+ "eval_samples_per_second": 37.92,
3470
+ "eval_steps_per_second": 1.187,
3471
+ "step": 2700
3472
+ },
3473
+ {
3474
+ "epoch": 2.37,
3475
+ "learning_rate": 2.2361164011817904e-06,
3476
+ "loss": 1.0097,
3477
+ "step": 2705
3478
+ },
3479
+ {
3480
+ "epoch": 2.37,
3481
+ "learning_rate": 2.20627653679513e-06,
3482
+ "loss": 1.0224,
3483
+ "step": 2710
3484
+ },
3485
+ {
3486
+ "epoch": 2.38,
3487
+ "learning_rate": 2.1766124052487436e-06,
3488
+ "loss": 1.0247,
3489
+ "step": 2715
3490
+ },
3491
+ {
3492
+ "epoch": 2.38,
3493
+ "learning_rate": 2.1471246754093333e-06,
3494
+ "loss": 1.0146,
3495
+ "step": 2720
3496
+ },
3497
+ {
3498
+ "epoch": 2.39,
3499
+ "learning_rate": 2.117814012166094e-06,
3500
+ "loss": 1.026,
3501
+ "step": 2725
3502
+ },
3503
+ {
3504
+ "epoch": 2.39,
3505
+ "learning_rate": 2.0886810764157216e-06,
3506
+ "loss": 1.0186,
3507
+ "step": 2730
3508
+ },
3509
+ {
3510
+ "epoch": 2.4,
3511
+ "learning_rate": 2.059726525047515e-06,
3512
+ "loss": 1.0157,
3513
+ "step": 2735
3514
+ },
3515
+ {
3516
+ "epoch": 2.4,
3517
+ "learning_rate": 2.0309510109285633e-06,
3518
+ "loss": 1.0008,
3519
+ "step": 2740
3520
+ },
3521
+ {
3522
+ "epoch": 2.4,
3523
+ "learning_rate": 2.0023551828890296e-06,
3524
+ "loss": 1.0221,
3525
+ "step": 2745
3526
+ },
3527
+ {
3528
+ "epoch": 2.41,
3529
+ "learning_rate": 1.9739396857075077e-06,
3530
+ "loss": 1.0119,
3531
+ "step": 2750
3532
+ },
3533
+ {
3534
+ "epoch": 2.41,
3535
+ "learning_rate": 1.9457051600964948e-06,
3536
+ "loss": 1.0127,
3537
+ "step": 2755
3538
+ },
3539
+ {
3540
+ "epoch": 2.42,
3541
+ "learning_rate": 1.9176522426879395e-06,
3542
+ "loss": 1.018,
3543
+ "step": 2760
3544
+ },
3545
+ {
3546
+ "epoch": 2.42,
3547
+ "learning_rate": 1.8897815660189e-06,
3548
+ "loss": 1.0266,
3549
+ "step": 2765
3550
+ },
3551
+ {
3552
+ "epoch": 2.43,
3553
+ "learning_rate": 1.8620937585172571e-06,
3554
+ "loss": 1.0075,
3555
+ "step": 2770
3556
+ },
3557
+ {
3558
+ "epoch": 2.43,
3559
+ "learning_rate": 1.8345894444875668e-06,
3560
+ "loss": 1.0179,
3561
+ "step": 2775
3562
+ },
3563
+ {
3564
+ "epoch": 2.44,
3565
+ "learning_rate": 1.8072692440969786e-06,
3566
+ "loss": 1.0182,
3567
+ "step": 2780
3568
+ },
3569
+ {
3570
+ "epoch": 2.44,
3571
+ "learning_rate": 1.780133773361239e-06,
3572
+ "loss": 1.0154,
3573
+ "step": 2785
3574
+ },
3575
+ {
3576
+ "epoch": 2.44,
3577
+ "learning_rate": 1.7531836441308147e-06,
3578
+ "loss": 1.0012,
3579
+ "step": 2790
3580
+ },
3581
+ {
3582
+ "epoch": 2.45,
3583
+ "learning_rate": 1.7264194640770937e-06,
3584
+ "loss": 1.0101,
3585
+ "step": 2795
3586
+ },
3587
+ {
3588
+ "epoch": 2.45,
3589
+ "learning_rate": 1.6998418366786872e-06,
3590
+ "loss": 1.0158,
3591
+ "step": 2800
3592
+ },
3593
+ {
3594
+ "epoch": 2.45,
3595
+ "eval_loss": 1.1203794479370117,
3596
+ "eval_runtime": 425.5194,
3597
+ "eval_samples_per_second": 37.991,
3598
+ "eval_steps_per_second": 1.189,
3599
+ "step": 2800
3600
+ },
3601
+ {
3602
+ "epoch": 2.46,
3603
+ "learning_rate": 1.6734513612078075e-06,
3604
+ "loss": 1.0146,
3605
+ "step": 2805
3606
+ },
3607
+ {
3608
+ "epoch": 2.46,
3609
+ "learning_rate": 1.647248632716776e-06,
3610
+ "loss": 1.0203,
3611
+ "step": 2810
3612
+ },
3613
+ {
3614
+ "epoch": 2.47,
3615
+ "learning_rate": 1.6212342420245963e-06,
3616
+ "loss": 1.0231,
3617
+ "step": 2815
3618
+ },
3619
+ {
3620
+ "epoch": 2.47,
3621
+ "learning_rate": 1.5954087757036196e-06,
3622
+ "loss": 1.0136,
3623
+ "step": 2820
3624
+ },
3625
+ {
3626
+ "epoch": 2.47,
3627
+ "learning_rate": 1.5697728160663473e-06,
3628
+ "loss": 1.0032,
3629
+ "step": 2825
3630
+ },
3631
+ {
3632
+ "epoch": 2.48,
3633
+ "learning_rate": 1.5443269411522722e-06,
3634
+ "loss": 1.0221,
3635
+ "step": 2830
3636
+ },
3637
+ {
3638
+ "epoch": 2.48,
3639
+ "learning_rate": 1.5190717247148678e-06,
3640
+ "loss": 1.0323,
3641
+ "step": 2835
3642
+ },
3643
+ {
3644
+ "epoch": 2.49,
3645
+ "learning_rate": 1.4940077362086324e-06,
3646
+ "loss": 1.0261,
3647
+ "step": 2840
3648
+ },
3649
+ {
3650
+ "epoch": 2.49,
3651
+ "learning_rate": 1.4691355407762663e-06,
3652
+ "loss": 1.0107,
3653
+ "step": 2845
3654
+ },
3655
+ {
3656
+ "epoch": 2.5,
3657
+ "learning_rate": 1.4444556992359127e-06,
3658
+ "loss": 1.0204,
3659
+ "step": 2850
3660
+ },
3661
+ {
3662
+ "epoch": 2.5,
3663
+ "learning_rate": 1.4199687680685236e-06,
3664
+ "loss": 1.0248,
3665
+ "step": 2855
3666
+ },
3667
+ {
3668
+ "epoch": 2.51,
3669
+ "learning_rate": 1.3956752994053102e-06,
3670
+ "loss": 1.0003,
3671
+ "step": 2860
3672
+ },
3673
+ {
3674
+ "epoch": 2.51,
3675
+ "learning_rate": 1.371575841015289e-06,
3676
+ "loss": 1.0359,
3677
+ "step": 2865
3678
+ },
3679
+ {
3680
+ "epoch": 2.51,
3681
+ "learning_rate": 1.3476709362929375e-06,
3682
+ "loss": 1.014,
3683
+ "step": 2870
3684
+ },
3685
+ {
3686
+ "epoch": 2.52,
3687
+ "learning_rate": 1.3239611242459328e-06,
3688
+ "loss": 0.9979,
3689
+ "step": 2875
3690
+ },
3691
+ {
3692
+ "epoch": 2.52,
3693
+ "learning_rate": 1.3004469394830089e-06,
3694
+ "loss": 1.0152,
3695
+ "step": 2880
3696
+ },
3697
+ {
3698
+ "epoch": 2.53,
3699
+ "learning_rate": 1.2771289122018948e-06,
3700
+ "loss": 1.0292,
3701
+ "step": 2885
3702
+ },
3703
+ {
3704
+ "epoch": 2.53,
3705
+ "learning_rate": 1.2540075681773557e-06,
3706
+ "loss": 1.0347,
3707
+ "step": 2890
3708
+ },
3709
+ {
3710
+ "epoch": 2.54,
3711
+ "learning_rate": 1.2310834287493546e-06,
3712
+ "loss": 1.0203,
3713
+ "step": 2895
3714
+ },
3715
+ {
3716
+ "epoch": 2.54,
3717
+ "learning_rate": 1.2083570108112764e-06,
3718
+ "loss": 1.0219,
3719
+ "step": 2900
3720
+ },
3721
+ {
3722
+ "epoch": 2.54,
3723
+ "eval_loss": 1.119923710823059,
3724
+ "eval_runtime": 424.9645,
3725
+ "eval_samples_per_second": 38.041,
3726
+ "eval_steps_per_second": 1.191,
3727
+ "step": 2900
3728
+ },
3729
+ {
3730
+ "epoch": 2.54,
3731
+ "learning_rate": 1.18582882679829e-06,
3732
+ "loss": 1.0003,
3733
+ "step": 2905
3734
+ },
3735
+ {
3736
+ "epoch": 2.55,
3737
+ "learning_rate": 1.1634993846757803e-06,
3738
+ "loss": 1.0112,
3739
+ "step": 2910
3740
+ },
3741
+ {
3742
+ "epoch": 2.55,
3743
+ "learning_rate": 1.1413691879279055e-06,
3744
+ "loss": 1.0171,
3745
+ "step": 2915
3746
+ },
3747
+ {
3748
+ "epoch": 2.56,
3749
+ "learning_rate": 1.1194387355462389e-06,
3750
+ "loss": 1.0143,
3751
+ "step": 2920
3752
+ },
3753
+ {
3754
+ "epoch": 2.56,
3755
+ "learning_rate": 1.0977085220185169e-06,
3756
+ "loss": 1.0182,
3757
+ "step": 2925
3758
+ },
3759
+ {
3760
+ "epoch": 2.57,
3761
+ "learning_rate": 1.076179037317495e-06,
3762
+ "loss": 1.0094,
3763
+ "step": 2930
3764
+ },
3765
+ {
3766
+ "epoch": 2.57,
3767
+ "learning_rate": 1.054850766889891e-06,
3768
+ "loss": 1.0258,
3769
+ "step": 2935
3770
+ },
3771
+ {
3772
+ "epoch": 2.58,
3773
+ "learning_rate": 1.033724191645451e-06,
3774
+ "loss": 1.0134,
3775
+ "step": 2940
3776
+ },
3777
+ {
3778
+ "epoch": 2.58,
3779
+ "learning_rate": 1.0127997879460916e-06,
3780
+ "loss": 1.0129,
3781
+ "step": 2945
3782
+ },
3783
+ {
3784
+ "epoch": 2.58,
3785
+ "learning_rate": 9.920780275951703e-07,
3786
+ "loss": 1.0007,
3787
+ "step": 2950
3788
+ },
3789
+ {
3790
+ "epoch": 2.59,
3791
+ "learning_rate": 9.71559377826844e-07,
3792
+ "loss": 1.0153,
3793
+ "step": 2955
3794
+ },
3795
+ {
3796
+ "epoch": 2.59,
3797
+ "learning_rate": 9.512443012955286e-07,
3798
+ "loss": 1.0032,
3799
+ "step": 2960
3800
+ },
3801
+ {
3802
+ "epoch": 2.6,
3803
+ "learning_rate": 9.311332560654806e-07,
3804
+ "loss": 1.0144,
3805
+ "step": 2965
3806
+ },
3807
+ {
3808
+ "epoch": 2.6,
3809
+ "learning_rate": 9.11226695600449e-07,
3810
+ "loss": 1.0304,
3811
+ "step": 2970
3812
+ },
3813
+ {
3814
+ "epoch": 2.61,
3815
+ "learning_rate": 8.915250687534693e-07,
3816
+ "loss": 1.0236,
3817
+ "step": 2975
3818
+ },
3819
+ {
3820
+ "epoch": 2.61,
3821
+ "learning_rate": 8.720288197567306e-07,
3822
+ "loss": 1.0137,
3823
+ "step": 2980
3824
+ },
3825
+ {
3826
+ "epoch": 2.61,
3827
+ "learning_rate": 8.527383882115625e-07,
3828
+ "loss": 1.0259,
3829
+ "step": 2985
3830
+ },
3831
+ {
3832
+ "epoch": 2.62,
3833
+ "learning_rate": 8.336542090785282e-07,
3834
+ "loss": 1.0137,
3835
+ "step": 2990
3836
+ },
3837
+ {
3838
+ "epoch": 2.62,
3839
+ "learning_rate": 8.147767126676076e-07,
3840
+ "loss": 1.0194,
3841
+ "step": 2995
3842
+ },
3843
+ {
3844
+ "epoch": 2.63,
3845
+ "learning_rate": 7.96106324628505e-07,
3846
+ "loss": 1.0024,
3847
+ "step": 3000
3848
+ },
3849
+ {
3850
+ "epoch": 2.63,
3851
+ "eval_loss": 1.1197476387023926,
3852
+ "eval_runtime": 426.0129,
3853
+ "eval_samples_per_second": 37.947,
3854
+ "eval_steps_per_second": 1.188,
3855
+ "step": 3000
3856
+ },
3857
+ {
3858
+ "epoch": 2.63,
3859
+ "learning_rate": 7.776434659410404e-07,
3860
+ "loss": 1.0164,
3861
+ "step": 3005
3862
+ },
3863
+ {
3864
+ "epoch": 2.64,
3865
+ "learning_rate": 7.593885529056633e-07,
3866
+ "loss": 1.0041,
3867
+ "step": 3010
3868
+ },
3869
+ {
3870
+ "epoch": 2.64,
3871
+ "learning_rate": 7.413419971340707e-07,
3872
+ "loss": 1.0118,
3873
+ "step": 3015
3874
+ },
3875
+ {
3876
+ "epoch": 2.65,
3877
+ "learning_rate": 7.235042055399166e-07,
3878
+ "loss": 1.0086,
3879
+ "step": 3020
3880
+ },
3881
+ {
3882
+ "epoch": 2.65,
3883
+ "learning_rate": 7.058755803296435e-07,
3884
+ "loss": 1.0265,
3885
+ "step": 3025
3886
+ },
3887
+ {
3888
+ "epoch": 2.65,
3889
+ "learning_rate": 6.884565189934089e-07,
3890
+ "loss": 1.005,
3891
+ "step": 3030
3892
+ },
3893
+ {
3894
+ "epoch": 2.66,
3895
+ "learning_rate": 6.712474142961289e-07,
3896
+ "loss": 1.0006,
3897
+ "step": 3035
3898
+ },
3899
+ {
3900
+ "epoch": 2.66,
3901
+ "learning_rate": 6.542486542686155e-07,
3902
+ "loss": 1.0181,
3903
+ "step": 3040
3904
+ },
3905
+ {
3906
+ "epoch": 2.67,
3907
+ "learning_rate": 6.374606221988266e-07,
3908
+ "loss": 1.0163,
3909
+ "step": 3045
3910
+ },
3911
+ {
3912
+ "epoch": 2.67,
3913
+ "learning_rate": 6.208836966232356e-07,
3914
+ "loss": 1.0124,
3915
+ "step": 3050
3916
+ },
3917
+ {
3918
+ "epoch": 2.68,
3919
+ "learning_rate": 6.045182513182802e-07,
3920
+ "loss": 1.024,
3921
+ "step": 3055
3922
+ },
3923
+ {
3924
+ "epoch": 2.68,
3925
+ "learning_rate": 5.883646552919464e-07,
3926
+ "loss": 1.0187,
3927
+ "step": 3060
3928
+ },
3929
+ {
3930
+ "epoch": 2.69,
3931
+ "learning_rate": 5.724232727754408e-07,
3932
+ "loss": 1.031,
3933
+ "step": 3065
3934
+ },
3935
+ {
3936
+ "epoch": 2.69,
3937
+ "learning_rate": 5.566944632149851e-07,
3938
+ "loss": 1.0107,
3939
+ "step": 3070
3940
+ },
3941
+ {
3942
+ "epoch": 2.69,
3943
+ "learning_rate": 5.411785812636983e-07,
3944
+ "loss": 1.0061,
3945
+ "step": 3075
3946
+ },
3947
+ {
3948
+ "epoch": 2.7,
3949
+ "learning_rate": 5.258759767736176e-07,
3950
+ "loss": 1.017,
3951
+ "step": 3080
3952
+ },
3953
+ {
3954
+ "epoch": 2.7,
3955
+ "learning_rate": 5.107869947877985e-07,
3956
+ "loss": 1.0316,
3957
+ "step": 3085
3958
+ },
3959
+ {
3960
+ "epoch": 2.71,
3961
+ "learning_rate": 4.959119755325337e-07,
3962
+ "loss": 1.0141,
3963
+ "step": 3090
3964
+ },
3965
+ {
3966
+ "epoch": 2.71,
3967
+ "learning_rate": 4.812512544096915e-07,
3968
+ "loss": 1.0167,
3969
+ "step": 3095
3970
+ },
3971
+ {
3972
+ "epoch": 2.72,
3973
+ "learning_rate": 4.6680516198914004e-07,
3974
+ "loss": 1.019,
3975
+ "step": 3100
3976
+ },
3977
+ {
3978
+ "epoch": 2.72,
3979
+ "eval_loss": 1.11965012550354,
3980
+ "eval_runtime": 426.3607,
3981
+ "eval_samples_per_second": 37.916,
3982
+ "eval_steps_per_second": 1.187,
3983
+ "step": 3100
3984
+ },
3985
+ {
3986
+ "epoch": 2.72,
3987
+ "learning_rate": 4.5257402400130657e-07,
3988
+ "loss": 1.0237,
3989
+ "step": 3105
3990
+ },
3991
+ {
3992
+ "epoch": 2.72,
3993
+ "learning_rate": 4.385581613298195e-07,
3994
+ "loss": 1.0134,
3995
+ "step": 3110
3996
+ },
3997
+ {
3998
+ "epoch": 2.73,
3999
+ "learning_rate": 4.2475789000428924e-07,
4000
+ "loss": 1.0134,
4001
+ "step": 3115
4002
+ },
4003
+ {
4004
+ "epoch": 2.73,
4005
+ "learning_rate": 4.111735211931689e-07,
4006
+ "loss": 1.0057,
4007
+ "step": 3120
4008
+ },
4009
+ {
4010
+ "epoch": 2.74,
4011
+ "learning_rate": 3.978053611967403e-07,
4012
+ "loss": 1.0074,
4013
+ "step": 3125
4014
+ },
4015
+ {
4016
+ "epoch": 2.74,
4017
+ "learning_rate": 3.846537114402171e-07,
4018
+ "loss": 1.0232,
4019
+ "step": 3130
4020
+ },
4021
+ {
4022
+ "epoch": 2.75,
4023
+ "learning_rate": 3.717188684669326e-07,
4024
+ "loss": 1.0263,
4025
+ "step": 3135
4026
+ },
4027
+ {
4028
+ "epoch": 2.75,
4029
+ "learning_rate": 3.5900112393166707e-07,
4030
+ "loss": 1.0025,
4031
+ "step": 3140
4032
+ },
4033
+ {
4034
+ "epoch": 2.76,
4035
+ "learning_rate": 3.4650076459406434e-07,
4036
+ "loss": 1.0045,
4037
+ "step": 3145
4038
+ },
4039
+ {
4040
+ "epoch": 2.76,
4041
+ "learning_rate": 3.3421807231216905e-07,
4042
+ "loss": 1.0212,
4043
+ "step": 3150
4044
+ },
4045
+ {
4046
+ "epoch": 2.76,
4047
+ "learning_rate": 3.221533240360708e-07,
4048
+ "loss": 1.0244,
4049
+ "step": 3155
4050
+ },
4051
+ {
4052
+ "epoch": 2.77,
4053
+ "learning_rate": 3.103067918016556e-07,
4054
+ "loss": 1.0024,
4055
+ "step": 3160
4056
+ },
4057
+ {
4058
+ "epoch": 2.77,
4059
+ "learning_rate": 2.986787427244775e-07,
4060
+ "loss": 1.0014,
4061
+ "step": 3165
4062
+ },
4063
+ {
4064
+ "epoch": 2.78,
4065
+ "learning_rate": 2.872694389937325e-07,
4066
+ "loss": 1.0153,
4067
+ "step": 3170
4068
+ },
4069
+ {
4070
+ "epoch": 2.78,
4071
+ "learning_rate": 2.7607913786634743e-07,
4072
+ "loss": 1.0185,
4073
+ "step": 3175
4074
+ },
4075
+ {
4076
+ "epoch": 2.79,
4077
+ "learning_rate": 2.6510809166118257e-07,
4078
+ "loss": 1.0194,
4079
+ "step": 3180
4080
+ },
4081
+ {
4082
+ "epoch": 2.79,
4083
+ "learning_rate": 2.5435654775333276e-07,
4084
+ "loss": 1.0098,
4085
+ "step": 3185
4086
+ },
4087
+ {
4088
+ "epoch": 2.79,
4089
+ "learning_rate": 2.4382474856856296e-07,
4090
+ "loss": 1.0244,
4091
+ "step": 3190
4092
+ },
4093
+ {
4094
+ "epoch": 2.8,
4095
+ "learning_rate": 2.3351293157783063e-07,
4096
+ "loss": 1.0191,
4097
+ "step": 3195
4098
+ },
4099
+ {
4100
+ "epoch": 2.8,
4101
+ "learning_rate": 2.2342132929193737e-07,
4102
+ "loss": 1.0135,
4103
+ "step": 3200
4104
+ },
4105
+ {
4106
+ "epoch": 2.8,
4107
+ "eval_loss": 1.11944580078125,
4108
+ "eval_runtime": 426.0948,
4109
+ "eval_samples_per_second": 37.94,
4110
+ "eval_steps_per_second": 1.188,
4111
+ "step": 3200
4112
+ },
4113
+ {
4114
+ "epoch": 2.81,
4115
+ "learning_rate": 2.135501692562836e-07,
4116
+ "loss": 1.0034,
4117
+ "step": 3205
4118
+ },
4119
+ {
4120
+ "epoch": 2.81,
4121
+ "learning_rate": 2.0389967404573997e-07,
4122
+ "loss": 1.0126,
4123
+ "step": 3210
4124
+ },
4125
+ {
4126
+ "epoch": 2.82,
4127
+ "learning_rate": 1.9447006125962952e-07,
4128
+ "loss": 1.0073,
4129
+ "step": 3215
4130
+ },
4131
+ {
4132
+ "epoch": 2.82,
4133
+ "learning_rate": 1.852615435168148e-07,
4134
+ "loss": 1.0135,
4135
+ "step": 3220
4136
+ },
4137
+ {
4138
+ "epoch": 2.83,
4139
+ "learning_rate": 1.7627432845091275e-07,
4140
+ "loss": 1.0119,
4141
+ "step": 3225
4142
+ },
4143
+ {
4144
+ "epoch": 2.83,
4145
+ "learning_rate": 1.675086187056063e-07,
4146
+ "loss": 1.0211,
4147
+ "step": 3230
4148
+ },
4149
+ {
4150
+ "epoch": 2.83,
4151
+ "learning_rate": 1.5896461193007694e-07,
4152
+ "loss": 1.0098,
4153
+ "step": 3235
4154
+ },
4155
+ {
4156
+ "epoch": 2.84,
4157
+ "learning_rate": 1.5064250077454934e-07,
4158
+ "loss": 1.0283,
4159
+ "step": 3240
4160
+ },
4161
+ {
4162
+ "epoch": 2.84,
4163
+ "learning_rate": 1.4254247288594813e-07,
4164
+ "loss": 0.9987,
4165
+ "step": 3245
4166
+ },
4167
+ {
4168
+ "epoch": 2.85,
4169
+ "learning_rate": 1.3466471090366473e-07,
4170
+ "loss": 1.0331,
4171
+ "step": 3250
4172
+ },
4173
+ {
4174
+ "epoch": 2.85,
4175
+ "learning_rate": 1.270093924554383e-07,
4176
+ "loss": 1.0144,
4177
+ "step": 3255
4178
+ },
4179
+ {
4180
+ "epoch": 2.86,
4181
+ "learning_rate": 1.195766901533546e-07,
4182
+ "loss": 1.0239,
4183
+ "step": 3260
4184
+ },
4185
+ {
4186
+ "epoch": 2.86,
4187
+ "learning_rate": 1.1236677158994902e-07,
4188
+ "loss": 1.0065,
4189
+ "step": 3265
4190
+ },
4191
+ {
4192
+ "epoch": 2.86,
4193
+ "learning_rate": 1.0537979933443188e-07,
4194
+ "loss": 1.0079,
4195
+ "step": 3270
4196
+ },
4197
+ {
4198
+ "epoch": 2.87,
4199
+ "learning_rate": 9.861593092902244e-08,
4200
+ "loss": 1.0166,
4201
+ "step": 3275
4202
+ },
4203
+ {
4204
+ "epoch": 2.87,
4205
+ "learning_rate": 9.20753188853929e-08,
4206
+ "loss": 1.0202,
4207
+ "step": 3280
4208
+ },
4209
+ {
4210
+ "epoch": 2.88,
4211
+ "learning_rate": 8.57581106812333e-08,
4212
+ "loss": 1.0039,
4213
+ "step": 3285
4214
+ },
4215
+ {
4216
+ "epoch": 2.88,
4217
+ "learning_rate": 7.966444875692536e-08,
4218
+ "loss": 1.0087,
4219
+ "step": 3290
4220
+ },
4221
+ {
4222
+ "epoch": 2.89,
4223
+ "learning_rate": 7.379447051232836e-08,
4224
+ "loss": 1.022,
4225
+ "step": 3295
4226
+ },
4227
+ {
4228
+ "epoch": 2.89,
4229
+ "learning_rate": 6.814830830368491e-08,
4230
+ "loss": 1.0094,
4231
+ "step": 3300
4232
+ },
4233
+ {
4234
+ "epoch": 2.89,
4235
+ "eval_loss": 1.1193827390670776,
4236
+ "eval_runtime": 425.5309,
4237
+ "eval_samples_per_second": 37.99,
4238
+ "eval_steps_per_second": 1.189,
4239
+ "step": 3300
4240
+ },
4241
+ {
4242
+ "epoch": 2.9,
4243
+ "learning_rate": 6.272608944063451e-08,
4244
+ "loss": 0.9974,
4245
+ "step": 3305
4246
+ },
4247
+ {
4248
+ "epoch": 2.9,
4249
+ "learning_rate": 5.7527936183342515e-08,
4250
+ "loss": 0.9925,
4251
+ "step": 3310
4252
+ },
4253
+ {
4254
+ "epoch": 2.9,
4255
+ "learning_rate": 5.255396573974447e-08,
4256
+ "loss": 1.0052,
4257
+ "step": 3315
4258
+ },
4259
+ {
4260
+ "epoch": 2.91,
4261
+ "learning_rate": 4.7804290262903895e-08,
4262
+ "loss": 1.0154,
4263
+ "step": 3320
4264
+ },
4265
+ {
4266
+ "epoch": 2.91,
4267
+ "learning_rate": 4.3279016848478685e-08,
4268
+ "loss": 1.0163,
4269
+ "step": 3325
4270
+ },
4271
+ {
4272
+ "epoch": 2.92,
4273
+ "learning_rate": 3.897824753231416e-08,
4274
+ "loss": 1.0158,
4275
+ "step": 3330
4276
+ },
4277
+ {
4278
+ "epoch": 2.92,
4279
+ "learning_rate": 3.490207928813605e-08,
4280
+ "loss": 1.0208,
4281
+ "step": 3335
4282
+ },
4283
+ {
4284
+ "epoch": 2.93,
4285
+ "learning_rate": 3.105060402536886e-08,
4286
+ "loss": 1.0156,
4287
+ "step": 3340
4288
+ },
4289
+ {
4290
+ "epoch": 2.93,
4291
+ "learning_rate": 2.7423908587056458e-08,
4292
+ "loss": 1.0159,
4293
+ "step": 3345
4294
+ },
4295
+ {
4296
+ "epoch": 2.93,
4297
+ "learning_rate": 2.4022074747912515e-08,
4298
+ "loss": 1.002,
4299
+ "step": 3350
4300
+ },
4301
+ {
4302
+ "epoch": 2.94,
4303
+ "learning_rate": 2.0845179212468646e-08,
4304
+ "loss": 1.017,
4305
+ "step": 3355
4306
+ },
4307
+ {
4308
+ "epoch": 2.94,
4309
+ "learning_rate": 1.789329361335246e-08,
4310
+ "loss": 1.0269,
4311
+ "step": 3360
4312
+ },
4313
+ {
4314
+ "epoch": 2.95,
4315
+ "learning_rate": 1.5166484509665512e-08,
4316
+ "loss": 1.0201,
4317
+ "step": 3365
4318
+ },
4319
+ {
4320
+ "epoch": 2.95,
4321
+ "learning_rate": 1.2664813385486752e-08,
4322
+ "loss": 1.0123,
4323
+ "step": 3370
4324
+ },
4325
+ {
4326
+ "epoch": 2.96,
4327
+ "learning_rate": 1.038833664848582e-08,
4328
+ "loss": 1.0125,
4329
+ "step": 3375
4330
+ },
4331
+ {
4332
+ "epoch": 2.96,
4333
+ "learning_rate": 8.337105628648535e-09,
4334
+ "loss": 1.018,
4335
+ "step": 3380
4336
+ },
4337
+ {
4338
+ "epoch": 2.97,
4339
+ "learning_rate": 6.511166577123362e-09,
4340
+ "loss": 1.0043,
4341
+ "step": 3385
4342
+ },
4343
+ {
4344
+ "epoch": 2.97,
4345
+ "learning_rate": 4.91056066517448e-09,
4346
+ "loss": 1.0165,
4347
+ "step": 3390
4348
+ },
4349
+ {
4350
+ "epoch": 2.97,
4351
+ "learning_rate": 3.5353239832580656e-09,
4352
+ "loss": 0.9968,
4353
+ "step": 3395
4354
+ },
4355
+ {
4356
+ "epoch": 2.98,
4357
+ "learning_rate": 2.3854875402029575e-09,
4358
+ "loss": 1.0284,
4359
+ "step": 3400
4360
+ },
4361
+ {
4362
+ "epoch": 2.98,
4363
+ "eval_loss": 1.119370937347412,
4364
+ "eval_runtime": 426.6328,
4365
+ "eval_samples_per_second": 37.892,
4366
+ "eval_steps_per_second": 1.186,
4367
+ "step": 3400
4368
+ },
4369
+ {
4370
+ "epoch": 2.98,
4371
+ "learning_rate": 1.4610772625156532e-09,
4372
+ "loss": 1.0189,
4373
+ "step": 3405
4374
+ },
4375
+ {
4376
+ "epoch": 2.99,
4377
+ "learning_rate": 7.621139937963317e-10,
4378
+ "loss": 1.0086,
4379
+ "step": 3410
4380
+ },
4381
+ {
4382
+ "epoch": 2.99,
4383
+ "learning_rate": 2.886134942647889e-10,
4384
+ "loss": 1.0118,
4385
+ "step": 3415
4386
+ },
4387
+ {
4388
+ "epoch": 3.0,
4389
+ "learning_rate": 4.0586440407386354e-11,
4390
+ "loss": 1.0181,
4391
+ "step": 3420
4392
+ },
4393
+ {
4394
+ "epoch": 3.0,
4395
+ "step": 3423,
4396
+ "total_flos": 3.426180505951378e+19,
4397
+ "train_loss": 1.1024342694896863,
4398
+ "train_runtime": 51497.7783,
4399
+ "train_samples_per_second": 8.508,
4400
+ "train_steps_per_second": 0.066
4401
+ }
4402
+ ],
4403
+ "logging_steps": 5,
4404
+ "max_steps": 3423,
4405
+ "num_input_tokens_seen": 0,
4406
+ "num_train_epochs": 3,
4407
+ "save_steps": 100,
4408
+ "total_flos": 3.426180505951378e+19,
4409
+ "train_batch_size": 16,
4410
+ "trial_name": null,
4411
+ "trial_params": null
4412
+ }