SKNahin committed (verified)
Commit abffb87 · 1 Parent(s): 1282d3c

Training in progress, step 400
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
config.json ADDED
@@ -0,0 +1,33 @@
+ {
+ "_name_or_path": "google/gemma-2-2b",
+ "architectures": [
+ "Gemma2ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "attn_logit_softcapping": 50.0,
+ "bos_token_id": 2,
+ "cache_implementation": "hybrid",
+ "eos_token_id": 1,
+ "final_logit_softcapping": 30.0,
+ "head_dim": 256,
+ "hidden_act": "gelu_pytorch_tanh",
+ "hidden_activation": "gelu_pytorch_tanh",
+ "hidden_size": 2304,
+ "initializer_range": 0.02,
+ "intermediate_size": 9216,
+ "max_position_embeddings": 8192,
+ "model_type": "gemma2",
+ "num_attention_heads": 8,
+ "num_hidden_layers": 26,
+ "num_key_value_heads": 4,
+ "pad_token_id": 0,
+ "query_pre_attn_scalar": 256,
+ "rms_norm_eps": 1e-06,
+ "rope_theta": 10000.0,
+ "sliding_window": 4096,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.44.2",
+ "use_cache": false,
+ "vocab_size": 256000
+ }
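
The added config.json matches the stock google/gemma-2-2b architecture (26 layers, hidden size 2304, 256000-token vocabulary), with use_cache disabled as is common for a checkpoint saved mid-training. A minimal sketch of loading the files added in this commit with transformers; the local directory name checkpoint-400 is a placeholder, not something stated in the commit:

```python
# Minimal sketch: load the checkpoint files added in this commit.
# "checkpoint-400" is a placeholder path for a local copy of the repo.
import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

config = AutoConfig.from_pretrained("checkpoint-400")        # reads config.json above
tokenizer = AutoTokenizer.from_pretrained("checkpoint-400")  # reads tokenizer.json / tokenizer_config.json
model = AutoModelForCausalLM.from_pretrained(
    "checkpoint-400",
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" in config.json
)
print(config.model_type, model.config.num_hidden_layers)     # gemma2 26
```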
model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a025cf8e388d5133fd8a48471b771c33e13a5418b14b15409d7136a7d313a65
+ size 4988025760
model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dcb68596bff6d898bb4354681a1667915cc3293b7d9156022bff46e181d0d2c2
+ size 240691728
model.safetensors.index.json ADDED
@@ -0,0 +1,295 @@
+ {
+ "metadata": {
+ "total_size": 5228683776
+ },
+ "weight_map": {
+ "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.22.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.22.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.23.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.23.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.norm.weight": "model-00002-of-00002.safetensors"
+ }
+ }
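
Per the weight map above, everything through layer 23 sits in the first shard, layer 25 and the final norm sit in the second, and layer 24 straddles the boundary. A minimal sketch of how a loader resolves a tensor through this index; the file paths assume a local copy of the repo:

```python
# Minimal sketch: resolve a tensor to its shard via model.safetensors.index.json.
# Paths assume a local copy of this repo's files.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.24.mlp.down_proj.weight"
shard = index["weight_map"][name]  # "model-00002-of-00002.safetensors" per the map above
with safe_open(shard, framework="pt", device="cpu") as reader:
    tensor = reader.get_tensor(name)
print(shard, tuple(tensor.shape))
```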
special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "additional_special_tokens": [
+ "<start_of_turn>",
+ "<end_of_turn>"
+ ],
+ "bos_token": {
+ "content": "<bos>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<eos>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
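
The special-tokens map declares <start_of_turn> and <end_of_turn> as additional special tokens alongside the usual <bos>/<eos>/<pad>/<unk>. A quick sanity check, again against a placeholder local copy of the repo:

```python
# Minimal sketch: verify the special tokens declared in special_tokens_map.json.
# "checkpoint-400" is a placeholder path for a local copy of the repo.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("checkpoint-400")
print(tok.bos_token, tok.eos_token, tok.pad_token, tok.unk_token)  # <bos> <eos> <pad> <unk>
print(tok.additional_special_tokens)                               # ['<start_of_turn>', '<end_of_turn>']
print(tok.convert_tokens_to_ids(["<pad>", "<eos>", "<bos>"]))      # 0, 1, 2 per tokenizer_config.json
```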
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f289bc05132635a8bc7aca7aa21255efd5e18f3710f43e3cdb96bcd41be4922
+ size 17525357
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:61a7b147390c64585d6c3543dd6fc636906c9af3865a5548f27f31aee1d4c8e2
+ size 4241003
tokenizer_config.json ADDED
@@ -0,0 +1,2015 @@
1
+ {
2
+ "add_bos_token": true,
3
+ "add_eos_token": false,
4
+ "added_tokens_decoder": {
5
+ "0": {
6
+ "content": "<pad>",
7
+ "lstrip": false,
8
+ "normalized": false,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "1": {
14
+ "content": "<eos>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ },
21
+ "2": {
22
+ "content": "<bos>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false,
27
+ "special": true
28
+ },
29
+ "3": {
30
+ "content": "<unk>",
31
+ "lstrip": false,
32
+ "normalized": false,
33
+ "rstrip": false,
34
+ "single_word": false,
35
+ "special": true
36
+ },
37
+ "4": {
38
+ "content": "<mask>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false,
43
+ "special": false
44
+ },
45
+ "5": {
46
+ "content": "<2mass>",
47
+ "lstrip": false,
48
+ "normalized": false,
49
+ "rstrip": false,
50
+ "single_word": false,
51
+ "special": false
52
+ },
53
+ "6": {
54
+ "content": "[@BOS@]",
55
+ "lstrip": false,
56
+ "normalized": false,
57
+ "rstrip": false,
58
+ "single_word": false,
59
+ "special": false
60
+ },
61
+ "7": {
62
+ "content": "<unused0>",
63
+ "lstrip": false,
64
+ "normalized": false,
65
+ "rstrip": false,
66
+ "single_word": false,
67
+ "special": false
68
+ },
69
+ "8": {
70
+ "content": "<unused1>",
71
+ "lstrip": false,
72
+ "normalized": false,
73
+ "rstrip": false,
74
+ "single_word": false,
75
+ "special": false
76
+ },
77
+ "9": {
78
+ "content": "<unused2>",
79
+ "lstrip": false,
80
+ "normalized": false,
81
+ "rstrip": false,
82
+ "single_word": false,
83
+ "special": false
84
+ },
85
+ "10": {
86
+ "content": "<unused3>",
87
+ "lstrip": false,
88
+ "normalized": false,
89
+ "rstrip": false,
90
+ "single_word": false,
91
+ "special": false
92
+ },
93
+ "11": {
94
+ "content": "<unused4>",
95
+ "lstrip": false,
96
+ "normalized": false,
97
+ "rstrip": false,
98
+ "single_word": false,
99
+ "special": false
100
+ },
101
+ "12": {
102
+ "content": "<unused5>",
103
+ "lstrip": false,
104
+ "normalized": false,
105
+ "rstrip": false,
106
+ "single_word": false,
107
+ "special": false
108
+ },
109
+ "13": {
110
+ "content": "<unused6>",
111
+ "lstrip": false,
112
+ "normalized": false,
113
+ "rstrip": false,
114
+ "single_word": false,
115
+ "special": false
116
+ },
117
+ "14": {
118
+ "content": "<unused7>",
119
+ "lstrip": false,
120
+ "normalized": false,
121
+ "rstrip": false,
122
+ "single_word": false,
123
+ "special": false
124
+ },
125
+ "15": {
126
+ "content": "<unused8>",
127
+ "lstrip": false,
128
+ "normalized": false,
129
+ "rstrip": false,
130
+ "single_word": false,
131
+ "special": false
132
+ },
133
+ "16": {
134
+ "content": "<unused9>",
135
+ "lstrip": false,
136
+ "normalized": false,
137
+ "rstrip": false,
138
+ "single_word": false,
139
+ "special": false
140
+ },
141
+ "17": {
142
+ "content": "<unused10>",
143
+ "lstrip": false,
144
+ "normalized": false,
145
+ "rstrip": false,
146
+ "single_word": false,
147
+ "special": false
148
+ },
149
+ "18": {
150
+ "content": "<unused11>",
151
+ "lstrip": false,
152
+ "normalized": false,
153
+ "rstrip": false,
154
+ "single_word": false,
155
+ "special": false
156
+ },
157
+ "19": {
158
+ "content": "<unused12>",
159
+ "lstrip": false,
160
+ "normalized": false,
161
+ "rstrip": false,
162
+ "single_word": false,
163
+ "special": false
164
+ },
165
+ "20": {
166
+ "content": "<unused13>",
167
+ "lstrip": false,
168
+ "normalized": false,
169
+ "rstrip": false,
170
+ "single_word": false,
171
+ "special": false
172
+ },
173
+ "21": {
174
+ "content": "<unused14>",
175
+ "lstrip": false,
176
+ "normalized": false,
177
+ "rstrip": false,
178
+ "single_word": false,
179
+ "special": false
180
+ },
181
+ "22": {
182
+ "content": "<unused15>",
183
+ "lstrip": false,
184
+ "normalized": false,
185
+ "rstrip": false,
186
+ "single_word": false,
187
+ "special": false
188
+ },
189
+ "23": {
190
+ "content": "<unused16>",
191
+ "lstrip": false,
192
+ "normalized": false,
193
+ "rstrip": false,
194
+ "single_word": false,
195
+ "special": false
196
+ },
197
+ "24": {
198
+ "content": "<unused17>",
199
+ "lstrip": false,
200
+ "normalized": false,
201
+ "rstrip": false,
202
+ "single_word": false,
203
+ "special": false
204
+ },
205
+ "25": {
206
+ "content": "<unused18>",
207
+ "lstrip": false,
208
+ "normalized": false,
209
+ "rstrip": false,
210
+ "single_word": false,
211
+ "special": false
212
+ },
213
+ "26": {
214
+ "content": "<unused19>",
215
+ "lstrip": false,
216
+ "normalized": false,
217
+ "rstrip": false,
218
+ "single_word": false,
219
+ "special": false
220
+ },
221
+ "27": {
222
+ "content": "<unused20>",
223
+ "lstrip": false,
224
+ "normalized": false,
225
+ "rstrip": false,
226
+ "single_word": false,
227
+ "special": false
228
+ },
229
+ "28": {
230
+ "content": "<unused21>",
231
+ "lstrip": false,
232
+ "normalized": false,
233
+ "rstrip": false,
234
+ "single_word": false,
235
+ "special": false
236
+ },
237
+ "29": {
238
+ "content": "<unused22>",
239
+ "lstrip": false,
240
+ "normalized": false,
241
+ "rstrip": false,
242
+ "single_word": false,
243
+ "special": false
244
+ },
245
+ "30": {
246
+ "content": "<unused23>",
247
+ "lstrip": false,
248
+ "normalized": false,
249
+ "rstrip": false,
250
+ "single_word": false,
251
+ "special": false
252
+ },
253
+ "31": {
254
+ "content": "<unused24>",
255
+ "lstrip": false,
256
+ "normalized": false,
257
+ "rstrip": false,
258
+ "single_word": false,
259
+ "special": false
260
+ },
261
+ "32": {
262
+ "content": "<unused25>",
263
+ "lstrip": false,
264
+ "normalized": false,
265
+ "rstrip": false,
266
+ "single_word": false,
267
+ "special": false
268
+ },
269
+ "33": {
270
+ "content": "<unused26>",
271
+ "lstrip": false,
272
+ "normalized": false,
273
+ "rstrip": false,
274
+ "single_word": false,
275
+ "special": false
276
+ },
277
+ "34": {
278
+ "content": "<unused27>",
279
+ "lstrip": false,
280
+ "normalized": false,
281
+ "rstrip": false,
282
+ "single_word": false,
283
+ "special": false
284
+ },
285
+ "35": {
286
+ "content": "<unused28>",
287
+ "lstrip": false,
288
+ "normalized": false,
289
+ "rstrip": false,
290
+ "single_word": false,
291
+ "special": false
292
+ },
293
+ "36": {
294
+ "content": "<unused29>",
295
+ "lstrip": false,
296
+ "normalized": false,
297
+ "rstrip": false,
298
+ "single_word": false,
299
+ "special": false
300
+ },
301
+ "37": {
302
+ "content": "<unused30>",
303
+ "lstrip": false,
304
+ "normalized": false,
305
+ "rstrip": false,
306
+ "single_word": false,
307
+ "special": false
308
+ },
309
+ "38": {
310
+ "content": "<unused31>",
311
+ "lstrip": false,
312
+ "normalized": false,
313
+ "rstrip": false,
314
+ "single_word": false,
315
+ "special": false
316
+ },
317
+ "39": {
318
+ "content": "<unused32>",
319
+ "lstrip": false,
320
+ "normalized": false,
321
+ "rstrip": false,
322
+ "single_word": false,
323
+ "special": false
324
+ },
325
+ "40": {
326
+ "content": "<unused33>",
327
+ "lstrip": false,
328
+ "normalized": false,
329
+ "rstrip": false,
330
+ "single_word": false,
331
+ "special": false
332
+ },
333
+ "41": {
334
+ "content": "<unused34>",
335
+ "lstrip": false,
336
+ "normalized": false,
337
+ "rstrip": false,
338
+ "single_word": false,
339
+ "special": false
340
+ },
341
+ "42": {
342
+ "content": "<unused35>",
343
+ "lstrip": false,
344
+ "normalized": false,
345
+ "rstrip": false,
346
+ "single_word": false,
347
+ "special": false
348
+ },
349
+ "43": {
350
+ "content": "<unused36>",
351
+ "lstrip": false,
352
+ "normalized": false,
353
+ "rstrip": false,
354
+ "single_word": false,
355
+ "special": false
356
+ },
357
+ "44": {
358
+ "content": "<unused37>",
359
+ "lstrip": false,
360
+ "normalized": false,
361
+ "rstrip": false,
362
+ "single_word": false,
363
+ "special": false
364
+ },
365
+ "45": {
366
+ "content": "<unused38>",
367
+ "lstrip": false,
368
+ "normalized": false,
369
+ "rstrip": false,
370
+ "single_word": false,
371
+ "special": false
372
+ },
373
+ "46": {
374
+ "content": "<unused39>",
375
+ "lstrip": false,
376
+ "normalized": false,
377
+ "rstrip": false,
378
+ "single_word": false,
379
+ "special": false
380
+ },
381
+ "47": {
382
+ "content": "<unused40>",
383
+ "lstrip": false,
384
+ "normalized": false,
385
+ "rstrip": false,
386
+ "single_word": false,
387
+ "special": false
388
+ },
389
+ "48": {
390
+ "content": "<unused41>",
391
+ "lstrip": false,
392
+ "normalized": false,
393
+ "rstrip": false,
394
+ "single_word": false,
395
+ "special": false
396
+ },
397
+ "49": {
398
+ "content": "<unused42>",
399
+ "lstrip": false,
400
+ "normalized": false,
401
+ "rstrip": false,
402
+ "single_word": false,
403
+ "special": false
404
+ },
405
+ "50": {
406
+ "content": "<unused43>",
407
+ "lstrip": false,
408
+ "normalized": false,
409
+ "rstrip": false,
410
+ "single_word": false,
411
+ "special": false
412
+ },
413
+ "51": {
414
+ "content": "<unused44>",
415
+ "lstrip": false,
416
+ "normalized": false,
417
+ "rstrip": false,
418
+ "single_word": false,
419
+ "special": false
420
+ },
421
+ "52": {
422
+ "content": "<unused45>",
423
+ "lstrip": false,
424
+ "normalized": false,
425
+ "rstrip": false,
426
+ "single_word": false,
427
+ "special": false
428
+ },
429
+ "53": {
430
+ "content": "<unused46>",
431
+ "lstrip": false,
432
+ "normalized": false,
433
+ "rstrip": false,
434
+ "single_word": false,
435
+ "special": false
436
+ },
437
+ "54": {
438
+ "content": "<unused47>",
439
+ "lstrip": false,
440
+ "normalized": false,
441
+ "rstrip": false,
442
+ "single_word": false,
443
+ "special": false
444
+ },
445
+ "55": {
446
+ "content": "<unused48>",
447
+ "lstrip": false,
448
+ "normalized": false,
449
+ "rstrip": false,
450
+ "single_word": false,
451
+ "special": false
452
+ },
453
+ "56": {
454
+ "content": "<unused49>",
455
+ "lstrip": false,
456
+ "normalized": false,
457
+ "rstrip": false,
458
+ "single_word": false,
459
+ "special": false
460
+ },
461
+ "57": {
462
+ "content": "<unused50>",
463
+ "lstrip": false,
464
+ "normalized": false,
465
+ "rstrip": false,
466
+ "single_word": false,
467
+ "special": false
468
+ },
469
+ "58": {
470
+ "content": "<unused51>",
471
+ "lstrip": false,
472
+ "normalized": false,
473
+ "rstrip": false,
474
+ "single_word": false,
475
+ "special": false
476
+ },
477
+ "59": {
478
+ "content": "<unused52>",
479
+ "lstrip": false,
480
+ "normalized": false,
481
+ "rstrip": false,
482
+ "single_word": false,
483
+ "special": false
484
+ },
485
+ "60": {
486
+ "content": "<unused53>",
487
+ "lstrip": false,
488
+ "normalized": false,
489
+ "rstrip": false,
490
+ "single_word": false,
491
+ "special": false
492
+ },
493
+ "61": {
494
+ "content": "<unused54>",
495
+ "lstrip": false,
496
+ "normalized": false,
497
+ "rstrip": false,
498
+ "single_word": false,
499
+ "special": false
500
+ },
501
+ "62": {
502
+ "content": "<unused55>",
503
+ "lstrip": false,
504
+ "normalized": false,
505
+ "rstrip": false,
506
+ "single_word": false,
507
+ "special": false
508
+ },
509
+ "63": {
510
+ "content": "<unused56>",
511
+ "lstrip": false,
512
+ "normalized": false,
513
+ "rstrip": false,
514
+ "single_word": false,
515
+ "special": false
516
+ },
517
+ "64": {
518
+ "content": "<unused57>",
519
+ "lstrip": false,
520
+ "normalized": false,
521
+ "rstrip": false,
522
+ "single_word": false,
523
+ "special": false
524
+ },
525
+ "65": {
526
+ "content": "<unused58>",
527
+ "lstrip": false,
528
+ "normalized": false,
529
+ "rstrip": false,
530
+ "single_word": false,
531
+ "special": false
532
+ },
533
+ "66": {
534
+ "content": "<unused59>",
535
+ "lstrip": false,
536
+ "normalized": false,
537
+ "rstrip": false,
538
+ "single_word": false,
539
+ "special": false
540
+ },
541
+ "67": {
542
+ "content": "<unused60>",
543
+ "lstrip": false,
544
+ "normalized": false,
545
+ "rstrip": false,
546
+ "single_word": false,
547
+ "special": false
548
+ },
549
+ "68": {
550
+ "content": "<unused61>",
551
+ "lstrip": false,
552
+ "normalized": false,
553
+ "rstrip": false,
554
+ "single_word": false,
555
+ "special": false
556
+ },
557
+ "69": {
558
+ "content": "<unused62>",
559
+ "lstrip": false,
560
+ "normalized": false,
561
+ "rstrip": false,
562
+ "single_word": false,
563
+ "special": false
564
+ },
565
+ "70": {
566
+ "content": "<unused63>",
567
+ "lstrip": false,
568
+ "normalized": false,
569
+ "rstrip": false,
570
+ "single_word": false,
571
+ "special": false
572
+ },
573
+ "71": {
574
+ "content": "<unused64>",
575
+ "lstrip": false,
576
+ "normalized": false,
577
+ "rstrip": false,
578
+ "single_word": false,
579
+ "special": false
580
+ },
581
+ "72": {
582
+ "content": "<unused65>",
583
+ "lstrip": false,
584
+ "normalized": false,
585
+ "rstrip": false,
586
+ "single_word": false,
587
+ "special": false
588
+ },
589
+ "73": {
590
+ "content": "<unused66>",
591
+ "lstrip": false,
592
+ "normalized": false,
593
+ "rstrip": false,
594
+ "single_word": false,
595
+ "special": false
596
+ },
597
+ "74": {
598
+ "content": "<unused67>",
599
+ "lstrip": false,
600
+ "normalized": false,
601
+ "rstrip": false,
602
+ "single_word": false,
603
+ "special": false
604
+ },
605
+ "75": {
606
+ "content": "<unused68>",
607
+ "lstrip": false,
608
+ "normalized": false,
609
+ "rstrip": false,
610
+ "single_word": false,
611
+ "special": false
612
+ },
613
+ "76": {
614
+ "content": "<unused69>",
615
+ "lstrip": false,
616
+ "normalized": false,
617
+ "rstrip": false,
618
+ "single_word": false,
619
+ "special": false
620
+ },
621
+ "77": {
622
+ "content": "<unused70>",
623
+ "lstrip": false,
624
+ "normalized": false,
625
+ "rstrip": false,
626
+ "single_word": false,
627
+ "special": false
628
+ },
629
+ "78": {
630
+ "content": "<unused71>",
631
+ "lstrip": false,
632
+ "normalized": false,
633
+ "rstrip": false,
634
+ "single_word": false,
635
+ "special": false
636
+ },
637
+ "79": {
638
+ "content": "<unused72>",
639
+ "lstrip": false,
640
+ "normalized": false,
641
+ "rstrip": false,
642
+ "single_word": false,
643
+ "special": false
644
+ },
645
+ "80": {
646
+ "content": "<unused73>",
647
+ "lstrip": false,
648
+ "normalized": false,
649
+ "rstrip": false,
650
+ "single_word": false,
651
+ "special": false
652
+ },
653
+ "81": {
654
+ "content": "<unused74>",
655
+ "lstrip": false,
656
+ "normalized": false,
657
+ "rstrip": false,
658
+ "single_word": false,
659
+ "special": false
660
+ },
661
+ "82": {
662
+ "content": "<unused75>",
663
+ "lstrip": false,
664
+ "normalized": false,
665
+ "rstrip": false,
666
+ "single_word": false,
667
+ "special": false
668
+ },
669
+ "83": {
670
+ "content": "<unused76>",
671
+ "lstrip": false,
672
+ "normalized": false,
673
+ "rstrip": false,
674
+ "single_word": false,
675
+ "special": false
676
+ },
677
+ "84": {
678
+ "content": "<unused77>",
679
+ "lstrip": false,
680
+ "normalized": false,
681
+ "rstrip": false,
682
+ "single_word": false,
683
+ "special": false
684
+ },
685
+ "85": {
686
+ "content": "<unused78>",
687
+ "lstrip": false,
688
+ "normalized": false,
689
+ "rstrip": false,
690
+ "single_word": false,
691
+ "special": false
692
+ },
693
+ "86": {
694
+ "content": "<unused79>",
695
+ "lstrip": false,
696
+ "normalized": false,
697
+ "rstrip": false,
698
+ "single_word": false,
699
+ "special": false
700
+ },
701
+ "87": {
702
+ "content": "<unused80>",
703
+ "lstrip": false,
704
+ "normalized": false,
705
+ "rstrip": false,
706
+ "single_word": false,
707
+ "special": false
708
+ },
709
+ "88": {
710
+ "content": "<unused81>",
711
+ "lstrip": false,
712
+ "normalized": false,
713
+ "rstrip": false,
714
+ "single_word": false,
715
+ "special": false
716
+ },
717
+ "89": {
718
+ "content": "<unused82>",
719
+ "lstrip": false,
720
+ "normalized": false,
721
+ "rstrip": false,
722
+ "single_word": false,
723
+ "special": false
724
+ },
725
+ "90": {
726
+ "content": "<unused83>",
727
+ "lstrip": false,
728
+ "normalized": false,
729
+ "rstrip": false,
730
+ "single_word": false,
731
+ "special": false
732
+ },
733
+ "91": {
734
+ "content": "<unused84>",
735
+ "lstrip": false,
736
+ "normalized": false,
737
+ "rstrip": false,
738
+ "single_word": false,
739
+ "special": false
740
+ },
741
+ "92": {
742
+ "content": "<unused85>",
743
+ "lstrip": false,
744
+ "normalized": false,
745
+ "rstrip": false,
746
+ "single_word": false,
747
+ "special": false
748
+ },
749
+ "93": {
750
+ "content": "<unused86>",
751
+ "lstrip": false,
752
+ "normalized": false,
753
+ "rstrip": false,
754
+ "single_word": false,
755
+ "special": false
756
+ },
757
+ "94": {
758
+ "content": "<unused87>",
759
+ "lstrip": false,
760
+ "normalized": false,
761
+ "rstrip": false,
762
+ "single_word": false,
763
+ "special": false
764
+ },
765
+ "95": {
766
+ "content": "<unused88>",
767
+ "lstrip": false,
768
+ "normalized": false,
769
+ "rstrip": false,
770
+ "single_word": false,
771
+ "special": false
772
+ },
773
+ "96": {
774
+ "content": "<unused89>",
775
+ "lstrip": false,
776
+ "normalized": false,
777
+ "rstrip": false,
778
+ "single_word": false,
779
+ "special": false
780
+ },
781
+ "97": {
782
+ "content": "<unused90>",
783
+ "lstrip": false,
784
+ "normalized": false,
785
+ "rstrip": false,
786
+ "single_word": false,
787
+ "special": false
788
+ },
789
+ "98": {
790
+ "content": "<unused91>",
791
+ "lstrip": false,
792
+ "normalized": false,
793
+ "rstrip": false,
794
+ "single_word": false,
795
+ "special": false
796
+ },
797
+ "99": {
798
+ "content": "<unused92>",
799
+ "lstrip": false,
800
+ "normalized": false,
801
+ "rstrip": false,
802
+ "single_word": false,
803
+ "special": false
804
+ },
805
+ "100": {
806
+ "content": "<unused93>",
807
+ "lstrip": false,
808
+ "normalized": false,
809
+ "rstrip": false,
810
+ "single_word": false,
811
+ "special": false
812
+ },
813
+ "101": {
814
+ "content": "<unused94>",
815
+ "lstrip": false,
816
+ "normalized": false,
817
+ "rstrip": false,
818
+ "single_word": false,
819
+ "special": false
820
+ },
821
+ "102": {
822
+ "content": "<unused95>",
823
+ "lstrip": false,
824
+ "normalized": false,
825
+ "rstrip": false,
826
+ "single_word": false,
827
+ "special": false
828
+ },
829
+ "103": {
830
+ "content": "<unused96>",
831
+ "lstrip": false,
832
+ "normalized": false,
833
+ "rstrip": false,
834
+ "single_word": false,
835
+ "special": false
836
+ },
837
+ "104": {
838
+ "content": "<unused97>",
839
+ "lstrip": false,
840
+ "normalized": false,
841
+ "rstrip": false,
842
+ "single_word": false,
843
+ "special": false
844
+ },
845
+ "105": {
846
+ "content": "<unused98>",
847
+ "lstrip": false,
848
+ "normalized": false,
849
+ "rstrip": false,
850
+ "single_word": false,
851
+ "special": false
852
+ },
853
+ "106": {
854
+ "content": "<start_of_turn>",
855
+ "lstrip": false,
856
+ "normalized": false,
857
+ "rstrip": false,
858
+ "single_word": false,
859
+ "special": true
860
+ },
861
+ "107": {
862
+ "content": "<end_of_turn>",
863
+ "lstrip": false,
864
+ "normalized": false,
865
+ "rstrip": false,
866
+ "single_word": false,
867
+ "special": true
868
+ },
869
+ "108": {
870
+ "content": "\n",
871
+ "lstrip": false,
872
+ "normalized": false,
873
+ "rstrip": false,
874
+ "single_word": false,
875
+ "special": false
876
+ },
877
+ "109": {
878
+ "content": "\n\n",
879
+ "lstrip": false,
880
+ "normalized": false,
881
+ "rstrip": false,
882
+ "single_word": false,
883
+ "special": false
884
+ },
885
+ "110": {
886
+ "content": "\n\n\n",
887
+ "lstrip": false,
888
+ "normalized": false,
889
+ "rstrip": false,
890
+ "single_word": false,
891
+ "special": false
892
+ },
893
+ "111": {
894
+ "content": "\n\n\n\n",
895
+ "lstrip": false,
896
+ "normalized": false,
897
+ "rstrip": false,
898
+ "single_word": false,
899
+ "special": false
900
+ },
901
+ "112": {
902
+ "content": "\n\n\n\n\n",
903
+ "lstrip": false,
904
+ "normalized": false,
905
+ "rstrip": false,
906
+ "single_word": false,
907
+ "special": false
908
+ },
909
+ "113": {
910
+ "content": "\n\n\n\n\n\n",
911
+ "lstrip": false,
912
+ "normalized": false,
913
+ "rstrip": false,
914
+ "single_word": false,
915
+ "special": false
916
+ },
917
+ "114": {
918
+ "content": "\n\n\n\n\n\n\n",
919
+ "lstrip": false,
920
+ "normalized": false,
921
+ "rstrip": false,
922
+ "single_word": false,
923
+ "special": false
924
+ },
925
+ "115": {
926
+ "content": "\n\n\n\n\n\n\n\n",
927
+ "lstrip": false,
928
+ "normalized": false,
929
+ "rstrip": false,
930
+ "single_word": false,
931
+ "special": false
932
+ },
933
+ "116": {
934
+ "content": "\n\n\n\n\n\n\n\n\n",
935
+ "lstrip": false,
936
+ "normalized": false,
937
+ "rstrip": false,
938
+ "single_word": false,
939
+ "special": false
940
+ },
941
+ "117": {
942
+ "content": "\n\n\n\n\n\n\n\n\n\n",
943
+ "lstrip": false,
944
+ "normalized": false,
945
+ "rstrip": false,
946
+ "single_word": false,
947
+ "special": false
948
+ },
949
+ "118": {
950
+ "content": "\n\n\n\n\n\n\n\n\n\n\n",
951
+ "lstrip": false,
952
+ "normalized": false,
953
+ "rstrip": false,
954
+ "single_word": false,
955
+ "special": false
956
+ },
957
+ "119": {
958
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n",
959
+ "lstrip": false,
960
+ "normalized": false,
961
+ "rstrip": false,
962
+ "single_word": false,
963
+ "special": false
964
+ },
965
+ "120": {
966
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n",
967
+ "lstrip": false,
968
+ "normalized": false,
969
+ "rstrip": false,
970
+ "single_word": false,
971
+ "special": false
972
+ },
973
+ "121": {
974
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
975
+ "lstrip": false,
976
+ "normalized": false,
977
+ "rstrip": false,
978
+ "single_word": false,
979
+ "special": false
980
+ },
981
+ "122": {
982
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
983
+ "lstrip": false,
984
+ "normalized": false,
985
+ "rstrip": false,
986
+ "single_word": false,
987
+ "special": false
988
+ },
989
+ "123": {
990
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
991
+ "lstrip": false,
992
+ "normalized": false,
993
+ "rstrip": false,
994
+ "single_word": false,
995
+ "special": false
996
+ },
997
+ "124": {
998
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
999
+ "lstrip": false,
1000
+ "normalized": false,
1001
+ "rstrip": false,
1002
+ "single_word": false,
1003
+ "special": false
1004
+ },
1005
+ "125": {
1006
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1007
+ "lstrip": false,
1008
+ "normalized": false,
1009
+ "rstrip": false,
1010
+ "single_word": false,
1011
+ "special": false
1012
+ },
1013
+ "126": {
1014
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1015
+ "lstrip": false,
1016
+ "normalized": false,
1017
+ "rstrip": false,
1018
+ "single_word": false,
1019
+ "special": false
1020
+ },
1021
+ "127": {
1022
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1023
+ "lstrip": false,
1024
+ "normalized": false,
1025
+ "rstrip": false,
1026
+ "single_word": false,
1027
+ "special": false
1028
+ },
1029
+ "128": {
1030
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1031
+ "lstrip": false,
1032
+ "normalized": false,
1033
+ "rstrip": false,
1034
+ "single_word": false,
1035
+ "special": false
1036
+ },
1037
+ "129": {
1038
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1039
+ "lstrip": false,
1040
+ "normalized": false,
1041
+ "rstrip": false,
1042
+ "single_word": false,
1043
+ "special": false
1044
+ },
1045
+ "130": {
1046
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1047
+ "lstrip": false,
1048
+ "normalized": false,
1049
+ "rstrip": false,
1050
+ "single_word": false,
1051
+ "special": false
1052
+ },
1053
+ "131": {
1054
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1055
+ "lstrip": false,
1056
+ "normalized": false,
1057
+ "rstrip": false,
1058
+ "single_word": false,
1059
+ "special": false
1060
+ },
1061
+ "132": {
1062
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1063
+ "lstrip": false,
1064
+ "normalized": false,
1065
+ "rstrip": false,
1066
+ "single_word": false,
1067
+ "special": false
1068
+ },
1069
+ "133": {
1070
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1071
+ "lstrip": false,
1072
+ "normalized": false,
1073
+ "rstrip": false,
1074
+ "single_word": false,
1075
+ "special": false
1076
+ },
1077
+ "134": {
1078
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1079
+ "lstrip": false,
1080
+ "normalized": false,
1081
+ "rstrip": false,
1082
+ "single_word": false,
1083
+ "special": false
1084
+ },
1085
+ "135": {
1086
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1087
+ "lstrip": false,
1088
+ "normalized": false,
1089
+ "rstrip": false,
1090
+ "single_word": false,
1091
+ "special": false
1092
+ },
1093
+ "136": {
1094
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1095
+ "lstrip": false,
1096
+ "normalized": false,
1097
+ "rstrip": false,
1098
+ "single_word": false,
1099
+ "special": false
1100
+ },
1101
+ "137": {
1102
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1103
+ "lstrip": false,
1104
+ "normalized": false,
1105
+ "rstrip": false,
1106
+ "single_word": false,
1107
+ "special": false
1108
+ },
1109
+ "138": {
1110
+ "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
1111
+ "lstrip": false,
1112
+ "normalized": false,
1113
+ "rstrip": false,
1114
+ "single_word": false,
1115
+ "special": false
1116
+ },
1117
+ "139": {
1118
+ "content": "▁▁",
1119
+ "lstrip": false,
1120
+ "normalized": false,
1121
+ "rstrip": false,
1122
+ "single_word": false,
1123
+ "special": false
1124
+ },
1125
+ "140": {
1126
+ "content": "▁▁▁",
1127
+ "lstrip": false,
1128
+ "normalized": false,
1129
+ "rstrip": false,
1130
+ "single_word": false,
1131
+ "special": false
1132
+ },
1133
+ "141": {
1134
+ "content": "▁▁▁▁",
1135
+ "lstrip": false,
1136
+ "normalized": false,
1137
+ "rstrip": false,
1138
+ "single_word": false,
1139
+ "special": false
1140
+ },
1141
+ "142": {
1142
+ "content": "▁▁▁▁▁",
1143
+ "lstrip": false,
1144
+ "normalized": false,
1145
+ "rstrip": false,
1146
+ "single_word": false,
1147
+ "special": false
1148
+ },
1149
+ "143": {
1150
+ "content": "▁▁▁▁▁▁",
1151
+ "lstrip": false,
1152
+ "normalized": false,
1153
+ "rstrip": false,
1154
+ "single_word": false,
1155
+ "special": false
1156
+ },
1157
+ "144": {
1158
+ "content": "▁▁▁▁▁▁▁",
1159
+ "lstrip": false,
1160
+ "normalized": false,
1161
+ "rstrip": false,
1162
+ "single_word": false,
1163
+ "special": false
1164
+ },
1165
+ "145": {
1166
+ "content": "▁▁▁▁▁▁▁▁",
1167
+ "lstrip": false,
1168
+ "normalized": false,
1169
+ "rstrip": false,
1170
+ "single_word": false,
1171
+ "special": false
1172
+ },
1173
+ "146": {
1174
+ "content": "▁▁▁▁▁▁▁▁▁",
1175
+ "lstrip": false,
1176
+ "normalized": false,
1177
+ "rstrip": false,
1178
+ "single_word": false,
1179
+ "special": false
1180
+ },
1181
+ "147": {
1182
+ "content": "▁▁▁▁▁▁▁▁▁▁",
1183
+ "lstrip": false,
1184
+ "normalized": false,
1185
+ "rstrip": false,
1186
+ "single_word": false,
1187
+ "special": false
1188
+ },
1189
+ "148": {
1190
+ "content": "▁▁▁▁▁▁▁▁▁▁▁",
1191
+ "lstrip": false,
1192
+ "normalized": false,
1193
+ "rstrip": false,
1194
+ "single_word": false,
1195
+ "special": false
1196
+ },
1197
+ "149": {
1198
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁",
1199
+ "lstrip": false,
1200
+ "normalized": false,
1201
+ "rstrip": false,
1202
+ "single_word": false,
1203
+ "special": false
1204
+ },
1205
+ "150": {
1206
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁",
1207
+ "lstrip": false,
1208
+ "normalized": false,
1209
+ "rstrip": false,
1210
+ "single_word": false,
1211
+ "special": false
1212
+ },
1213
+ "151": {
1214
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1215
+ "lstrip": false,
1216
+ "normalized": false,
1217
+ "rstrip": false,
1218
+ "single_word": false,
1219
+ "special": false
1220
+ },
1221
+ "152": {
1222
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1223
+ "lstrip": false,
1224
+ "normalized": false,
1225
+ "rstrip": false,
1226
+ "single_word": false,
1227
+ "special": false
1228
+ },
1229
+ "153": {
1230
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1231
+ "lstrip": false,
1232
+ "normalized": false,
1233
+ "rstrip": false,
1234
+ "single_word": false,
1235
+ "special": false
1236
+ },
1237
+ "154": {
1238
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1239
+ "lstrip": false,
1240
+ "normalized": false,
1241
+ "rstrip": false,
1242
+ "single_word": false,
1243
+ "special": false
1244
+ },
1245
+ "155": {
1246
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1247
+ "lstrip": false,
1248
+ "normalized": false,
1249
+ "rstrip": false,
1250
+ "single_word": false,
1251
+ "special": false
1252
+ },
1253
+ "156": {
1254
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1255
+ "lstrip": false,
1256
+ "normalized": false,
1257
+ "rstrip": false,
1258
+ "single_word": false,
1259
+ "special": false
1260
+ },
1261
+ "157": {
1262
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1263
+ "lstrip": false,
1264
+ "normalized": false,
1265
+ "rstrip": false,
1266
+ "single_word": false,
1267
+ "special": false
1268
+ },
1269
+ "158": {
1270
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1271
+ "lstrip": false,
1272
+ "normalized": false,
1273
+ "rstrip": false,
1274
+ "single_word": false,
1275
+ "special": false
1276
+ },
1277
+ "159": {
1278
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1279
+ "lstrip": false,
1280
+ "normalized": false,
1281
+ "rstrip": false,
1282
+ "single_word": false,
1283
+ "special": false
1284
+ },
1285
+ "160": {
1286
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1287
+ "lstrip": false,
1288
+ "normalized": false,
1289
+ "rstrip": false,
1290
+ "single_word": false,
1291
+ "special": false
1292
+ },
1293
+ "161": {
1294
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1295
+ "lstrip": false,
1296
+ "normalized": false,
1297
+ "rstrip": false,
1298
+ "single_word": false,
1299
+ "special": false
1300
+ },
1301
+ "162": {
1302
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1303
+ "lstrip": false,
1304
+ "normalized": false,
1305
+ "rstrip": false,
1306
+ "single_word": false,
1307
+ "special": false
1308
+ },
1309
+ "163": {
1310
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1311
+ "lstrip": false,
1312
+ "normalized": false,
1313
+ "rstrip": false,
1314
+ "single_word": false,
1315
+ "special": false
1316
+ },
1317
+ "164": {
1318
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1319
+ "lstrip": false,
1320
+ "normalized": false,
1321
+ "rstrip": false,
1322
+ "single_word": false,
1323
+ "special": false
1324
+ },
1325
+ "165": {
1326
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1327
+ "lstrip": false,
1328
+ "normalized": false,
1329
+ "rstrip": false,
1330
+ "single_word": false,
1331
+ "special": false
1332
+ },
1333
+ "166": {
1334
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1335
+ "lstrip": false,
1336
+ "normalized": false,
1337
+ "rstrip": false,
1338
+ "single_word": false,
1339
+ "special": false
1340
+ },
1341
+ "167": {
1342
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1343
+ "lstrip": false,
1344
+ "normalized": false,
1345
+ "rstrip": false,
1346
+ "single_word": false,
1347
+ "special": false
1348
+ },
1349
+ "168": {
1350
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1351
+ "lstrip": false,
1352
+ "normalized": false,
1353
+ "rstrip": false,
1354
+ "single_word": false,
1355
+ "special": false
1356
+ },
1357
+ "169": {
1358
+ "content": "<table>",
1359
+ "lstrip": false,
1360
+ "normalized": false,
1361
+ "rstrip": false,
1362
+ "single_word": false,
1363
+ "special": false
1364
+ },
1365
+ "170": {
1366
+ "content": "<caption>",
1367
+ "lstrip": false,
1368
+ "normalized": false,
1369
+ "rstrip": false,
1370
+ "single_word": false,
1371
+ "special": false
1372
+ },
1373
+ "171": {
1374
+ "content": "<thead>",
1375
+ "lstrip": false,
1376
+ "normalized": false,
1377
+ "rstrip": false,
1378
+ "single_word": false,
1379
+ "special": false
1380
+ },
1381
+ "172": {
1382
+ "content": "<tbody>",
1383
+ "lstrip": false,
1384
+ "normalized": false,
1385
+ "rstrip": false,
1386
+ "single_word": false,
1387
+ "special": false
1388
+ },
1389
+ "173": {
1390
+ "content": "<tfoot>",
1391
+ "lstrip": false,
1392
+ "normalized": false,
1393
+ "rstrip": false,
1394
+ "single_word": false,
1395
+ "special": false
1396
+ },
1397
+ "174": {
1398
+ "content": "<tr>",
1399
+ "lstrip": false,
1400
+ "normalized": false,
1401
+ "rstrip": false,
1402
+ "single_word": false,
1403
+ "special": false
1404
+ },
1405
+ "175": {
1406
+ "content": "<th>",
1407
+ "lstrip": false,
1408
+ "normalized": false,
1409
+ "rstrip": false,
1410
+ "single_word": false,
1411
+ "special": false
1412
+ },
1413
+ "176": {
1414
+ "content": "<td>",
1415
+ "lstrip": false,
1416
+ "normalized": false,
1417
+ "rstrip": false,
1418
+ "single_word": false,
1419
+ "special": false
1420
+ },
1421
+ "177": {
1422
+ "content": "</table>",
1423
+ "lstrip": false,
1424
+ "normalized": false,
1425
+ "rstrip": false,
1426
+ "single_word": false,
1427
+ "special": false
1428
+ },
1429
+ "178": {
1430
+ "content": "</caption>",
1431
+ "lstrip": false,
1432
+ "normalized": false,
1433
+ "rstrip": false,
1434
+ "single_word": false,
1435
+ "special": false
1436
+ },
1437
+ "179": {
1438
+ "content": "</thead>",
1439
+ "lstrip": false,
1440
+ "normalized": false,
1441
+ "rstrip": false,
1442
+ "single_word": false,
1443
+ "special": false
1444
+ },
1445
+ "180": {
1446
+ "content": "</tbody>",
1447
+ "lstrip": false,
1448
+ "normalized": false,
1449
+ "rstrip": false,
1450
+ "single_word": false,
1451
+ "special": false
1452
+ },
1453
+ "181": {
1454
+ "content": "</tfoot>",
1455
+ "lstrip": false,
1456
+ "normalized": false,
1457
+ "rstrip": false,
1458
+ "single_word": false,
1459
+ "special": false
1460
+ },
1461
+ "182": {
1462
+ "content": "</tr>",
1463
+ "lstrip": false,
1464
+ "normalized": false,
1465
+ "rstrip": false,
1466
+ "single_word": false,
1467
+ "special": false
1468
+ },
1469
+ "183": {
1470
+ "content": "</th>",
1471
+ "lstrip": false,
1472
+ "normalized": false,
1473
+ "rstrip": false,
1474
+ "single_word": false,
1475
+ "special": false
1476
+ },
1477
+ "184": {
1478
+ "content": "</td>",
1479
+ "lstrip": false,
1480
+ "normalized": false,
1481
+ "rstrip": false,
1482
+ "single_word": false,
1483
+ "special": false
1484
+ },
1485
+ "185": {
1486
+ "content": "<h1>",
1487
+ "lstrip": false,
1488
+ "normalized": false,
1489
+ "rstrip": false,
1490
+ "single_word": false,
1491
+ "special": false
1492
+ },
1493
+ "186": {
1494
+ "content": "<h2>",
1495
+ "lstrip": false,
1496
+ "normalized": false,
1497
+ "rstrip": false,
1498
+ "single_word": false,
1499
+ "special": false
1500
+ },
1501
+ "187": {
1502
+ "content": "<h3>",
1503
+ "lstrip": false,
1504
+ "normalized": false,
1505
+ "rstrip": false,
1506
+ "single_word": false,
1507
+ "special": false
1508
+ },
1509
+ "188": {
1510
+ "content": "<h4>",
1511
+ "lstrip": false,
1512
+ "normalized": false,
1513
+ "rstrip": false,
1514
+ "single_word": false,
1515
+ "special": false
1516
+ },
1517
+ "189": {
1518
+ "content": "<h5>",
1519
+ "lstrip": false,
1520
+ "normalized": false,
1521
+ "rstrip": false,
1522
+ "single_word": false,
1523
+ "special": false
1524
+ },
1525
+ "190": {
1526
+ "content": "<h6>",
1527
+ "lstrip": false,
1528
+ "normalized": false,
1529
+ "rstrip": false,
1530
+ "single_word": false,
1531
+ "special": false
1532
+ },
1533
+ "191": {
1534
+ "content": "<blockquote>",
1535
+ "lstrip": false,
1536
+ "normalized": false,
1537
+ "rstrip": false,
1538
+ "single_word": false,
1539
+ "special": false
1540
+ },
1541
+ "192": {
1542
+ "content": "</h1>",
1543
+ "lstrip": false,
1544
+ "normalized": false,
1545
+ "rstrip": false,
1546
+ "single_word": false,
1547
+ "special": false
1548
+ },
1549
+ "193": {
1550
+ "content": "</h2>",
1551
+ "lstrip": false,
1552
+ "normalized": false,
1553
+ "rstrip": false,
1554
+ "single_word": false,
1555
+ "special": false
1556
+ },
1557
+ "194": {
1558
+ "content": "</h3>",
1559
+ "lstrip": false,
1560
+ "normalized": false,
1561
+ "rstrip": false,
1562
+ "single_word": false,
1563
+ "special": false
1564
+ },
1565
+ "195": {
1566
+ "content": "</h4>",
1567
+ "lstrip": false,
1568
+ "normalized": false,
1569
+ "rstrip": false,
1570
+ "single_word": false,
1571
+ "special": false
1572
+ },
1573
+ "196": {
1574
+ "content": "</h5>",
1575
+ "lstrip": false,
1576
+ "normalized": false,
1577
+ "rstrip": false,
1578
+ "single_word": false,
1579
+ "special": false
1580
+ },
1581
+ "197": {
1582
+ "content": "</h6>",
1583
+ "lstrip": false,
1584
+ "normalized": false,
1585
+ "rstrip": false,
1586
+ "single_word": false,
1587
+ "special": false
1588
+ },
1589
+ "198": {
1590
+ "content": "</blockquote>",
1591
+ "lstrip": false,
1592
+ "normalized": false,
1593
+ "rstrip": false,
1594
+ "single_word": false,
1595
+ "special": false
1596
+ },
1597
+ "199": {
1598
+ "content": "<strong>",
1599
+ "lstrip": false,
1600
+ "normalized": false,
1601
+ "rstrip": false,
1602
+ "single_word": false,
1603
+ "special": false
1604
+ },
1605
+ "200": {
1606
+ "content": "<em>",
1607
+ "lstrip": false,
1608
+ "normalized": false,
1609
+ "rstrip": false,
1610
+ "single_word": false,
1611
+ "special": false
1612
+ },
1613
+ "201": {
1614
+ "content": "<b>",
1615
+ "lstrip": false,
1616
+ "normalized": false,
1617
+ "rstrip": false,
1618
+ "single_word": false,
1619
+ "special": false
1620
+ },
1621
+ "202": {
1622
+ "content": "<i>",
1623
+ "lstrip": false,
1624
+ "normalized": false,
1625
+ "rstrip": false,
1626
+ "single_word": false,
1627
+ "special": false
1628
+ },
1629
+ "203": {
1630
+ "content": "<u>",
1631
+ "lstrip": false,
1632
+ "normalized": false,
1633
+ "rstrip": false,
1634
+ "single_word": false,
1635
+ "special": false
1636
+ },
1637
+ "204": {
1638
+ "content": "<s>",
1639
+ "lstrip": false,
1640
+ "normalized": false,
1641
+ "rstrip": false,
1642
+ "single_word": false,
1643
+ "special": false
1644
+ },
1645
+ "205": {
1646
+ "content": "<sub>",
1647
+ "lstrip": false,
1648
+ "normalized": false,
1649
+ "rstrip": false,
1650
+ "single_word": false,
1651
+ "special": false
1652
+ },
1653
+ "206": {
1654
+ "content": "<sup>",
1655
+ "lstrip": false,
1656
+ "normalized": false,
1657
+ "rstrip": false,
1658
+ "single_word": false,
1659
+ "special": false
1660
+ },
1661
+ "207": {
1662
+ "content": "<code>",
1663
+ "lstrip": false,
1664
+ "normalized": false,
1665
+ "rstrip": false,
1666
+ "single_word": false,
1667
+ "special": false
1668
+ },
1669
+ "208": {
1670
+ "content": "</strong>",
1671
+ "lstrip": false,
1672
+ "normalized": false,
1673
+ "rstrip": false,
1674
+ "single_word": false,
1675
+ "special": false
1676
+ },
1677
+ "209": {
1678
+ "content": "</em>",
1679
+ "lstrip": false,
1680
+ "normalized": false,
1681
+ "rstrip": false,
1682
+ "single_word": false,
1683
+ "special": false
1684
+ },
1685
+ "210": {
1686
+ "content": "</b>",
1687
+ "lstrip": false,
1688
+ "normalized": false,
1689
+ "rstrip": false,
1690
+ "single_word": false,
1691
+ "special": false
1692
+ },
1693
+ "211": {
1694
+ "content": "</i>",
1695
+ "lstrip": false,
1696
+ "normalized": false,
1697
+ "rstrip": false,
1698
+ "single_word": false,
1699
+ "special": false
1700
+ },
1701
+ "212": {
1702
+ "content": "</u>",
1703
+ "lstrip": false,
1704
+ "normalized": false,
1705
+ "rstrip": false,
1706
+ "single_word": false,
1707
+ "special": false
1708
+ },
1709
+ "213": {
1710
+ "content": "</s>",
1711
+ "lstrip": false,
1712
+ "normalized": false,
1713
+ "rstrip": false,
1714
+ "single_word": false,
1715
+ "special": false
1716
+ },
1717
+ "214": {
1718
+ "content": "</sub>",
1719
+ "lstrip": false,
1720
+ "normalized": false,
1721
+ "rstrip": false,
1722
+ "single_word": false,
1723
+ "special": false
1724
+ },
1725
+ "215": {
1726
+ "content": "</sup>",
1727
+ "lstrip": false,
1728
+ "normalized": false,
1729
+ "rstrip": false,
1730
+ "single_word": false,
1731
+ "special": false
1732
+ },
1733
+ "216": {
1734
+ "content": "</code>",
1735
+ "lstrip": false,
1736
+ "normalized": false,
1737
+ "rstrip": false,
1738
+ "single_word": false,
1739
+ "special": false
1740
+ },
1741
+ "255968": {
1742
+ "content": "[toxicity=0]",
1743
+ "lstrip": false,
1744
+ "normalized": false,
1745
+ "rstrip": false,
1746
+ "single_word": false,
1747
+ "special": false
1748
+ },
1749
+ "255969": {
1750
+ "content": "\t\t",
1751
+ "lstrip": false,
1752
+ "normalized": false,
1753
+ "rstrip": false,
1754
+ "single_word": false,
1755
+ "special": false
1756
+ },
1757
+ "255970": {
1758
+ "content": "\t\t\t",
1759
+ "lstrip": false,
1760
+ "normalized": false,
1761
+ "rstrip": false,
1762
+ "single_word": false,
1763
+ "special": false
1764
+ },
1765
+ "255971": {
1766
+ "content": "\t\t\t\t",
1767
+ "lstrip": false,
1768
+ "normalized": false,
1769
+ "rstrip": false,
1770
+ "single_word": false,
1771
+ "special": false
1772
+ },
1773
+ "255972": {
1774
+ "content": "\t\t\t\t\t",
1775
+ "lstrip": false,
1776
+ "normalized": false,
1777
+ "rstrip": false,
1778
+ "single_word": false,
1779
+ "special": false
1780
+ },
1781
+ "255973": {
1782
+ "content": "\t\t\t\t\t\t",
1783
+ "lstrip": false,
1784
+ "normalized": false,
1785
+ "rstrip": false,
1786
+ "single_word": false,
1787
+ "special": false
1788
+ },
1789
+ "255974": {
1790
+ "content": "\t\t\t\t\t\t\t",
1791
+ "lstrip": false,
1792
+ "normalized": false,
1793
+ "rstrip": false,
1794
+ "single_word": false,
1795
+ "special": false
1796
+ },
1797
+ "255975": {
1798
+ "content": "\t\t\t\t\t\t\t\t",
1799
+ "lstrip": false,
1800
+ "normalized": false,
1801
+ "rstrip": false,
1802
+ "single_word": false,
1803
+ "special": false
1804
+ },
1805
+ "255976": {
1806
+ "content": "\t\t\t\t\t\t\t\t\t",
1807
+ "lstrip": false,
1808
+ "normalized": false,
1809
+ "rstrip": false,
1810
+ "single_word": false,
1811
+ "special": false
1812
+ },
1813
+ "255977": {
1814
+ "content": "\t\t\t\t\t\t\t\t\t\t",
1815
+ "lstrip": false,
1816
+ "normalized": false,
1817
+ "rstrip": false,
1818
+ "single_word": false,
1819
+ "special": false
1820
+ },
1821
+ "255978": {
1822
+ "content": "\t\t\t\t\t\t\t\t\t\t\t",
1823
+ "lstrip": false,
1824
+ "normalized": false,
1825
+ "rstrip": false,
1826
+ "single_word": false,
1827
+ "special": false
1828
+ },
1829
+ "255979": {
1830
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t",
1831
+ "lstrip": false,
1832
+ "normalized": false,
1833
+ "rstrip": false,
1834
+ "single_word": false,
1835
+ "special": false
1836
+ },
1837
+ "255980": {
1838
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t",
1839
+ "lstrip": false,
1840
+ "normalized": false,
1841
+ "rstrip": false,
1842
+ "single_word": false,
1843
+ "special": false
1844
+ },
1845
+ "255981": {
1846
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1847
+ "lstrip": false,
1848
+ "normalized": false,
1849
+ "rstrip": false,
1850
+ "single_word": false,
1851
+ "special": false
1852
+ },
1853
+ "255982": {
1854
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1855
+ "lstrip": false,
1856
+ "normalized": false,
1857
+ "rstrip": false,
1858
+ "single_word": false,
1859
+ "special": false
1860
+ },
1861
+ "255983": {
1862
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1863
+ "lstrip": false,
1864
+ "normalized": false,
1865
+ "rstrip": false,
1866
+ "single_word": false,
1867
+ "special": false
1868
+ },
1869
+ "255984": {
1870
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1871
+ "lstrip": false,
1872
+ "normalized": false,
1873
+ "rstrip": false,
1874
+ "single_word": false,
1875
+ "special": false
1876
+ },
1877
+ "255985": {
1878
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1879
+ "lstrip": false,
1880
+ "normalized": false,
1881
+ "rstrip": false,
1882
+ "single_word": false,
1883
+ "special": false
1884
+ },
1885
+ "255986": {
1886
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1887
+ "lstrip": false,
1888
+ "normalized": false,
1889
+ "rstrip": false,
1890
+ "single_word": false,
1891
+ "special": false
1892
+ },
1893
+ "255987": {
1894
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1895
+ "lstrip": false,
1896
+ "normalized": false,
1897
+ "rstrip": false,
1898
+ "single_word": false,
1899
+ "special": false
1900
+ },
1901
+ "255988": {
1902
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1903
+ "lstrip": false,
1904
+ "normalized": false,
1905
+ "rstrip": false,
1906
+ "single_word": false,
1907
+ "special": false
1908
+ },
1909
+ "255989": {
1910
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1911
+ "lstrip": false,
1912
+ "normalized": false,
1913
+ "rstrip": false,
1914
+ "single_word": false,
1915
+ "special": false
1916
+ },
1917
+ "255990": {
1918
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1919
+ "lstrip": false,
1920
+ "normalized": false,
1921
+ "rstrip": false,
1922
+ "single_word": false,
1923
+ "special": false
1924
+ },
1925
+ "255991": {
1926
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1927
+ "lstrip": false,
1928
+ "normalized": false,
1929
+ "rstrip": false,
1930
+ "single_word": false,
1931
+ "special": false
1932
+ },
1933
+ "255992": {
1934
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1935
+ "lstrip": false,
1936
+ "normalized": false,
1937
+ "rstrip": false,
1938
+ "single_word": false,
1939
+ "special": false
1940
+ },
1941
+ "255993": {
1942
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1943
+ "lstrip": false,
1944
+ "normalized": false,
1945
+ "rstrip": false,
1946
+ "single_word": false,
1947
+ "special": false
1948
+ },
1949
+ "255994": {
1950
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1951
+ "lstrip": false,
1952
+ "normalized": false,
1953
+ "rstrip": false,
1954
+ "single_word": false,
1955
+ "special": false
1956
+ },
1957
+ "255995": {
1958
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1959
+ "lstrip": false,
1960
+ "normalized": false,
1961
+ "rstrip": false,
1962
+ "single_word": false,
1963
+ "special": false
1964
+ },
1965
+ "255996": {
1966
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1967
+ "lstrip": false,
1968
+ "normalized": false,
1969
+ "rstrip": false,
1970
+ "single_word": false,
1971
+ "special": false
1972
+ },
1973
+ "255997": {
1974
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1975
+ "lstrip": false,
1976
+ "normalized": false,
1977
+ "rstrip": false,
1978
+ "single_word": false,
1979
+ "special": false
1980
+ },
1981
+ "255998": {
1982
+ "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
1983
+ "lstrip": false,
1984
+ "normalized": false,
1985
+ "rstrip": false,
1986
+ "single_word": false,
1987
+ "special": false
1988
+ },
1989
+ "255999": {
1990
+ "content": "<unused99>",
1991
+ "lstrip": false,
1992
+ "normalized": false,
1993
+ "rstrip": false,
1994
+ "single_word": false,
1995
+ "special": false
1996
+ }
1997
+ },
1998
+ "additional_special_tokens": [
1999
+ "<start_of_turn>",
2000
+ "<end_of_turn>"
2001
+ ],
2002
+ "bos_token": "<bos>",
2003
+ "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ system_message }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ content }}{% elif message['role'] == 'assistant' %}{{ content }}{% endif %}{% endfor %}",
2004
+ "clean_up_tokenization_spaces": false,
2005
+ "eos_token": "<eos>",
2006
+ "model_max_length": 1000000000000000019884624838656,
2007
+ "pad_token": "<pad>",
2008
+ "padding_side": "right",
2009
+ "sp_model_kwargs": {},
2010
+ "spaces_between_special_tokens": false,
2011
+ "split_special_tokens": false,
2012
+ "tokenizer_class": "GemmaTokenizer",
2013
+ "unk_token": "<unk>",
2014
+ "use_default_system_prompt": false
2015
+ }
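The tokenizer_config.json added above defines a minimal chat_template that concatenates an optional system message with each turn's raw content, with no role markers. A minimal sketch of how it could be applied (not part of this commit; the checkpoint path passed to from_pretrained is a placeholder assumption):

from transformers import AutoTokenizer

# Load the tokenizer files saved in this commit (path/repo id is an assumption).
tok = AutoTokenizer.from_pretrained("path/to/this/checkpoint")

messages = [
    {"role": "system", "content": "You are a helpful assistant.\n"},
    {"role": "user", "content": "Hello!"},
]

# With the template above, the rendered prompt is simply the system text
# followed by each turn's content, concatenated in order.
prompt = tok.apply_chat_template(messages, tokenize=False)
print(prompt)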
trainer_log.jsonl ADDED
@@ -0,0 +1,412 @@
1
+ {"current_steps": 1, "total_steps": 3906, "loss": 7.1027, "learning_rate": 1.0000000000000002e-06, "epoch": 0.000256, "percentage": 0.03, "elapsed_time": "0:00:21", "remaining_time": "23:22:04"}
2
+ {"current_steps": 2, "total_steps": 3906, "loss": 7.1034, "learning_rate": 2.0000000000000003e-06, "epoch": 0.000512, "percentage": 0.05, "elapsed_time": "0:00:34", "remaining_time": "18:50:02"}
3
+ {"current_steps": 3, "total_steps": 3906, "loss": 6.5723, "learning_rate": 3e-06, "epoch": 0.000768, "percentage": 0.08, "elapsed_time": "0:00:47", "remaining_time": "17:18:37"}
4
+ {"current_steps": 4, "total_steps": 3906, "loss": 5.2188, "learning_rate": 4.000000000000001e-06, "epoch": 0.001024, "percentage": 0.1, "elapsed_time": "0:01:01", "remaining_time": "16:32:40"}
5
+ {"current_steps": 5, "total_steps": 3906, "loss": 4.0969, "learning_rate": 5e-06, "epoch": 0.00128, "percentage": 0.13, "elapsed_time": "0:01:14", "remaining_time": "16:04:54"}
6
+ {"current_steps": 6, "total_steps": 3906, "loss": 3.4141, "learning_rate": 6e-06, "epoch": 0.001536, "percentage": 0.15, "elapsed_time": "0:01:27", "remaining_time": "15:46:27"}
7
+ {"current_steps": 7, "total_steps": 3906, "loss": 3.2294, "learning_rate": 7e-06, "epoch": 0.001792, "percentage": 0.18, "elapsed_time": "0:01:40", "remaining_time": "15:33:30"}
8
+ {"current_steps": 8, "total_steps": 3906, "loss": 2.7187, "learning_rate": 8.000000000000001e-06, "epoch": 0.002048, "percentage": 0.2, "elapsed_time": "0:01:53", "remaining_time": "15:23:36"}
9
+ {"current_steps": 9, "total_steps": 3906, "loss": 2.6165, "learning_rate": 9e-06, "epoch": 0.002304, "percentage": 0.23, "elapsed_time": "0:02:06", "remaining_time": "15:15:54"}
10
+ {"current_steps": 10, "total_steps": 3906, "loss": 2.5339, "learning_rate": 1e-05, "epoch": 0.00256, "percentage": 0.26, "elapsed_time": "0:02:20", "remaining_time": "15:09:52"}
11
+ {"current_steps": 11, "total_steps": 3906, "loss": 2.4265, "learning_rate": 1.1000000000000001e-05, "epoch": 0.002816, "percentage": 0.28, "elapsed_time": "0:02:33", "remaining_time": "15:04:57"}
12
+ {"current_steps": 12, "total_steps": 3906, "loss": 2.3719, "learning_rate": 1.2e-05, "epoch": 0.003072, "percentage": 0.31, "elapsed_time": "0:02:46", "remaining_time": "15:00:39"}
13
+ {"current_steps": 13, "total_steps": 3906, "loss": 2.2678, "learning_rate": 1.3000000000000001e-05, "epoch": 0.003328, "percentage": 0.33, "elapsed_time": "0:02:59", "remaining_time": "14:57:01"}
14
+ {"current_steps": 14, "total_steps": 3906, "loss": 2.4163, "learning_rate": 1.4e-05, "epoch": 0.003584, "percentage": 0.36, "elapsed_time": "0:03:12", "remaining_time": "14:53:48"}
15
+ {"current_steps": 15, "total_steps": 3906, "loss": 2.2873, "learning_rate": 1.5000000000000002e-05, "epoch": 0.00384, "percentage": 0.38, "elapsed_time": "0:03:26", "remaining_time": "14:50:55"}
16
+ {"current_steps": 16, "total_steps": 3906, "loss": 2.3126, "learning_rate": 1.6000000000000003e-05, "epoch": 0.004096, "percentage": 0.41, "elapsed_time": "0:03:39", "remaining_time": "14:48:20"}
17
+ {"current_steps": 17, "total_steps": 3906, "loss": 2.1398, "learning_rate": 1.7e-05, "epoch": 0.004352, "percentage": 0.44, "elapsed_time": "0:03:52", "remaining_time": "14:46:03"}
18
+ {"current_steps": 18, "total_steps": 3906, "loss": 2.1752, "learning_rate": 1.8e-05, "epoch": 0.004608, "percentage": 0.46, "elapsed_time": "0:04:05", "remaining_time": "14:44:05"}
19
+ {"current_steps": 19, "total_steps": 3906, "loss": 2.2289, "learning_rate": 1.9e-05, "epoch": 0.004864, "percentage": 0.49, "elapsed_time": "0:04:18", "remaining_time": "14:42:19"}
20
+ {"current_steps": 20, "total_steps": 3906, "loss": 2.2723, "learning_rate": 2e-05, "epoch": 0.00512, "percentage": 0.51, "elapsed_time": "0:04:31", "remaining_time": "14:40:39"}
21
+ {"current_steps": 21, "total_steps": 3906, "loss": 2.1972, "learning_rate": 2.1000000000000002e-05, "epoch": 0.005376, "percentage": 0.54, "elapsed_time": "0:04:45", "remaining_time": "14:39:08"}
22
+ {"current_steps": 22, "total_steps": 3906, "loss": 2.1221, "learning_rate": 2.2000000000000003e-05, "epoch": 0.005632, "percentage": 0.56, "elapsed_time": "0:04:58", "remaining_time": "14:37:48"}
23
+ {"current_steps": 23, "total_steps": 3906, "loss": 2.144, "learning_rate": 2.3e-05, "epoch": 0.005888, "percentage": 0.59, "elapsed_time": "0:05:11", "remaining_time": "14:36:36"}
24
+ {"current_steps": 24, "total_steps": 3906, "loss": 2.1498, "learning_rate": 2.4e-05, "epoch": 0.006144, "percentage": 0.61, "elapsed_time": "0:05:24", "remaining_time": "14:35:28"}
25
+ {"current_steps": 25, "total_steps": 3906, "loss": 2.2104, "learning_rate": 2.5e-05, "epoch": 0.0064, "percentage": 0.64, "elapsed_time": "0:05:37", "remaining_time": "14:34:23"}
26
+ {"current_steps": 26, "total_steps": 3906, "loss": 2.1118, "learning_rate": 2.6000000000000002e-05, "epoch": 0.006656, "percentage": 0.67, "elapsed_time": "0:05:51", "remaining_time": "14:33:11"}
27
+ {"current_steps": 27, "total_steps": 3906, "loss": 2.0654, "learning_rate": 2.7000000000000002e-05, "epoch": 0.006912, "percentage": 0.69, "elapsed_time": "0:06:04", "remaining_time": "14:32:01"}
28
+ {"current_steps": 28, "total_steps": 3906, "loss": 2.0889, "learning_rate": 2.8e-05, "epoch": 0.007168, "percentage": 0.72, "elapsed_time": "0:06:17", "remaining_time": "14:31:07"}
29
+ {"current_steps": 29, "total_steps": 3906, "loss": 2.0099, "learning_rate": 2.9e-05, "epoch": 0.007424, "percentage": 0.74, "elapsed_time": "0:06:30", "remaining_time": "14:30:08"}
30
+ {"current_steps": 30, "total_steps": 3906, "loss": 2.0453, "learning_rate": 3.0000000000000004e-05, "epoch": 0.00768, "percentage": 0.77, "elapsed_time": "0:06:43", "remaining_time": "14:29:18"}
31
+ {"current_steps": 31, "total_steps": 3906, "loss": 2.0467, "learning_rate": 3.1e-05, "epoch": 0.007936, "percentage": 0.79, "elapsed_time": "0:06:56", "remaining_time": "14:28:33"}
32
+ {"current_steps": 32, "total_steps": 3906, "loss": 2.064, "learning_rate": 3.2000000000000005e-05, "epoch": 0.008192, "percentage": 0.82, "elapsed_time": "0:07:10", "remaining_time": "14:27:51"}
33
+ {"current_steps": 33, "total_steps": 3906, "loss": 2.0269, "learning_rate": 3.3e-05, "epoch": 0.008448, "percentage": 0.84, "elapsed_time": "0:07:23", "remaining_time": "14:27:08"}
34
+ {"current_steps": 34, "total_steps": 3906, "loss": 2.0346, "learning_rate": 3.4e-05, "epoch": 0.008704, "percentage": 0.87, "elapsed_time": "0:07:36", "remaining_time": "14:26:27"}
35
+ {"current_steps": 35, "total_steps": 3906, "loss": 1.9448, "learning_rate": 3.5000000000000004e-05, "epoch": 0.00896, "percentage": 0.9, "elapsed_time": "0:07:49", "remaining_time": "14:25:48"}
36
+ {"current_steps": 36, "total_steps": 3906, "loss": 1.9426, "learning_rate": 3.6e-05, "epoch": 0.009216, "percentage": 0.92, "elapsed_time": "0:08:02", "remaining_time": "14:25:09"}
37
+ {"current_steps": 37, "total_steps": 3906, "loss": 2.0256, "learning_rate": 3.7000000000000005e-05, "epoch": 0.009472, "percentage": 0.95, "elapsed_time": "0:08:16", "remaining_time": "14:24:31"}
38
+ {"current_steps": 38, "total_steps": 3906, "loss": 1.9751, "learning_rate": 3.8e-05, "epoch": 0.009728, "percentage": 0.97, "elapsed_time": "0:08:29", "remaining_time": "14:23:57"}
39
+ {"current_steps": 39, "total_steps": 3906, "loss": 1.9972, "learning_rate": 3.9e-05, "epoch": 0.009984, "percentage": 1.0, "elapsed_time": "0:08:42", "remaining_time": "14:23:22"}
40
+ {"current_steps": 40, "total_steps": 3906, "loss": 1.9717, "learning_rate": 4e-05, "epoch": 0.01024, "percentage": 1.02, "elapsed_time": "0:08:55", "remaining_time": "14:22:49"}
41
+ {"current_steps": 41, "total_steps": 3906, "loss": 2.0233, "learning_rate": 3.999999339647203e-05, "epoch": 0.010496, "percentage": 1.05, "elapsed_time": "0:09:08", "remaining_time": "14:22:14"}
42
+ {"current_steps": 42, "total_steps": 3906, "loss": 2.051, "learning_rate": 3.9999973585892455e-05, "epoch": 0.010752, "percentage": 1.08, "elapsed_time": "0:09:22", "remaining_time": "14:21:44"}
43
+ {"current_steps": 43, "total_steps": 3906, "loss": 1.9142, "learning_rate": 3.999994056827438e-05, "epoch": 0.011008, "percentage": 1.1, "elapsed_time": "0:09:35", "remaining_time": "14:21:13"}
44
+ {"current_steps": 44, "total_steps": 3906, "loss": 2.0122, "learning_rate": 3.999989434363959e-05, "epoch": 0.011264, "percentage": 1.13, "elapsed_time": "0:09:48", "remaining_time": "14:20:44"}
45
+ {"current_steps": 45, "total_steps": 3906, "loss": 1.9614, "learning_rate": 3.999983491201863e-05, "epoch": 0.01152, "percentage": 1.15, "elapsed_time": "0:10:01", "remaining_time": "14:20:16"}
46
+ {"current_steps": 46, "total_steps": 3906, "loss": 1.9205, "learning_rate": 3.999976227345072e-05, "epoch": 0.011776, "percentage": 1.18, "elapsed_time": "0:10:14", "remaining_time": "14:19:50"}
47
+ {"current_steps": 47, "total_steps": 3906, "loss": 1.9214, "learning_rate": 3.999967642798385e-05, "epoch": 0.012032, "percentage": 1.2, "elapsed_time": "0:10:27", "remaining_time": "14:19:22"}
48
+ {"current_steps": 48, "total_steps": 3906, "loss": 1.9153, "learning_rate": 3.99995773756747e-05, "epoch": 0.012288, "percentage": 1.23, "elapsed_time": "0:10:41", "remaining_time": "14:18:56"}
49
+ {"current_steps": 49, "total_steps": 3906, "loss": 1.8511, "learning_rate": 3.9999465116588664e-05, "epoch": 0.012544, "percentage": 1.25, "elapsed_time": "0:10:54", "remaining_time": "14:18:29"}
50
+ {"current_steps": 50, "total_steps": 3906, "loss": 1.8317, "learning_rate": 3.99993396507999e-05, "epoch": 0.0128, "percentage": 1.28, "elapsed_time": "0:11:07", "remaining_time": "14:17:57"}
51
+ {"current_steps": 51, "total_steps": 3906, "loss": 1.8839, "learning_rate": 3.999920097839124e-05, "epoch": 0.013056, "percentage": 1.31, "elapsed_time": "0:11:20", "remaining_time": "14:17:28"}
52
+ {"current_steps": 52, "total_steps": 3906, "loss": 1.8511, "learning_rate": 3.999904909945427e-05, "epoch": 0.013312, "percentage": 1.33, "elapsed_time": "0:11:33", "remaining_time": "14:17:04"}
53
+ {"current_steps": 53, "total_steps": 3906, "loss": 1.8425, "learning_rate": 3.999888401408927e-05, "epoch": 0.013568, "percentage": 1.36, "elapsed_time": "0:11:47", "remaining_time": "14:16:39"}
54
+ {"current_steps": 54, "total_steps": 3906, "loss": 1.841, "learning_rate": 3.999870572240527e-05, "epoch": 0.013824, "percentage": 1.38, "elapsed_time": "0:12:00", "remaining_time": "14:16:16"}
55
+ {"current_steps": 55, "total_steps": 3906, "loss": 1.8339, "learning_rate": 3.999851422451999e-05, "epoch": 0.01408, "percentage": 1.41, "elapsed_time": "0:12:13", "remaining_time": "14:15:54"}
56
+ {"current_steps": 56, "total_steps": 3906, "loss": 1.9235, "learning_rate": 3.9998309520559904e-05, "epoch": 0.014336, "percentage": 1.43, "elapsed_time": "0:12:26", "remaining_time": "14:15:31"}
57
+ {"current_steps": 57, "total_steps": 3906, "loss": 1.9388, "learning_rate": 3.999809161066017e-05, "epoch": 0.014592, "percentage": 1.46, "elapsed_time": "0:12:39", "remaining_time": "14:15:10"}
58
+ {"current_steps": 58, "total_steps": 3906, "loss": 1.815, "learning_rate": 3.999786049496469e-05, "epoch": 0.014848, "percentage": 1.48, "elapsed_time": "0:12:53", "remaining_time": "14:14:48"}
59
+ {"current_steps": 59, "total_steps": 3906, "loss": 1.8619, "learning_rate": 3.999761617362609e-05, "epoch": 0.015104, "percentage": 1.51, "elapsed_time": "0:13:06", "remaining_time": "14:14:27"}
60
+ {"current_steps": 60, "total_steps": 3906, "loss": 1.8577, "learning_rate": 3.99973586468057e-05, "epoch": 0.01536, "percentage": 1.54, "elapsed_time": "0:13:19", "remaining_time": "14:14:07"}
61
+ {"current_steps": 61, "total_steps": 3906, "loss": 1.8654, "learning_rate": 3.9997087914673585e-05, "epoch": 0.015616, "percentage": 1.56, "elapsed_time": "0:13:32", "remaining_time": "14:13:43"}
62
+ {"current_steps": 62, "total_steps": 3906, "loss": 1.8033, "learning_rate": 3.999680397740852e-05, "epoch": 0.015872, "percentage": 1.59, "elapsed_time": "0:13:45", "remaining_time": "14:13:21"}
63
+ {"current_steps": 63, "total_steps": 3906, "loss": 1.7934, "learning_rate": 3.9996506835198005e-05, "epoch": 0.016128, "percentage": 1.61, "elapsed_time": "0:13:59", "remaining_time": "14:13:00"}
64
+ {"current_steps": 64, "total_steps": 3906, "loss": 1.8388, "learning_rate": 3.999619648823826e-05, "epoch": 0.016384, "percentage": 1.64, "elapsed_time": "0:14:12", "remaining_time": "14:12:37"}
65
+ {"current_steps": 65, "total_steps": 3906, "loss": 1.8471, "learning_rate": 3.999587293673422e-05, "epoch": 0.01664, "percentage": 1.66, "elapsed_time": "0:14:25", "remaining_time": "14:12:18"}
66
+ {"current_steps": 66, "total_steps": 3906, "loss": 1.7755, "learning_rate": 3.9995536180899544e-05, "epoch": 0.016896, "percentage": 1.69, "elapsed_time": "0:14:38", "remaining_time": "14:11:59"}
67
+ {"current_steps": 67, "total_steps": 3906, "loss": 1.8353, "learning_rate": 3.9995186220956616e-05, "epoch": 0.017152, "percentage": 1.72, "elapsed_time": "0:14:51", "remaining_time": "14:11:38"}
68
+ {"current_steps": 68, "total_steps": 3906, "loss": 1.8891, "learning_rate": 3.999482305713653e-05, "epoch": 0.017408, "percentage": 1.74, "elapsed_time": "0:15:04", "remaining_time": "14:11:18"}
69
+ {"current_steps": 69, "total_steps": 3906, "loss": 1.8241, "learning_rate": 3.9994446689679085e-05, "epoch": 0.017664, "percentage": 1.77, "elapsed_time": "0:15:18", "remaining_time": "14:10:58"}
70
+ {"current_steps": 70, "total_steps": 3906, "loss": 1.7861, "learning_rate": 3.999405711883285e-05, "epoch": 0.01792, "percentage": 1.79, "elapsed_time": "0:15:31", "remaining_time": "14:10:41"}
71
+ {"current_steps": 71, "total_steps": 3906, "loss": 1.7952, "learning_rate": 3.999365434485504e-05, "epoch": 0.018176, "percentage": 1.82, "elapsed_time": "0:15:44", "remaining_time": "14:10:22"}
72
+ {"current_steps": 72, "total_steps": 3906, "loss": 1.8357, "learning_rate": 3.999323836801166e-05, "epoch": 0.018432, "percentage": 1.84, "elapsed_time": "0:15:57", "remaining_time": "14:10:03"}
73
+ {"current_steps": 73, "total_steps": 3906, "loss": 1.7782, "learning_rate": 3.999280918857739e-05, "epoch": 0.018688, "percentage": 1.87, "elapsed_time": "0:16:10", "remaining_time": "14:09:43"}
74
+ {"current_steps": 74, "total_steps": 3906, "loss": 1.817, "learning_rate": 3.999236680683564e-05, "epoch": 0.018944, "percentage": 1.89, "elapsed_time": "0:16:24", "remaining_time": "14:09:20"}
75
+ {"current_steps": 75, "total_steps": 3906, "loss": 1.7895, "learning_rate": 3.9991911223078537e-05, "epoch": 0.0192, "percentage": 1.92, "elapsed_time": "0:16:37", "remaining_time": "14:09:04"}
76
+ {"current_steps": 76, "total_steps": 3906, "loss": 1.7798, "learning_rate": 3.999144243760693e-05, "epoch": 0.019456, "percentage": 1.95, "elapsed_time": "0:16:50", "remaining_time": "14:08:46"}
77
+ {"current_steps": 77, "total_steps": 3906, "loss": 1.7809, "learning_rate": 3.9990960450730376e-05, "epoch": 0.019712, "percentage": 1.97, "elapsed_time": "0:17:03", "remaining_time": "14:08:27"}
78
+ {"current_steps": 78, "total_steps": 3906, "loss": 1.7588, "learning_rate": 3.999046526276716e-05, "epoch": 0.019968, "percentage": 2.0, "elapsed_time": "0:17:16", "remaining_time": "14:08:09"}
79
+ {"current_steps": 79, "total_steps": 3906, "loss": 1.7642, "learning_rate": 3.998995687404429e-05, "epoch": 0.020224, "percentage": 2.02, "elapsed_time": "0:17:30", "remaining_time": "14:07:52"}
80
+ {"current_steps": 80, "total_steps": 3906, "loss": 1.741, "learning_rate": 3.998943528489746e-05, "epoch": 0.02048, "percentage": 2.05, "elapsed_time": "0:17:43", "remaining_time": "14:07:34"}
81
+ {"current_steps": 81, "total_steps": 3906, "loss": 1.7976, "learning_rate": 3.998890049567113e-05, "epoch": 0.020736, "percentage": 2.07, "elapsed_time": "0:17:56", "remaining_time": "14:07:18"}
82
+ {"current_steps": 82, "total_steps": 3906, "loss": 1.7517, "learning_rate": 3.9988352506718426e-05, "epoch": 0.020992, "percentage": 2.1, "elapsed_time": "0:18:09", "remaining_time": "14:07:01"}
83
+ {"current_steps": 83, "total_steps": 3906, "loss": 1.721, "learning_rate": 3.998779131840123e-05, "epoch": 0.021248, "percentage": 2.12, "elapsed_time": "0:18:22", "remaining_time": "14:06:43"}
84
+ {"current_steps": 84, "total_steps": 3906, "loss": 1.735, "learning_rate": 3.998721693109011e-05, "epoch": 0.021504, "percentage": 2.15, "elapsed_time": "0:18:36", "remaining_time": "14:06:25"}
85
+ {"current_steps": 85, "total_steps": 3906, "loss": 1.7742, "learning_rate": 3.998662934516438e-05, "epoch": 0.02176, "percentage": 2.18, "elapsed_time": "0:18:49", "remaining_time": "14:06:07"}
86
+ {"current_steps": 86, "total_steps": 3906, "loss": 1.7198, "learning_rate": 3.998602856101205e-05, "epoch": 0.022016, "percentage": 2.2, "elapsed_time": "0:19:02", "remaining_time": "14:05:48"}
87
+ {"current_steps": 87, "total_steps": 3906, "loss": 1.7398, "learning_rate": 3.9985414579029836e-05, "epoch": 0.022272, "percentage": 2.23, "elapsed_time": "0:19:15", "remaining_time": "14:05:28"}
88
+ {"current_steps": 88, "total_steps": 3906, "loss": 1.735, "learning_rate": 3.99847873996232e-05, "epoch": 0.022528, "percentage": 2.25, "elapsed_time": "0:19:28", "remaining_time": "14:05:11"}
89
+ {"current_steps": 89, "total_steps": 3906, "loss": 1.7487, "learning_rate": 3.9984147023206295e-05, "epoch": 0.022784, "percentage": 2.28, "elapsed_time": "0:19:42", "remaining_time": "14:04:54"}
90
+ {"current_steps": 90, "total_steps": 3906, "loss": 1.7461, "learning_rate": 3.9983493450202e-05, "epoch": 0.02304, "percentage": 2.3, "elapsed_time": "0:19:55", "remaining_time": "14:04:37"}
91
+ {"current_steps": 91, "total_steps": 3906, "loss": 1.7403, "learning_rate": 3.998282668104188e-05, "epoch": 0.023296, "percentage": 2.33, "elapsed_time": "0:20:08", "remaining_time": "14:04:20"}
92
+ {"current_steps": 92, "total_steps": 3906, "loss": 1.6954, "learning_rate": 3.998214671616627e-05, "epoch": 0.023552, "percentage": 2.36, "elapsed_time": "0:20:21", "remaining_time": "14:04:04"}
93
+ {"current_steps": 93, "total_steps": 3906, "loss": 1.8071, "learning_rate": 3.9981453556024175e-05, "epoch": 0.023808, "percentage": 2.38, "elapsed_time": "0:20:34", "remaining_time": "14:03:48"}
94
+ {"current_steps": 94, "total_steps": 3906, "loss": 1.711, "learning_rate": 3.998074720107332e-05, "epoch": 0.024064, "percentage": 2.41, "elapsed_time": "0:20:48", "remaining_time": "14:03:31"}
95
+ {"current_steps": 95, "total_steps": 3906, "loss": 1.7698, "learning_rate": 3.998002765178015e-05, "epoch": 0.02432, "percentage": 2.43, "elapsed_time": "0:21:01", "remaining_time": "14:03:13"}
96
+ {"current_steps": 96, "total_steps": 3906, "loss": 1.7301, "learning_rate": 3.997929490861983e-05, "epoch": 0.024576, "percentage": 2.46, "elapsed_time": "0:21:14", "remaining_time": "14:02:57"}
97
+ {"current_steps": 97, "total_steps": 3906, "loss": 1.7179, "learning_rate": 3.997854897207621e-05, "epoch": 0.024832, "percentage": 2.48, "elapsed_time": "0:21:27", "remaining_time": "14:02:39"}
98
+ {"current_steps": 98, "total_steps": 3906, "loss": 1.7119, "learning_rate": 3.997778984264189e-05, "epoch": 0.025088, "percentage": 2.51, "elapsed_time": "0:21:40", "remaining_time": "14:02:23"}
99
+ {"current_steps": 99, "total_steps": 3906, "loss": 1.7394, "learning_rate": 3.997701752081816e-05, "epoch": 0.025344, "percentage": 2.53, "elapsed_time": "0:21:53", "remaining_time": "14:02:06"}
100
+ {"current_steps": 100, "total_steps": 3906, "loss": 1.6803, "learning_rate": 3.9976232007115015e-05, "epoch": 0.0256, "percentage": 2.56, "elapsed_time": "0:22:07", "remaining_time": "14:01:51"}
101
+ {"current_steps": 101, "total_steps": 3906, "loss": 1.7406, "learning_rate": 3.9975433302051184e-05, "epoch": 0.025856, "percentage": 2.59, "elapsed_time": "0:22:20", "remaining_time": "14:01:34"}
102
+ {"current_steps": 102, "total_steps": 3906, "loss": 1.7512, "learning_rate": 3.997462140615408e-05, "epoch": 0.026112, "percentage": 2.61, "elapsed_time": "0:22:33", "remaining_time": "14:01:19"}
103
+ {"current_steps": 103, "total_steps": 3906, "loss": 1.7349, "learning_rate": 3.9973796319959854e-05, "epoch": 0.026368, "percentage": 2.64, "elapsed_time": "0:22:46", "remaining_time": "14:01:03"}
104
+ {"current_steps": 104, "total_steps": 3906, "loss": 1.8028, "learning_rate": 3.997295804401335e-05, "epoch": 0.026624, "percentage": 2.66, "elapsed_time": "0:22:59", "remaining_time": "14:00:48"}
105
+ {"current_steps": 105, "total_steps": 3906, "loss": 1.6684, "learning_rate": 3.997210657886813e-05, "epoch": 0.02688, "percentage": 2.69, "elapsed_time": "0:23:13", "remaining_time": "14:00:33"}
106
+ {"current_steps": 106, "total_steps": 3906, "loss": 1.6906, "learning_rate": 3.9971241925086444e-05, "epoch": 0.027136, "percentage": 2.71, "elapsed_time": "0:23:26", "remaining_time": "14:00:18"}
107
+ {"current_steps": 107, "total_steps": 3906, "loss": 1.6753, "learning_rate": 3.9970364083239284e-05, "epoch": 0.027392, "percentage": 2.74, "elapsed_time": "0:23:39", "remaining_time": "14:00:07"}
108
+ {"current_steps": 108, "total_steps": 3906, "loss": 1.6673, "learning_rate": 3.996947305390634e-05, "epoch": 0.027648, "percentage": 2.76, "elapsed_time": "0:23:52", "remaining_time": "13:59:52"}
109
+ {"current_steps": 109, "total_steps": 3906, "loss": 1.6906, "learning_rate": 3.9968568837675986e-05, "epoch": 0.027904, "percentage": 2.79, "elapsed_time": "0:24:06", "remaining_time": "13:59:35"}
110
+ {"current_steps": 110, "total_steps": 3906, "loss": 1.7352, "learning_rate": 3.996765143514534e-05, "epoch": 0.02816, "percentage": 2.82, "elapsed_time": "0:24:19", "remaining_time": "13:59:17"}
111
+ {"current_steps": 111, "total_steps": 3906, "loss": 1.6813, "learning_rate": 3.996672084692021e-05, "epoch": 0.028416, "percentage": 2.84, "elapsed_time": "0:24:32", "remaining_time": "13:59:01"}
112
+ {"current_steps": 112, "total_steps": 3906, "loss": 1.6841, "learning_rate": 3.9965777073615105e-05, "epoch": 0.028672, "percentage": 2.87, "elapsed_time": "0:24:45", "remaining_time": "13:58:43"}
113
+ {"current_steps": 113, "total_steps": 3906, "loss": 1.7104, "learning_rate": 3.996482011585325e-05, "epoch": 0.028928, "percentage": 2.89, "elapsed_time": "0:24:58", "remaining_time": "13:58:26"}
114
+ {"current_steps": 114, "total_steps": 3906, "loss": 1.6565, "learning_rate": 3.996384997426658e-05, "epoch": 0.029184, "percentage": 2.92, "elapsed_time": "0:25:11", "remaining_time": "13:58:10"}
115
+ {"current_steps": 115, "total_steps": 3906, "loss": 1.6614, "learning_rate": 3.996286664949573e-05, "epoch": 0.02944, "percentage": 2.94, "elapsed_time": "0:25:25", "remaining_time": "13:57:55"}
116
+ {"current_steps": 116, "total_steps": 3906, "loss": 1.716, "learning_rate": 3.996187014219004e-05, "epoch": 0.029696, "percentage": 2.97, "elapsed_time": "0:25:38", "remaining_time": "13:57:39"}
117
+ {"current_steps": 117, "total_steps": 3906, "loss": 1.6901, "learning_rate": 3.9960860453007555e-05, "epoch": 0.029952, "percentage": 3.0, "elapsed_time": "0:25:51", "remaining_time": "13:57:24"}
118
+ {"current_steps": 118, "total_steps": 3906, "loss": 1.7093, "learning_rate": 3.995983758261502e-05, "epoch": 0.030208, "percentage": 3.02, "elapsed_time": "0:26:04", "remaining_time": "13:57:09"}
119
+ {"current_steps": 119, "total_steps": 3906, "loss": 1.6738, "learning_rate": 3.9958801531687904e-05, "epoch": 0.030464, "percentage": 3.05, "elapsed_time": "0:26:17", "remaining_time": "13:56:54"}
120
+ {"current_steps": 120, "total_steps": 3906, "loss": 1.6735, "learning_rate": 3.995775230091035e-05, "epoch": 0.03072, "percentage": 3.07, "elapsed_time": "0:26:31", "remaining_time": "13:56:39"}
121
+ {"current_steps": 121, "total_steps": 3906, "loss": 1.7062, "learning_rate": 3.995668989097525e-05, "epoch": 0.030976, "percentage": 3.1, "elapsed_time": "0:26:44", "remaining_time": "13:56:23"}
122
+ {"current_steps": 122, "total_steps": 3906, "loss": 1.6642, "learning_rate": 3.995561430258413e-05, "epoch": 0.031232, "percentage": 3.12, "elapsed_time": "0:26:57", "remaining_time": "13:56:07"}
123
+ {"current_steps": 123, "total_steps": 3906, "loss": 1.6459, "learning_rate": 3.995452553644728e-05, "epoch": 0.031488, "percentage": 3.15, "elapsed_time": "0:27:10", "remaining_time": "13:55:51"}
124
+ {"current_steps": 124, "total_steps": 3906, "loss": 1.6897, "learning_rate": 3.995342359328367e-05, "epoch": 0.031744, "percentage": 3.17, "elapsed_time": "0:27:23", "remaining_time": "13:55:36"}
125
+ {"current_steps": 125, "total_steps": 3906, "loss": 1.7132, "learning_rate": 3.995230847382096e-05, "epoch": 0.032, "percentage": 3.2, "elapsed_time": "0:27:37", "remaining_time": "13:55:21"}
126
+ {"current_steps": 126, "total_steps": 3906, "loss": 1.581, "learning_rate": 3.995118017879554e-05, "epoch": 0.032256, "percentage": 3.23, "elapsed_time": "0:27:50", "remaining_time": "13:55:06"}
127
+ {"current_steps": 127, "total_steps": 3906, "loss": 1.6804, "learning_rate": 3.995003870895247e-05, "epoch": 0.032512, "percentage": 3.25, "elapsed_time": "0:28:03", "remaining_time": "13:54:49"}
128
+ {"current_steps": 128, "total_steps": 3906, "loss": 1.7045, "learning_rate": 3.994888406504552e-05, "epoch": 0.032768, "percentage": 3.28, "elapsed_time": "0:28:16", "remaining_time": "13:54:35"}
129
+ {"current_steps": 129, "total_steps": 3906, "loss": 1.6389, "learning_rate": 3.994771624783718e-05, "epoch": 0.033024, "percentage": 3.3, "elapsed_time": "0:28:29", "remaining_time": "13:54:19"}
130
+ {"current_steps": 130, "total_steps": 3906, "loss": 1.7005, "learning_rate": 3.994653525809859e-05, "epoch": 0.03328, "percentage": 3.33, "elapsed_time": "0:28:42", "remaining_time": "13:54:03"}
131
+ {"current_steps": 131, "total_steps": 3906, "loss": 1.6992, "learning_rate": 3.9945341096609655e-05, "epoch": 0.033536, "percentage": 3.35, "elapsed_time": "0:28:56", "remaining_time": "13:53:48"}
132
+ {"current_steps": 132, "total_steps": 3906, "loss": 1.6899, "learning_rate": 3.9944133764158925e-05, "epoch": 0.033792, "percentage": 3.38, "elapsed_time": "0:29:09", "remaining_time": "13:53:33"}
133
+ {"current_steps": 133, "total_steps": 3906, "loss": 1.6429, "learning_rate": 3.9942913261543664e-05, "epoch": 0.034048, "percentage": 3.41, "elapsed_time": "0:29:22", "remaining_time": "13:53:17"}
134
+ {"current_steps": 134, "total_steps": 3906, "loss": 1.6891, "learning_rate": 3.9941679589569836e-05, "epoch": 0.034304, "percentage": 3.43, "elapsed_time": "0:29:35", "remaining_time": "13:53:02"}
135
+ {"current_steps": 135, "total_steps": 3906, "loss": 1.6226, "learning_rate": 3.99404327490521e-05, "epoch": 0.03456, "percentage": 3.46, "elapsed_time": "0:29:48", "remaining_time": "13:52:47"}
136
+ {"current_steps": 136, "total_steps": 3906, "loss": 1.6255, "learning_rate": 3.9939172740813825e-05, "epoch": 0.034816, "percentage": 3.48, "elapsed_time": "0:30:01", "remaining_time": "13:52:32"}
137
+ {"current_steps": 137, "total_steps": 3906, "loss": 1.7171, "learning_rate": 3.993789956568704e-05, "epoch": 0.035072, "percentage": 3.51, "elapsed_time": "0:30:15", "remaining_time": "13:52:17"}
138
+ {"current_steps": 138, "total_steps": 3906, "loss": 1.5579, "learning_rate": 3.9936613224512495e-05, "epoch": 0.035328, "percentage": 3.53, "elapsed_time": "0:30:28", "remaining_time": "13:52:03"}
139
+ {"current_steps": 139, "total_steps": 3906, "loss": 1.6577, "learning_rate": 3.993531371813963e-05, "epoch": 0.035584, "percentage": 3.56, "elapsed_time": "0:30:41", "remaining_time": "13:51:48"}
140
+ {"current_steps": 140, "total_steps": 3906, "loss": 1.6809, "learning_rate": 3.9934001047426585e-05, "epoch": 0.03584, "percentage": 3.58, "elapsed_time": "0:30:54", "remaining_time": "13:51:34"}
141
+ {"current_steps": 141, "total_steps": 3906, "loss": 1.627, "learning_rate": 3.9932675213240175e-05, "epoch": 0.036096, "percentage": 3.61, "elapsed_time": "0:31:08", "remaining_time": "13:51:19"}
142
+ {"current_steps": 142, "total_steps": 3906, "loss": 1.6764, "learning_rate": 3.993133621645593e-05, "epoch": 0.036352, "percentage": 3.64, "elapsed_time": "0:31:21", "remaining_time": "13:51:05"}
143
+ {"current_steps": 143, "total_steps": 3906, "loss": 1.6521, "learning_rate": 3.9929984057958055e-05, "epoch": 0.036608, "percentage": 3.66, "elapsed_time": "0:31:34", "remaining_time": "13:50:50"}
144
+ {"current_steps": 144, "total_steps": 3906, "loss": 1.6497, "learning_rate": 3.992861873863944e-05, "epoch": 0.036864, "percentage": 3.69, "elapsed_time": "0:31:47", "remaining_time": "13:50:35"}
145
+ {"current_steps": 145, "total_steps": 3906, "loss": 1.5884, "learning_rate": 3.9927240259401694e-05, "epoch": 0.03712, "percentage": 3.71, "elapsed_time": "0:32:00", "remaining_time": "13:50:21"}
146
+ {"current_steps": 146, "total_steps": 3906, "loss": 1.5987, "learning_rate": 3.992584862115509e-05, "epoch": 0.037376, "percentage": 3.74, "elapsed_time": "0:32:13", "remaining_time": "13:50:06"}
147
+ {"current_steps": 147, "total_steps": 3906, "loss": 1.5948, "learning_rate": 3.992444382481862e-05, "epoch": 0.037632, "percentage": 3.76, "elapsed_time": "0:32:27", "remaining_time": "13:49:51"}
148
+ {"current_steps": 148, "total_steps": 3906, "loss": 1.6598, "learning_rate": 3.992302587131991e-05, "epoch": 0.037888, "percentage": 3.79, "elapsed_time": "0:32:40", "remaining_time": "13:49:37"}
149
+ {"current_steps": 149, "total_steps": 3906, "loss": 1.5991, "learning_rate": 3.9921594761595334e-05, "epoch": 0.038144, "percentage": 3.81, "elapsed_time": "0:32:53", "remaining_time": "13:49:22"}
150
+ {"current_steps": 150, "total_steps": 3906, "loss": 1.6543, "learning_rate": 3.992015049658992e-05, "epoch": 0.0384, "percentage": 3.84, "elapsed_time": "0:33:06", "remaining_time": "13:49:08"}
151
+ {"current_steps": 151, "total_steps": 3906, "loss": 1.6648, "learning_rate": 3.991869307725741e-05, "epoch": 0.038656, "percentage": 3.87, "elapsed_time": "0:33:19", "remaining_time": "13:48:53"}
152
+ {"current_steps": 152, "total_steps": 3906, "loss": 1.6041, "learning_rate": 3.9917222504560184e-05, "epoch": 0.038912, "percentage": 3.89, "elapsed_time": "0:33:33", "remaining_time": "13:48:39"}
153
+ {"current_steps": 153, "total_steps": 3906, "loss": 1.6443, "learning_rate": 3.991573877946937e-05, "epoch": 0.039168, "percentage": 3.92, "elapsed_time": "0:33:46", "remaining_time": "13:48:23"}
154
+ {"current_steps": 154, "total_steps": 3906, "loss": 1.7052, "learning_rate": 3.9914241902964725e-05, "epoch": 0.039424, "percentage": 3.94, "elapsed_time": "0:33:59", "remaining_time": "13:48:06"}
155
+ {"current_steps": 155, "total_steps": 3906, "loss": 1.6251, "learning_rate": 3.991273187603473e-05, "epoch": 0.03968, "percentage": 3.97, "elapsed_time": "0:34:12", "remaining_time": "13:47:49"}
156
+ {"current_steps": 156, "total_steps": 3906, "loss": 1.6287, "learning_rate": 3.9911208699676537e-05, "epoch": 0.039936, "percentage": 3.99, "elapsed_time": "0:34:25", "remaining_time": "13:47:34"}
157
+ {"current_steps": 157, "total_steps": 3906, "loss": 1.6348, "learning_rate": 3.990967237489597e-05, "epoch": 0.040192, "percentage": 4.02, "elapsed_time": "0:34:38", "remaining_time": "13:47:19"}
158
+ {"current_steps": 158, "total_steps": 3906, "loss": 1.6133, "learning_rate": 3.990812290270755e-05, "epoch": 0.040448, "percentage": 4.05, "elapsed_time": "0:34:51", "remaining_time": "13:47:05"}
159
+ {"current_steps": 159, "total_steps": 3906, "loss": 1.6297, "learning_rate": 3.990656028413448e-05, "epoch": 0.040704, "percentage": 4.07, "elapsed_time": "0:35:05", "remaining_time": "13:46:51"}
160
+ {"current_steps": 160, "total_steps": 3906, "loss": 1.6098, "learning_rate": 3.9904984520208626e-05, "epoch": 0.04096, "percentage": 4.1, "elapsed_time": "0:35:18", "remaining_time": "13:46:37"}
161
+ {"current_steps": 161, "total_steps": 3906, "loss": 1.6131, "learning_rate": 3.9903395611970565e-05, "epoch": 0.041216, "percentage": 4.12, "elapsed_time": "0:35:31", "remaining_time": "13:46:23"}
162
+ {"current_steps": 162, "total_steps": 3906, "loss": 1.5826, "learning_rate": 3.990179356046952e-05, "epoch": 0.041472, "percentage": 4.15, "elapsed_time": "0:35:44", "remaining_time": "13:46:08"}
163
+ {"current_steps": 163, "total_steps": 3906, "loss": 1.6436, "learning_rate": 3.990017836676344e-05, "epoch": 0.041728, "percentage": 4.17, "elapsed_time": "0:35:58", "remaining_time": "13:45:58"}
164
+ {"current_steps": 164, "total_steps": 3906, "loss": 1.6775, "learning_rate": 3.9898550031918886e-05, "epoch": 0.041984, "percentage": 4.2, "elapsed_time": "0:36:11", "remaining_time": "13:45:44"}
165
+ {"current_steps": 165, "total_steps": 3906, "loss": 1.6481, "learning_rate": 3.9896908557011146e-05, "epoch": 0.04224, "percentage": 4.22, "elapsed_time": "0:36:24", "remaining_time": "13:45:29"}
166
+ {"current_steps": 166, "total_steps": 3906, "loss": 1.6508, "learning_rate": 3.989525394312418e-05, "epoch": 0.042496, "percentage": 4.25, "elapsed_time": "0:36:37", "remaining_time": "13:45:16"}
167
+ {"current_steps": 167, "total_steps": 3906, "loss": 1.6493, "learning_rate": 3.989358619135062e-05, "epoch": 0.042752, "percentage": 4.28, "elapsed_time": "0:36:50", "remaining_time": "13:44:59"}
168
+ {"current_steps": 168, "total_steps": 3906, "loss": 1.6417, "learning_rate": 3.989190530279175e-05, "epoch": 0.043008, "percentage": 4.3, "elapsed_time": "0:37:03", "remaining_time": "13:44:43"}
169
+ {"current_steps": 169, "total_steps": 3906, "loss": 1.5947, "learning_rate": 3.9890211278557576e-05, "epoch": 0.043264, "percentage": 4.33, "elapsed_time": "0:37:17", "remaining_time": "13:44:29"}
170
+ {"current_steps": 170, "total_steps": 3906, "loss": 1.6538, "learning_rate": 3.988850411976674e-05, "epoch": 0.04352, "percentage": 4.35, "elapsed_time": "0:37:30", "remaining_time": "13:44:15"}
171
+ {"current_steps": 171, "total_steps": 3906, "loss": 1.5596, "learning_rate": 3.988678382754656e-05, "epoch": 0.043776, "percentage": 4.38, "elapsed_time": "0:37:43", "remaining_time": "13:44:00"}
172
+ {"current_steps": 172, "total_steps": 3906, "loss": 1.6272, "learning_rate": 3.988505040303304e-05, "epoch": 0.044032, "percentage": 4.4, "elapsed_time": "0:37:56", "remaining_time": "13:43:46"}
173
+ {"current_steps": 173, "total_steps": 3906, "loss": 1.6455, "learning_rate": 3.9883303847370866e-05, "epoch": 0.044288, "percentage": 4.43, "elapsed_time": "0:38:09", "remaining_time": "13:43:32"}
174
+ {"current_steps": 174, "total_steps": 3906, "loss": 1.6205, "learning_rate": 3.988154416171336e-05, "epoch": 0.044544, "percentage": 4.45, "elapsed_time": "0:38:23", "remaining_time": "13:43:18"}
175
+ {"current_steps": 175, "total_steps": 3906, "loss": 1.6421, "learning_rate": 3.987977134722255e-05, "epoch": 0.0448, "percentage": 4.48, "elapsed_time": "0:38:36", "remaining_time": "13:43:04"}
176
+ {"current_steps": 176, "total_steps": 3906, "loss": 1.6395, "learning_rate": 3.987798540506911e-05, "epoch": 0.045056, "percentage": 4.51, "elapsed_time": "0:38:49", "remaining_time": "13:42:49"}
177
+ {"current_steps": 177, "total_steps": 3906, "loss": 1.5974, "learning_rate": 3.9876186336432404e-05, "epoch": 0.045312, "percentage": 4.53, "elapsed_time": "0:39:02", "remaining_time": "13:42:34"}
178
+ {"current_steps": 178, "total_steps": 3906, "loss": 1.6324, "learning_rate": 3.987437414250044e-05, "epoch": 0.045568, "percentage": 4.56, "elapsed_time": "0:39:15", "remaining_time": "13:42:20"}
179
+ {"current_steps": 179, "total_steps": 3906, "loss": 1.6155, "learning_rate": 3.9872548824469913e-05, "epoch": 0.045824, "percentage": 4.58, "elapsed_time": "0:39:28", "remaining_time": "13:42:04"}
180
+ {"current_steps": 180, "total_steps": 3906, "loss": 1.6201, "learning_rate": 3.9870710383546166e-05, "epoch": 0.04608, "percentage": 4.61, "elapsed_time": "0:39:42", "remaining_time": "13:41:50"}
181
+ {"current_steps": 181, "total_steps": 3906, "loss": 1.6282, "learning_rate": 3.986885882094324e-05, "epoch": 0.046336, "percentage": 4.63, "elapsed_time": "0:39:55", "remaining_time": "13:41:35"}
182
+ {"current_steps": 182, "total_steps": 3906, "loss": 1.6151, "learning_rate": 3.98669941378838e-05, "epoch": 0.046592, "percentage": 4.66, "elapsed_time": "0:40:08", "remaining_time": "13:41:21"}
183
+ {"current_steps": 183, "total_steps": 3906, "loss": 1.6322, "learning_rate": 3.986511633559919e-05, "epoch": 0.046848, "percentage": 4.69, "elapsed_time": "0:40:21", "remaining_time": "13:41:07"}
184
+ {"current_steps": 184, "total_steps": 3906, "loss": 1.5858, "learning_rate": 3.986322541532944e-05, "epoch": 0.047104, "percentage": 4.71, "elapsed_time": "0:40:34", "remaining_time": "13:40:54"}
185
+ {"current_steps": 185, "total_steps": 3906, "loss": 1.6501, "learning_rate": 3.986132137832322e-05, "epoch": 0.04736, "percentage": 4.74, "elapsed_time": "0:40:48", "remaining_time": "13:40:40"}
186
+ {"current_steps": 186, "total_steps": 3906, "loss": 1.619, "learning_rate": 3.985940422583786e-05, "epoch": 0.047616, "percentage": 4.76, "elapsed_time": "0:41:01", "remaining_time": "13:40:26"}
187
+ {"current_steps": 187, "total_steps": 3906, "loss": 1.6122, "learning_rate": 3.985747395913936e-05, "epoch": 0.047872, "percentage": 4.79, "elapsed_time": "0:41:14", "remaining_time": "13:40:13"}
188
+ {"current_steps": 188, "total_steps": 3906, "loss": 1.5991, "learning_rate": 3.9855530579502375e-05, "epoch": 0.048128, "percentage": 4.81, "elapsed_time": "0:41:27", "remaining_time": "13:39:58"}
189
+ {"current_steps": 189, "total_steps": 3906, "loss": 1.6093, "learning_rate": 3.9853574088210224e-05, "epoch": 0.048384, "percentage": 4.84, "elapsed_time": "0:41:40", "remaining_time": "13:39:43"}
190
+ {"current_steps": 190, "total_steps": 3906, "loss": 1.644, "learning_rate": 3.985160448655488e-05, "epoch": 0.04864, "percentage": 4.86, "elapsed_time": "0:41:54", "remaining_time": "13:39:28"}
191
+ {"current_steps": 191, "total_steps": 3906, "loss": 1.6516, "learning_rate": 3.984962177583698e-05, "epoch": 0.048896, "percentage": 4.89, "elapsed_time": "0:42:07", "remaining_time": "13:39:15"}
192
+ {"current_steps": 192, "total_steps": 3906, "loss": 1.5579, "learning_rate": 3.984762595736581e-05, "epoch": 0.049152, "percentage": 4.92, "elapsed_time": "0:42:20", "remaining_time": "13:39:01"}
193
+ {"current_steps": 193, "total_steps": 3906, "loss": 1.6606, "learning_rate": 3.98456170324593e-05, "epoch": 0.049408, "percentage": 4.94, "elapsed_time": "0:42:33", "remaining_time": "13:38:46"}
194
+ {"current_steps": 194, "total_steps": 3906, "loss": 1.602, "learning_rate": 3.9843595002444075e-05, "epoch": 0.049664, "percentage": 4.97, "elapsed_time": "0:42:46", "remaining_time": "13:38:32"}
195
+ {"current_steps": 195, "total_steps": 3906, "loss": 1.5955, "learning_rate": 3.984155986865537e-05, "epoch": 0.04992, "percentage": 4.99, "elapsed_time": "0:42:59", "remaining_time": "13:38:16"}
196
+ {"current_steps": 196, "total_steps": 3906, "loss": 1.5357, "learning_rate": 3.9839511632437096e-05, "epoch": 0.050176, "percentage": 5.02, "elapsed_time": "0:43:13", "remaining_time": "13:38:02"}
197
+ {"current_steps": 197, "total_steps": 3906, "loss": 1.5909, "learning_rate": 3.9837450295141816e-05, "epoch": 0.050432, "percentage": 5.04, "elapsed_time": "0:43:26", "remaining_time": "13:37:48"}
198
+ {"current_steps": 198, "total_steps": 3906, "loss": 1.6708, "learning_rate": 3.983537585813073e-05, "epoch": 0.050688, "percentage": 5.07, "elapsed_time": "0:43:39", "remaining_time": "13:37:34"}
199
+ {"current_steps": 199, "total_steps": 3906, "loss": 1.5605, "learning_rate": 3.98332883227737e-05, "epoch": 0.050944, "percentage": 5.09, "elapsed_time": "0:43:52", "remaining_time": "13:37:18"}
200
+ {"current_steps": 200, "total_steps": 3906, "loss": 1.6156, "learning_rate": 3.9831187690449254e-05, "epoch": 0.0512, "percentage": 5.12, "elapsed_time": "0:44:05", "remaining_time": "13:37:04"}
201
+ {"current_steps": 201, "total_steps": 3906, "loss": 1.624, "learning_rate": 3.9829073962544525e-05, "epoch": 0.051456, "percentage": 5.15, "elapsed_time": "0:44:18", "remaining_time": "13:36:50"}
202
+ {"current_steps": 202, "total_steps": 3906, "loss": 1.6255, "learning_rate": 3.9826947140455334e-05, "epoch": 0.051712, "percentage": 5.17, "elapsed_time": "0:44:31", "remaining_time": "13:36:35"}
203
+ {"current_steps": 203, "total_steps": 3906, "loss": 1.5554, "learning_rate": 3.982480722558613e-05, "epoch": 0.051968, "percentage": 5.2, "elapsed_time": "0:44:45", "remaining_time": "13:36:21"}
204
+ {"current_steps": 204, "total_steps": 3906, "loss": 1.6158, "learning_rate": 3.982265421935001e-05, "epoch": 0.052224, "percentage": 5.22, "elapsed_time": "0:44:58", "remaining_time": "13:36:07"}
205
+ {"current_steps": 205, "total_steps": 3906, "loss": 1.6056, "learning_rate": 3.982048812316872e-05, "epoch": 0.05248, "percentage": 5.25, "elapsed_time": "0:45:11", "remaining_time": "13:35:54"}
206
+ {"current_steps": 206, "total_steps": 3906, "loss": 1.6017, "learning_rate": 3.981830893847265e-05, "epoch": 0.052736, "percentage": 5.27, "elapsed_time": "0:45:24", "remaining_time": "13:35:40"}
207
+ {"current_steps": 207, "total_steps": 3906, "loss": 1.6227, "learning_rate": 3.981611666670082e-05, "epoch": 0.052992, "percentage": 5.3, "elapsed_time": "0:45:38", "remaining_time": "13:35:27"}
208
+ {"current_steps": 208, "total_steps": 3906, "loss": 1.6319, "learning_rate": 3.9813911309300916e-05, "epoch": 0.053248, "percentage": 5.33, "elapsed_time": "0:45:51", "remaining_time": "13:35:13"}
209
+ {"current_steps": 209, "total_steps": 3906, "loss": 1.627, "learning_rate": 3.981169286772925e-05, "epoch": 0.053504, "percentage": 5.35, "elapsed_time": "0:46:04", "remaining_time": "13:34:58"}
210
+ {"current_steps": 210, "total_steps": 3906, "loss": 1.5803, "learning_rate": 3.980946134345077e-05, "epoch": 0.05376, "percentage": 5.38, "elapsed_time": "0:46:17", "remaining_time": "13:34:44"}
211
+ {"current_steps": 211, "total_steps": 3906, "loss": 1.5945, "learning_rate": 3.980721673793907e-05, "epoch": 0.054016, "percentage": 5.4, "elapsed_time": "0:46:30", "remaining_time": "13:34:30"}
212
+ {"current_steps": 212, "total_steps": 3906, "loss": 1.6166, "learning_rate": 3.9804959052676386e-05, "epoch": 0.054272, "percentage": 5.43, "elapsed_time": "0:46:43", "remaining_time": "13:34:16"}
213
+ {"current_steps": 213, "total_steps": 3906, "loss": 1.6014, "learning_rate": 3.980268828915359e-05, "epoch": 0.054528, "percentage": 5.45, "elapsed_time": "0:46:57", "remaining_time": "13:34:02"}
214
+ {"current_steps": 214, "total_steps": 3906, "loss": 1.6061, "learning_rate": 3.980040444887017e-05, "epoch": 0.054784, "percentage": 5.48, "elapsed_time": "0:47:10", "remaining_time": "13:33:48"}
215
+ {"current_steps": 215, "total_steps": 3906, "loss": 1.5752, "learning_rate": 3.979810753333428e-05, "epoch": 0.05504, "percentage": 5.5, "elapsed_time": "0:47:23", "remaining_time": "13:33:35"}
216
+ {"current_steps": 216, "total_steps": 3906, "loss": 1.5667, "learning_rate": 3.979579754406271e-05, "epoch": 0.055296, "percentage": 5.53, "elapsed_time": "0:47:36", "remaining_time": "13:33:22"}
217
+ {"current_steps": 217, "total_steps": 3906, "loss": 1.577, "learning_rate": 3.979347448258083e-05, "epoch": 0.055552, "percentage": 5.56, "elapsed_time": "0:47:49", "remaining_time": "13:33:09"}
218
+ {"current_steps": 218, "total_steps": 3906, "loss": 1.6513, "learning_rate": 3.979113835042271e-05, "epoch": 0.055808, "percentage": 5.58, "elapsed_time": "0:48:03", "remaining_time": "13:32:55"}
219
+ {"current_steps": 219, "total_steps": 3906, "loss": 1.5861, "learning_rate": 3.9788789149131003e-05, "epoch": 0.056064, "percentage": 5.61, "elapsed_time": "0:48:16", "remaining_time": "13:32:39"}
220
+ {"current_steps": 220, "total_steps": 3906, "loss": 1.6002, "learning_rate": 3.978642688025703e-05, "epoch": 0.05632, "percentage": 5.63, "elapsed_time": "0:48:29", "remaining_time": "13:32:25"}
221
+ {"current_steps": 221, "total_steps": 3906, "loss": 1.5682, "learning_rate": 3.978405154536071e-05, "epoch": 0.056576, "percentage": 5.66, "elapsed_time": "0:48:42", "remaining_time": "13:32:11"}
222
+ {"current_steps": 222, "total_steps": 3906, "loss": 1.5688, "learning_rate": 3.9781663146010595e-05, "epoch": 0.056832, "percentage": 5.68, "elapsed_time": "0:48:55", "remaining_time": "13:31:56"}
223
+ {"current_steps": 223, "total_steps": 3906, "loss": 1.5846, "learning_rate": 3.977926168378389e-05, "epoch": 0.057088, "percentage": 5.71, "elapsed_time": "0:49:08", "remaining_time": "13:31:43"}
224
+ {"current_steps": 224, "total_steps": 3906, "loss": 1.6461, "learning_rate": 3.977684716026639e-05, "epoch": 0.057344, "percentage": 5.73, "elapsed_time": "0:49:22", "remaining_time": "13:31:29"}
225
+ {"current_steps": 225, "total_steps": 3906, "loss": 1.5788, "learning_rate": 3.977441957705254e-05, "epoch": 0.0576, "percentage": 5.76, "elapsed_time": "0:49:35", "remaining_time": "13:31:15"}
226
+ {"current_steps": 226, "total_steps": 3906, "loss": 1.6239, "learning_rate": 3.97719789357454e-05, "epoch": 0.057856, "percentage": 5.79, "elapsed_time": "0:49:48", "remaining_time": "13:31:02"}
227
+ {"current_steps": 227, "total_steps": 3906, "loss": 1.5473, "learning_rate": 3.9769525237956655e-05, "epoch": 0.058112, "percentage": 5.81, "elapsed_time": "0:50:01", "remaining_time": "13:30:48"}
228
+ {"current_steps": 228, "total_steps": 3906, "loss": 1.5923, "learning_rate": 3.976705848530662e-05, "epoch": 0.058368, "percentage": 5.84, "elapsed_time": "0:50:14", "remaining_time": "13:30:34"}
229
+ {"current_steps": 229, "total_steps": 3906, "loss": 1.603, "learning_rate": 3.9764578679424204e-05, "epoch": 0.058624, "percentage": 5.86, "elapsed_time": "0:50:28", "remaining_time": "13:30:21"}
230
+ {"current_steps": 230, "total_steps": 3906, "loss": 1.5565, "learning_rate": 3.976208582194696e-05, "epoch": 0.05888, "percentage": 5.89, "elapsed_time": "0:50:41", "remaining_time": "13:30:08"}
231
+ {"current_steps": 231, "total_steps": 3906, "loss": 1.5812, "learning_rate": 3.975957991452107e-05, "epoch": 0.059136, "percentage": 5.91, "elapsed_time": "0:50:54", "remaining_time": "13:29:54"}
232
+ {"current_steps": 232, "total_steps": 3906, "loss": 1.581, "learning_rate": 3.9757060958801286e-05, "epoch": 0.059392, "percentage": 5.94, "elapsed_time": "0:51:07", "remaining_time": "13:29:40"}
233
+ {"current_steps": 233, "total_steps": 3906, "loss": 1.5698, "learning_rate": 3.9754528956451034e-05, "epoch": 0.059648, "percentage": 5.97, "elapsed_time": "0:51:20", "remaining_time": "13:29:25"}
234
+ {"current_steps": 234, "total_steps": 3906, "loss": 1.6069, "learning_rate": 3.975198390914232e-05, "epoch": 0.059904, "percentage": 5.99, "elapsed_time": "0:51:33", "remaining_time": "13:29:10"}
235
+ {"current_steps": 235, "total_steps": 3906, "loss": 1.586, "learning_rate": 3.974942581855577e-05, "epoch": 0.06016, "percentage": 6.02, "elapsed_time": "0:51:47", "remaining_time": "13:28:56"}
236
+ {"current_steps": 236, "total_steps": 3906, "loss": 1.5755, "learning_rate": 3.974685468638063e-05, "epoch": 0.060416, "percentage": 6.04, "elapsed_time": "0:52:00", "remaining_time": "13:28:43"}
237
+ {"current_steps": 237, "total_steps": 3906, "loss": 1.5765, "learning_rate": 3.9744270514314756e-05, "epoch": 0.060672, "percentage": 6.07, "elapsed_time": "0:52:13", "remaining_time": "13:28:29"}
238
+ {"current_steps": 238, "total_steps": 3906, "loss": 1.5478, "learning_rate": 3.974167330406461e-05, "epoch": 0.060928, "percentage": 6.09, "elapsed_time": "0:52:26", "remaining_time": "13:28:16"}
239
+ {"current_steps": 239, "total_steps": 3906, "loss": 1.575, "learning_rate": 3.973906305734526e-05, "epoch": 0.061184, "percentage": 6.12, "elapsed_time": "0:52:39", "remaining_time": "13:28:03"}
240
+ {"current_steps": 240, "total_steps": 3906, "loss": 1.5564, "learning_rate": 3.973643977588041e-05, "epoch": 0.06144, "percentage": 6.14, "elapsed_time": "0:52:53", "remaining_time": "13:27:50"}
241
+ {"current_steps": 241, "total_steps": 3906, "loss": 1.5936, "learning_rate": 3.973380346140233e-05, "epoch": 0.061696, "percentage": 6.17, "elapsed_time": "0:53:06", "remaining_time": "13:27:36"}
242
+ {"current_steps": 242, "total_steps": 3906, "loss": 1.5702, "learning_rate": 3.973115411565193e-05, "epoch": 0.061952, "percentage": 6.2, "elapsed_time": "0:53:19", "remaining_time": "13:27:22"}
243
+ {"current_steps": 243, "total_steps": 3906, "loss": 1.5163, "learning_rate": 3.9728491740378705e-05, "epoch": 0.062208, "percentage": 6.22, "elapsed_time": "0:53:32", "remaining_time": "13:27:09"}
244
+ {"current_steps": 244, "total_steps": 3906, "loss": 1.5727, "learning_rate": 3.972581633734077e-05, "epoch": 0.062464, "percentage": 6.25, "elapsed_time": "0:53:45", "remaining_time": "13:26:53"}
245
+ {"current_steps": 245, "total_steps": 3906, "loss": 1.5451, "learning_rate": 3.972312790830483e-05, "epoch": 0.06272, "percentage": 6.27, "elapsed_time": "0:53:59", "remaining_time": "13:26:40"}
246
+ {"current_steps": 246, "total_steps": 3906, "loss": 1.5725, "learning_rate": 3.9720426455046195e-05, "epoch": 0.062976, "percentage": 6.3, "elapsed_time": "0:54:12", "remaining_time": "13:26:26"}
247
+ {"current_steps": 247, "total_steps": 3906, "loss": 1.5935, "learning_rate": 3.971771197934878e-05, "epoch": 0.063232, "percentage": 6.32, "elapsed_time": "0:54:25", "remaining_time": "13:26:13"}
248
+ {"current_steps": 248, "total_steps": 3906, "loss": 1.5908, "learning_rate": 3.97149844830051e-05, "epoch": 0.063488, "percentage": 6.35, "elapsed_time": "0:54:38", "remaining_time": "13:26:00"}
249
+ {"current_steps": 249, "total_steps": 3906, "loss": 1.5662, "learning_rate": 3.971224396781626e-05, "epoch": 0.063744, "percentage": 6.37, "elapsed_time": "0:54:51", "remaining_time": "13:25:46"}
250
+ {"current_steps": 250, "total_steps": 3906, "loss": 1.5763, "learning_rate": 3.970949043559197e-05, "epoch": 0.064, "percentage": 6.4, "elapsed_time": "0:55:05", "remaining_time": "13:25:33"}
251
+ {"current_steps": 251, "total_steps": 3906, "loss": 1.5633, "learning_rate": 3.970672388815052e-05, "epoch": 0.064256, "percentage": 6.43, "elapsed_time": "0:55:18", "remaining_time": "13:25:19"}
252
+ {"current_steps": 252, "total_steps": 3906, "loss": 1.5489, "learning_rate": 3.970394432731883e-05, "epoch": 0.064512, "percentage": 6.45, "elapsed_time": "0:55:31", "remaining_time": "13:25:05"}
253
+ {"current_steps": 253, "total_steps": 3906, "loss": 1.5615, "learning_rate": 3.9701151754932376e-05, "epoch": 0.064768, "percentage": 6.48, "elapsed_time": "0:55:44", "remaining_time": "13:24:50"}
254
+ {"current_steps": 254, "total_steps": 3906, "loss": 1.5535, "learning_rate": 3.969834617283525e-05, "epoch": 0.065024, "percentage": 6.5, "elapsed_time": "0:55:57", "remaining_time": "13:24:35"}
255
+ {"current_steps": 255, "total_steps": 3906, "loss": 1.6105, "learning_rate": 3.9695527582880105e-05, "epoch": 0.06528, "percentage": 6.53, "elapsed_time": "0:56:10", "remaining_time": "13:24:20"}
256
+ {"current_steps": 256, "total_steps": 3906, "loss": 1.6258, "learning_rate": 3.969269598692823e-05, "epoch": 0.065536, "percentage": 6.55, "elapsed_time": "0:56:23", "remaining_time": "13:24:05"}
257
+ {"current_steps": 257, "total_steps": 3906, "loss": 1.595, "learning_rate": 3.968985138684947e-05, "epoch": 0.065792, "percentage": 6.58, "elapsed_time": "0:56:36", "remaining_time": "13:23:51"}
258
+ {"current_steps": 258, "total_steps": 3906, "loss": 1.5708, "learning_rate": 3.968699378452226e-05, "epoch": 0.066048, "percentage": 6.61, "elapsed_time": "0:56:50", "remaining_time": "13:23:38"}
259
+ {"current_steps": 259, "total_steps": 3906, "loss": 1.524, "learning_rate": 3.968412318183362e-05, "epoch": 0.066304, "percentage": 6.63, "elapsed_time": "0:57:03", "remaining_time": "13:23:25"}
260
+ {"current_steps": 260, "total_steps": 3906, "loss": 1.5344, "learning_rate": 3.968123958067917e-05, "epoch": 0.06656, "percentage": 6.66, "elapsed_time": "0:57:16", "remaining_time": "13:23:12"}
261
+ {"current_steps": 261, "total_steps": 3906, "loss": 1.557, "learning_rate": 3.9678342982963105e-05, "epoch": 0.066816, "percentage": 6.68, "elapsed_time": "0:57:29", "remaining_time": "13:22:58"}
262
+ {"current_steps": 262, "total_steps": 3906, "loss": 1.5299, "learning_rate": 3.96754333905982e-05, "epoch": 0.067072, "percentage": 6.71, "elapsed_time": "0:57:43", "remaining_time": "13:22:45"}
263
+ {"current_steps": 263, "total_steps": 3906, "loss": 1.5723, "learning_rate": 3.96725108055058e-05, "epoch": 0.067328, "percentage": 6.73, "elapsed_time": "0:57:56", "remaining_time": "13:22:32"}
264
+ {"current_steps": 264, "total_steps": 3906, "loss": 1.5963, "learning_rate": 3.966957522961586e-05, "epoch": 0.067584, "percentage": 6.76, "elapsed_time": "0:58:09", "remaining_time": "13:22:18"}
265
+ {"current_steps": 265, "total_steps": 3906, "loss": 1.5364, "learning_rate": 3.966662666486689e-05, "epoch": 0.06784, "percentage": 6.78, "elapsed_time": "0:58:22", "remaining_time": "13:22:04"}
266
+ {"current_steps": 266, "total_steps": 3906, "loss": 1.5923, "learning_rate": 3.966366511320598e-05, "epoch": 0.068096, "percentage": 6.81, "elapsed_time": "0:58:35", "remaining_time": "13:21:50"}
267
+ {"current_steps": 267, "total_steps": 3906, "loss": 1.5576, "learning_rate": 3.966069057658881e-05, "epoch": 0.068352, "percentage": 6.84, "elapsed_time": "0:58:48", "remaining_time": "13:21:36"}
268
+ {"current_steps": 268, "total_steps": 3906, "loss": 1.6256, "learning_rate": 3.96577030569796e-05, "epoch": 0.068608, "percentage": 6.86, "elapsed_time": "0:59:02", "remaining_time": "13:21:23"}
269
+ {"current_steps": 269, "total_steps": 3906, "loss": 1.5431, "learning_rate": 3.965470255635119e-05, "epoch": 0.068864, "percentage": 6.89, "elapsed_time": "0:59:15", "remaining_time": "13:21:12"}
270
+ {"current_steps": 270, "total_steps": 3906, "loss": 1.5816, "learning_rate": 3.965168907668496e-05, "epoch": 0.06912, "percentage": 6.91, "elapsed_time": "0:59:28", "remaining_time": "13:20:59"}
271
+ {"current_steps": 271, "total_steps": 3906, "loss": 1.59, "learning_rate": 3.9648662619970866e-05, "epoch": 0.069376, "percentage": 6.94, "elapsed_time": "0:59:41", "remaining_time": "13:20:46"}
272
+ {"current_steps": 272, "total_steps": 3906, "loss": 1.5574, "learning_rate": 3.964562318820744e-05, "epoch": 0.069632, "percentage": 6.96, "elapsed_time": "0:59:55", "remaining_time": "13:20:32"}
273
+ {"current_steps": 273, "total_steps": 3906, "loss": 1.5142, "learning_rate": 3.964257078340178e-05, "epoch": 0.069888, "percentage": 6.99, "elapsed_time": "1:00:08", "remaining_time": "13:20:19"}
274
+ {"current_steps": 274, "total_steps": 3906, "loss": 1.5168, "learning_rate": 3.963950540756955e-05, "epoch": 0.070144, "percentage": 7.01, "elapsed_time": "1:00:21", "remaining_time": "13:20:04"}
275
+ {"current_steps": 275, "total_steps": 3906, "loss": 1.581, "learning_rate": 3.963642706273499e-05, "epoch": 0.0704, "percentage": 7.04, "elapsed_time": "1:00:34", "remaining_time": "13:19:50"}
276
+ {"current_steps": 276, "total_steps": 3906, "loss": 1.5589, "learning_rate": 3.9633335750930874e-05, "epoch": 0.070656, "percentage": 7.07, "elapsed_time": "1:00:47", "remaining_time": "13:19:36"}
277
+ {"current_steps": 277, "total_steps": 3906, "loss": 1.5933, "learning_rate": 3.963023147419858e-05, "epoch": 0.070912, "percentage": 7.09, "elapsed_time": "1:01:00", "remaining_time": "13:19:21"}
278
+ {"current_steps": 278, "total_steps": 3906, "loss": 1.5605, "learning_rate": 3.9627114234588e-05, "epoch": 0.071168, "percentage": 7.12, "elapsed_time": "1:01:14", "remaining_time": "13:19:07"}
279
+ {"current_steps": 279, "total_steps": 3906, "loss": 1.6025, "learning_rate": 3.962398403415763e-05, "epoch": 0.071424, "percentage": 7.14, "elapsed_time": "1:01:27", "remaining_time": "13:18:53"}
280
+ {"current_steps": 280, "total_steps": 3906, "loss": 1.5318, "learning_rate": 3.9620840874974505e-05, "epoch": 0.07168, "percentage": 7.17, "elapsed_time": "1:01:40", "remaining_time": "13:18:40"}
281
+ {"current_steps": 281, "total_steps": 3906, "loss": 1.5637, "learning_rate": 3.961768475911421e-05, "epoch": 0.071936, "percentage": 7.19, "elapsed_time": "1:01:53", "remaining_time": "13:18:26"}
282
+ {"current_steps": 282, "total_steps": 3906, "loss": 1.574, "learning_rate": 3.9614515688660906e-05, "epoch": 0.072192, "percentage": 7.22, "elapsed_time": "1:02:06", "remaining_time": "13:18:13"}
283
+ {"current_steps": 283, "total_steps": 3906, "loss": 1.5605, "learning_rate": 3.961133366570729e-05, "epoch": 0.072448, "percentage": 7.25, "elapsed_time": "1:02:20", "remaining_time": "13:18:00"}
284
+ {"current_steps": 284, "total_steps": 3906, "loss": 1.5433, "learning_rate": 3.960813869235462e-05, "epoch": 0.072704, "percentage": 7.27, "elapsed_time": "1:02:33", "remaining_time": "13:17:46"}
285
+ {"current_steps": 285, "total_steps": 3906, "loss": 1.5768, "learning_rate": 3.960493077071272e-05, "epoch": 0.07296, "percentage": 7.3, "elapsed_time": "1:02:46", "remaining_time": "13:17:33"}
286
+ {"current_steps": 286, "total_steps": 3906, "loss": 1.5973, "learning_rate": 3.960170990289992e-05, "epoch": 0.073216, "percentage": 7.32, "elapsed_time": "1:02:59", "remaining_time": "13:17:19"}
287
+ {"current_steps": 287, "total_steps": 3906, "loss": 1.5591, "learning_rate": 3.9598476091043165e-05, "epoch": 0.073472, "percentage": 7.35, "elapsed_time": "1:03:12", "remaining_time": "13:17:06"}
288
+ {"current_steps": 288, "total_steps": 3906, "loss": 1.5594, "learning_rate": 3.9595229337277884e-05, "epoch": 0.073728, "percentage": 7.37, "elapsed_time": "1:03:25", "remaining_time": "13:16:52"}
289
+ {"current_steps": 289, "total_steps": 3906, "loss": 1.6103, "learning_rate": 3.9591969643748094e-05, "epoch": 0.073984, "percentage": 7.4, "elapsed_time": "1:03:39", "remaining_time": "13:16:39"}
290
+ {"current_steps": 290, "total_steps": 3906, "loss": 1.5677, "learning_rate": 3.958869701260633e-05, "epoch": 0.07424, "percentage": 7.42, "elapsed_time": "1:03:52", "remaining_time": "13:16:25"}
291
+ {"current_steps": 291, "total_steps": 3906, "loss": 1.5817, "learning_rate": 3.95854114460137e-05, "epoch": 0.074496, "percentage": 7.45, "elapsed_time": "1:04:05", "remaining_time": "13:16:12"}
292
+ {"current_steps": 292, "total_steps": 3906, "loss": 1.6012, "learning_rate": 3.958211294613983e-05, "epoch": 0.074752, "percentage": 7.48, "elapsed_time": "1:04:18", "remaining_time": "13:15:58"}
293
+ {"current_steps": 293, "total_steps": 3906, "loss": 1.5017, "learning_rate": 3.957880151516289e-05, "epoch": 0.075008, "percentage": 7.5, "elapsed_time": "1:04:31", "remaining_time": "13:15:45"}
294
+ {"current_steps": 294, "total_steps": 3906, "loss": 1.5916, "learning_rate": 3.957547715526959e-05, "epoch": 0.075264, "percentage": 7.53, "elapsed_time": "1:04:45", "remaining_time": "13:15:31"}
295
+ {"current_steps": 295, "total_steps": 3906, "loss": 1.6245, "learning_rate": 3.957213986865519e-05, "epoch": 0.07552, "percentage": 7.55, "elapsed_time": "1:04:58", "remaining_time": "13:15:18"}
296
+ {"current_steps": 296, "total_steps": 3906, "loss": 1.569, "learning_rate": 3.956878965752348e-05, "epoch": 0.075776, "percentage": 7.58, "elapsed_time": "1:05:11", "remaining_time": "13:15:05"}
297
+ {"current_steps": 297, "total_steps": 3906, "loss": 1.5321, "learning_rate": 3.956542652408676e-05, "epoch": 0.076032, "percentage": 7.6, "elapsed_time": "1:05:24", "remaining_time": "13:14:52"}
298
+ {"current_steps": 298, "total_steps": 3906, "loss": 1.5182, "learning_rate": 3.9562050470565896e-05, "epoch": 0.076288, "percentage": 7.63, "elapsed_time": "1:05:37", "remaining_time": "13:14:38"}
299
+ {"current_steps": 299, "total_steps": 3906, "loss": 1.5448, "learning_rate": 3.9558661499190287e-05, "epoch": 0.076544, "percentage": 7.65, "elapsed_time": "1:05:51", "remaining_time": "13:14:24"}
300
+ {"current_steps": 300, "total_steps": 3906, "loss": 1.5386, "learning_rate": 3.955525961219783e-05, "epoch": 0.0768, "percentage": 7.68, "elapsed_time": "1:06:04", "remaining_time": "13:14:09"}
301
+ {"current_steps": 301, "total_steps": 3906, "loss": 1.521, "learning_rate": 3.9551844811834984e-05, "epoch": 0.077056, "percentage": 7.71, "elapsed_time": "1:06:17", "remaining_time": "13:13:54"}
302
+ {"current_steps": 302, "total_steps": 3906, "loss": 1.5479, "learning_rate": 3.954841710035672e-05, "epoch": 0.077312, "percentage": 7.73, "elapsed_time": "1:06:30", "remaining_time": "13:13:40"}
303
+ {"current_steps": 303, "total_steps": 3906, "loss": 1.5287, "learning_rate": 3.954497648002654e-05, "epoch": 0.077568, "percentage": 7.76, "elapsed_time": "1:06:43", "remaining_time": "13:13:27"}
304
+ {"current_steps": 304, "total_steps": 3906, "loss": 1.5556, "learning_rate": 3.9541522953116446e-05, "epoch": 0.077824, "percentage": 7.78, "elapsed_time": "1:06:56", "remaining_time": "13:13:13"}
305
+ {"current_steps": 305, "total_steps": 3906, "loss": 1.539, "learning_rate": 3.953805652190701e-05, "epoch": 0.07808, "percentage": 7.81, "elapsed_time": "1:07:10", "remaining_time": "13:13:00"}
306
+ {"current_steps": 306, "total_steps": 3906, "loss": 1.5823, "learning_rate": 3.9534577188687285e-05, "epoch": 0.078336, "percentage": 7.83, "elapsed_time": "1:07:23", "remaining_time": "13:12:47"}
307
+ {"current_steps": 307, "total_steps": 3906, "loss": 1.5869, "learning_rate": 3.9531084955754865e-05, "epoch": 0.078592, "percentage": 7.86, "elapsed_time": "1:07:36", "remaining_time": "13:12:36"}
308
+ {"current_steps": 308, "total_steps": 3906, "loss": 1.5601, "learning_rate": 3.952757982541585e-05, "epoch": 0.078848, "percentage": 7.89, "elapsed_time": "1:07:49", "remaining_time": "13:12:21"}
309
+ {"current_steps": 309, "total_steps": 3906, "loss": 1.5167, "learning_rate": 3.952406179998487e-05, "epoch": 0.079104, "percentage": 7.91, "elapsed_time": "1:08:02", "remaining_time": "13:12:08"}
310
+ {"current_steps": 310, "total_steps": 3906, "loss": 1.5454, "learning_rate": 3.952053088178506e-05, "epoch": 0.07936, "percentage": 7.94, "elapsed_time": "1:08:16", "remaining_time": "13:11:54"}
311
+ {"current_steps": 311, "total_steps": 3906, "loss": 1.5442, "learning_rate": 3.951698707314808e-05, "epoch": 0.079616, "percentage": 7.96, "elapsed_time": "1:08:29", "remaining_time": "13:11:40"}
312
+ {"current_steps": 312, "total_steps": 3906, "loss": 1.5223, "learning_rate": 3.9513430376414065e-05, "epoch": 0.079872, "percentage": 7.99, "elapsed_time": "1:08:42", "remaining_time": "13:11:26"}
313
+ {"current_steps": 313, "total_steps": 3906, "loss": 1.5353, "learning_rate": 3.9509860793931716e-05, "epoch": 0.080128, "percentage": 8.01, "elapsed_time": "1:08:55", "remaining_time": "13:11:13"}
314
+ {"current_steps": 314, "total_steps": 3906, "loss": 1.5443, "learning_rate": 3.9506278328058217e-05, "epoch": 0.080384, "percentage": 8.04, "elapsed_time": "1:09:08", "remaining_time": "13:11:00"}
315
+ {"current_steps": 315, "total_steps": 3906, "loss": 1.548, "learning_rate": 3.950268298115925e-05, "epoch": 0.08064, "percentage": 8.06, "elapsed_time": "1:09:21", "remaining_time": "13:10:46"}
316
+ {"current_steps": 316, "total_steps": 3906, "loss": 1.6164, "learning_rate": 3.949907475560901e-05, "epoch": 0.080896, "percentage": 8.09, "elapsed_time": "1:09:35", "remaining_time": "13:10:33"}
317
+ {"current_steps": 317, "total_steps": 3906, "loss": 1.5858, "learning_rate": 3.9495453653790207e-05, "epoch": 0.081152, "percentage": 8.12, "elapsed_time": "1:09:48", "remaining_time": "13:10:20"}
318
+ {"current_steps": 318, "total_steps": 3906, "loss": 1.5514, "learning_rate": 3.949181967809404e-05, "epoch": 0.081408, "percentage": 8.14, "elapsed_time": "1:10:01", "remaining_time": "13:10:06"}
319
+ {"current_steps": 319, "total_steps": 3906, "loss": 1.5811, "learning_rate": 3.948817283092022e-05, "epoch": 0.081664, "percentage": 8.17, "elapsed_time": "1:10:14", "remaining_time": "13:09:53"}
320
+ {"current_steps": 320, "total_steps": 3906, "loss": 1.5182, "learning_rate": 3.948451311467695e-05, "epoch": 0.08192, "percentage": 8.19, "elapsed_time": "1:10:28", "remaining_time": "13:09:40"}
321
+ {"current_steps": 321, "total_steps": 3906, "loss": 1.5497, "learning_rate": 3.948084053178094e-05, "epoch": 0.082176, "percentage": 8.22, "elapsed_time": "1:10:41", "remaining_time": "13:09:27"}
322
+ {"current_steps": 322, "total_steps": 3906, "loss": 1.5614, "learning_rate": 3.947715508465738e-05, "epoch": 0.082432, "percentage": 8.24, "elapsed_time": "1:10:54", "remaining_time": "13:09:14"}
323
+ {"current_steps": 323, "total_steps": 3906, "loss": 1.5218, "learning_rate": 3.947345677573997e-05, "epoch": 0.082688, "percentage": 8.27, "elapsed_time": "1:11:07", "remaining_time": "13:09:00"}
324
+ {"current_steps": 324, "total_steps": 3906, "loss": 1.5016, "learning_rate": 3.946974560747089e-05, "epoch": 0.082944, "percentage": 8.29, "elapsed_time": "1:11:20", "remaining_time": "13:08:45"}
325
+ {"current_steps": 325, "total_steps": 3906, "loss": 1.5219, "learning_rate": 3.946602158230084e-05, "epoch": 0.0832, "percentage": 8.32, "elapsed_time": "1:11:33", "remaining_time": "13:08:32"}
326
+ {"current_steps": 326, "total_steps": 3906, "loss": 1.5709, "learning_rate": 3.946228470268898e-05, "epoch": 0.083456, "percentage": 8.35, "elapsed_time": "1:11:47", "remaining_time": "13:08:18"}
327
+ {"current_steps": 327, "total_steps": 3906, "loss": 1.5026, "learning_rate": 3.945853497110296e-05, "epoch": 0.083712, "percentage": 8.37, "elapsed_time": "1:12:00", "remaining_time": "13:08:05"}
328
+ {"current_steps": 328, "total_steps": 3906, "loss": 1.5638, "learning_rate": 3.945477239001893e-05, "epoch": 0.083968, "percentage": 8.4, "elapsed_time": "1:12:13", "remaining_time": "13:07:52"}
329
+ {"current_steps": 329, "total_steps": 3906, "loss": 1.5849, "learning_rate": 3.9450996961921536e-05, "epoch": 0.084224, "percentage": 8.42, "elapsed_time": "1:12:26", "remaining_time": "13:07:39"}
330
+ {"current_steps": 330, "total_steps": 3906, "loss": 1.5437, "learning_rate": 3.944720868930388e-05, "epoch": 0.08448, "percentage": 8.45, "elapsed_time": "1:12:39", "remaining_time": "13:07:25"}
331
+ {"current_steps": 331, "total_steps": 3906, "loss": 1.5299, "learning_rate": 3.944340757466756e-05, "epoch": 0.084736, "percentage": 8.47, "elapsed_time": "1:12:53", "remaining_time": "13:07:12"}
332
+ {"current_steps": 332, "total_steps": 3906, "loss": 1.5006, "learning_rate": 3.9439593620522644e-05, "epoch": 0.084992, "percentage": 8.5, "elapsed_time": "1:13:06", "remaining_time": "13:06:59"}
333
+ {"current_steps": 333, "total_steps": 3906, "loss": 1.5268, "learning_rate": 3.9435766829387706e-05, "epoch": 0.085248, "percentage": 8.53, "elapsed_time": "1:13:19", "remaining_time": "13:06:44"}
334
+ {"current_steps": 334, "total_steps": 3906, "loss": 1.5115, "learning_rate": 3.943192720378976e-05, "epoch": 0.085504, "percentage": 8.55, "elapsed_time": "1:13:32", "remaining_time": "13:06:30"}
335
+ {"current_steps": 335, "total_steps": 3906, "loss": 1.556, "learning_rate": 3.942807474626433e-05, "epoch": 0.08576, "percentage": 8.58, "elapsed_time": "1:13:45", "remaining_time": "13:06:16"}
336
+ {"current_steps": 336, "total_steps": 3906, "loss": 1.5597, "learning_rate": 3.9424209459355385e-05, "epoch": 0.086016, "percentage": 8.6, "elapsed_time": "1:13:58", "remaining_time": "13:06:02"}
337
+ {"current_steps": 337, "total_steps": 3906, "loss": 1.4676, "learning_rate": 3.942033134561538e-05, "epoch": 0.086272, "percentage": 8.63, "elapsed_time": "1:14:12", "remaining_time": "13:05:49"}
338
+ {"current_steps": 338, "total_steps": 3906, "loss": 1.5329, "learning_rate": 3.941644040760523e-05, "epoch": 0.086528, "percentage": 8.65, "elapsed_time": "1:14:25", "remaining_time": "13:05:36"}
339
+ {"current_steps": 339, "total_steps": 3906, "loss": 1.5304, "learning_rate": 3.941253664789435e-05, "epoch": 0.086784, "percentage": 8.68, "elapsed_time": "1:14:38", "remaining_time": "13:05:23"}
340
+ {"current_steps": 340, "total_steps": 3906, "loss": 1.5164, "learning_rate": 3.9408620069060584e-05, "epoch": 0.08704, "percentage": 8.7, "elapsed_time": "1:14:51", "remaining_time": "13:05:09"}
341
+ {"current_steps": 341, "total_steps": 3906, "loss": 1.5121, "learning_rate": 3.9404690673690255e-05, "epoch": 0.087296, "percentage": 8.73, "elapsed_time": "1:15:04", "remaining_time": "13:04:56"}
342
+ {"current_steps": 342, "total_steps": 3906, "loss": 1.525, "learning_rate": 3.940074846437815e-05, "epoch": 0.087552, "percentage": 8.76, "elapsed_time": "1:15:18", "remaining_time": "13:04:43"}
343
+ {"current_steps": 343, "total_steps": 3906, "loss": 1.4974, "learning_rate": 3.939679344372753e-05, "epoch": 0.087808, "percentage": 8.78, "elapsed_time": "1:15:31", "remaining_time": "13:04:29"}
344
+ {"current_steps": 344, "total_steps": 3906, "loss": 1.5145, "learning_rate": 3.9392825614350085e-05, "epoch": 0.088064, "percentage": 8.81, "elapsed_time": "1:15:44", "remaining_time": "13:04:15"}
345
+ {"current_steps": 345, "total_steps": 3906, "loss": 1.5629, "learning_rate": 3.9388844978865994e-05, "epoch": 0.08832, "percentage": 8.83, "elapsed_time": "1:15:57", "remaining_time": "13:04:01"}
346
+ {"current_steps": 346, "total_steps": 3906, "loss": 1.4852, "learning_rate": 3.938485153990388e-05, "epoch": 0.088576, "percentage": 8.86, "elapsed_time": "1:16:10", "remaining_time": "13:03:47"}
347
+ {"current_steps": 347, "total_steps": 3906, "loss": 1.5562, "learning_rate": 3.9380845300100816e-05, "epoch": 0.088832, "percentage": 8.88, "elapsed_time": "1:16:23", "remaining_time": "13:03:34"}
348
+ {"current_steps": 348, "total_steps": 3906, "loss": 1.533, "learning_rate": 3.937682626210234e-05, "epoch": 0.089088, "percentage": 8.91, "elapsed_time": "1:16:37", "remaining_time": "13:03:21"}
349
+ {"current_steps": 349, "total_steps": 3906, "loss": 1.5696, "learning_rate": 3.9372794428562426e-05, "epoch": 0.089344, "percentage": 8.93, "elapsed_time": "1:16:50", "remaining_time": "13:03:07"}
350
+ {"current_steps": 350, "total_steps": 3906, "loss": 1.5335, "learning_rate": 3.9368749802143524e-05, "epoch": 0.0896, "percentage": 8.96, "elapsed_time": "1:17:03", "remaining_time": "13:02:54"}
351
+ {"current_steps": 351, "total_steps": 3906, "loss": 1.5339, "learning_rate": 3.93646923855165e-05, "epoch": 0.089856, "percentage": 8.99, "elapsed_time": "1:17:16", "remaining_time": "13:02:41"}
352
+ {"current_steps": 352, "total_steps": 3906, "loss": 1.5391, "learning_rate": 3.9360622181360675e-05, "epoch": 0.090112, "percentage": 9.01, "elapsed_time": "1:17:29", "remaining_time": "13:02:28"}
353
+ {"current_steps": 353, "total_steps": 3906, "loss": 1.5424, "learning_rate": 3.9356539192363834e-05, "epoch": 0.090368, "percentage": 9.04, "elapsed_time": "1:17:43", "remaining_time": "13:02:14"}
354
+ {"current_steps": 354, "total_steps": 3906, "loss": 1.5176, "learning_rate": 3.935244342122219e-05, "epoch": 0.090624, "percentage": 9.06, "elapsed_time": "1:17:56", "remaining_time": "13:02:01"}
355
+ {"current_steps": 355, "total_steps": 3906, "loss": 1.5632, "learning_rate": 3.934833487064038e-05, "epoch": 0.09088, "percentage": 9.09, "elapsed_time": "1:18:09", "remaining_time": "13:01:47"}
356
+ {"current_steps": 356, "total_steps": 3906, "loss": 1.5766, "learning_rate": 3.9344213543331524e-05, "epoch": 0.091136, "percentage": 9.11, "elapsed_time": "1:18:22", "remaining_time": "13:01:33"}
357
+ {"current_steps": 357, "total_steps": 3906, "loss": 1.5205, "learning_rate": 3.934007944201713e-05, "epoch": 0.091392, "percentage": 9.14, "elapsed_time": "1:18:35", "remaining_time": "13:01:20"}
358
+ {"current_steps": 358, "total_steps": 3906, "loss": 1.5189, "learning_rate": 3.933593256942717e-05, "epoch": 0.091648, "percentage": 9.17, "elapsed_time": "1:18:48", "remaining_time": "13:01:06"}
359
+ {"current_steps": 359, "total_steps": 3906, "loss": 1.498, "learning_rate": 3.933177292830004e-05, "epoch": 0.091904, "percentage": 9.19, "elapsed_time": "1:19:02", "remaining_time": "13:00:53"}
360
+ {"current_steps": 360, "total_steps": 3906, "loss": 1.5586, "learning_rate": 3.932760052138258e-05, "epoch": 0.09216, "percentage": 9.22, "elapsed_time": "1:19:15", "remaining_time": "13:00:40"}
361
+ {"current_steps": 361, "total_steps": 3906, "loss": 1.4637, "learning_rate": 3.932341535143003e-05, "epoch": 0.092416, "percentage": 9.24, "elapsed_time": "1:19:28", "remaining_time": "13:00:26"}
362
+ {"current_steps": 362, "total_steps": 3906, "loss": 1.5315, "learning_rate": 3.931921742120611e-05, "epoch": 0.092672, "percentage": 9.27, "elapsed_time": "1:19:41", "remaining_time": "13:00:13"}
363
+ {"current_steps": 363, "total_steps": 3906, "loss": 1.4683, "learning_rate": 3.931500673348291e-05, "epoch": 0.092928, "percentage": 9.29, "elapsed_time": "1:19:54", "remaining_time": "13:00:00"}
364
+ {"current_steps": 364, "total_steps": 3906, "loss": 1.5282, "learning_rate": 3.931078329104098e-05, "epoch": 0.093184, "percentage": 9.32, "elapsed_time": "1:20:08", "remaining_time": "12:59:47"}
365
+ {"current_steps": 365, "total_steps": 3906, "loss": 1.4843, "learning_rate": 3.930654709666927e-05, "epoch": 0.09344, "percentage": 9.34, "elapsed_time": "1:20:21", "remaining_time": "12:59:33"}
366
+ {"current_steps": 366, "total_steps": 3906, "loss": 1.507, "learning_rate": 3.930229815316517e-05, "epoch": 0.093696, "percentage": 9.37, "elapsed_time": "1:20:34", "remaining_time": "12:59:20"}
367
+ {"current_steps": 367, "total_steps": 3906, "loss": 1.5102, "learning_rate": 3.92980364633345e-05, "epoch": 0.093952, "percentage": 9.4, "elapsed_time": "1:20:47", "remaining_time": "12:59:07"}
368
+ {"current_steps": 368, "total_steps": 3906, "loss": 1.4859, "learning_rate": 3.9293762029991454e-05, "epoch": 0.094208, "percentage": 9.42, "elapsed_time": "1:21:00", "remaining_time": "12:58:53"}
369
+ {"current_steps": 369, "total_steps": 3906, "loss": 1.5334, "learning_rate": 3.9289474855958675e-05, "epoch": 0.094464, "percentage": 9.45, "elapsed_time": "1:21:14", "remaining_time": "12:58:40"}
370
+ {"current_steps": 370, "total_steps": 3906, "loss": 1.5007, "learning_rate": 3.928517494406721e-05, "epoch": 0.09472, "percentage": 9.47, "elapsed_time": "1:21:27", "remaining_time": "12:58:27"}
371
+ {"current_steps": 371, "total_steps": 3906, "loss": 1.5516, "learning_rate": 3.928086229715652e-05, "epoch": 0.094976, "percentage": 9.5, "elapsed_time": "1:21:40", "remaining_time": "12:58:13"}
372
+ {"current_steps": 372, "total_steps": 3906, "loss": 1.5632, "learning_rate": 3.927653691807447e-05, "epoch": 0.095232, "percentage": 9.52, "elapsed_time": "1:21:53", "remaining_time": "12:58:00"}
373
+ {"current_steps": 373, "total_steps": 3906, "loss": 1.5356, "learning_rate": 3.927219880967733e-05, "epoch": 0.095488, "percentage": 9.55, "elapsed_time": "1:22:06", "remaining_time": "12:57:47"}
374
+ {"current_steps": 374, "total_steps": 3906, "loss": 1.495, "learning_rate": 3.92678479748298e-05, "epoch": 0.095744, "percentage": 9.58, "elapsed_time": "1:22:20", "remaining_time": "12:57:33"}
375
+ {"current_steps": 375, "total_steps": 3906, "loss": 1.5237, "learning_rate": 3.926348441640495e-05, "epoch": 0.096, "percentage": 9.6, "elapsed_time": "1:22:33", "remaining_time": "12:57:21"}
376
+ {"current_steps": 376, "total_steps": 3906, "loss": 1.5611, "learning_rate": 3.9259108137284275e-05, "epoch": 0.096256, "percentage": 9.63, "elapsed_time": "1:22:46", "remaining_time": "12:57:08"}
377
+ {"current_steps": 377, "total_steps": 3906, "loss": 1.5373, "learning_rate": 3.9254719140357656e-05, "epoch": 0.096512, "percentage": 9.65, "elapsed_time": "1:22:59", "remaining_time": "12:56:54"}
378
+ {"current_steps": 378, "total_steps": 3906, "loss": 1.5063, "learning_rate": 3.925031742852339e-05, "epoch": 0.096768, "percentage": 9.68, "elapsed_time": "1:23:13", "remaining_time": "12:56:41"}
379
+ {"current_steps": 379, "total_steps": 3906, "loss": 1.5304, "learning_rate": 3.9245903004688156e-05, "epoch": 0.097024, "percentage": 9.7, "elapsed_time": "1:23:26", "remaining_time": "12:56:28"}
380
+ {"current_steps": 380, "total_steps": 3906, "loss": 1.5237, "learning_rate": 3.9241475871767026e-05, "epoch": 0.09728, "percentage": 9.73, "elapsed_time": "1:23:39", "remaining_time": "12:56:14"}
381
+ {"current_steps": 381, "total_steps": 3906, "loss": 1.5362, "learning_rate": 3.923703603268348e-05, "epoch": 0.097536, "percentage": 9.75, "elapsed_time": "1:23:52", "remaining_time": "12:56:00"}
382
+ {"current_steps": 382, "total_steps": 3906, "loss": 1.4965, "learning_rate": 3.9232583490369363e-05, "epoch": 0.097792, "percentage": 9.78, "elapsed_time": "1:24:05", "remaining_time": "12:55:47"}
383
+ {"current_steps": 383, "total_steps": 3906, "loss": 1.5107, "learning_rate": 3.922811824776494e-05, "epoch": 0.098048, "percentage": 9.81, "elapsed_time": "1:24:18", "remaining_time": "12:55:33"}
384
+ {"current_steps": 384, "total_steps": 3906, "loss": 1.4619, "learning_rate": 3.922364030781884e-05, "epoch": 0.098304, "percentage": 9.83, "elapsed_time": "1:24:32", "remaining_time": "12:55:20"}
385
+ {"current_steps": 385, "total_steps": 3906, "loss": 1.5492, "learning_rate": 3.921914967348807e-05, "epoch": 0.09856, "percentage": 9.86, "elapsed_time": "1:24:45", "remaining_time": "12:55:07"}
386
+ {"current_steps": 386, "total_steps": 3906, "loss": 1.5979, "learning_rate": 3.9214646347738055e-05, "epoch": 0.098816, "percentage": 9.88, "elapsed_time": "1:24:58", "remaining_time": "12:54:54"}
387
+ {"current_steps": 387, "total_steps": 3906, "loss": 1.4744, "learning_rate": 3.921013033354256e-05, "epoch": 0.099072, "percentage": 9.91, "elapsed_time": "1:25:11", "remaining_time": "12:54:40"}
388
+ {"current_steps": 388, "total_steps": 3906, "loss": 1.4878, "learning_rate": 3.920560163388377e-05, "epoch": 0.099328, "percentage": 9.93, "elapsed_time": "1:25:24", "remaining_time": "12:54:26"}
389
+ {"current_steps": 389, "total_steps": 3906, "loss": 1.4809, "learning_rate": 3.92010602517522e-05, "epoch": 0.099584, "percentage": 9.96, "elapsed_time": "1:25:38", "remaining_time": "12:54:13"}
390
+ {"current_steps": 390, "total_steps": 3906, "loss": 1.5365, "learning_rate": 3.919650619014678e-05, "epoch": 0.09984, "percentage": 9.98, "elapsed_time": "1:25:51", "remaining_time": "12:53:59"}
391
+ {"current_steps": 391, "total_steps": 3906, "loss": 1.5147, "learning_rate": 3.9191939452074786e-05, "epoch": 0.100096, "percentage": 10.01, "elapsed_time": "1:26:04", "remaining_time": "12:53:46"}
392
+ {"current_steps": 392, "total_steps": 3906, "loss": 1.5106, "learning_rate": 3.9187360040551886e-05, "epoch": 0.100352, "percentage": 10.04, "elapsed_time": "1:26:17", "remaining_time": "12:53:33"}
393
+ {"current_steps": 393, "total_steps": 3906, "loss": 1.4999, "learning_rate": 3.9182767958602104e-05, "epoch": 0.100608, "percentage": 10.06, "elapsed_time": "1:26:30", "remaining_time": "12:53:20"}
394
+ {"current_steps": 394, "total_steps": 3906, "loss": 1.5021, "learning_rate": 3.917816320925784e-05, "epoch": 0.100864, "percentage": 10.09, "elapsed_time": "1:26:43", "remaining_time": "12:53:06"}
395
+ {"current_steps": 395, "total_steps": 3906, "loss": 1.4991, "learning_rate": 3.917354579555984e-05, "epoch": 0.10112, "percentage": 10.11, "elapsed_time": "1:26:57", "remaining_time": "12:52:53"}
396
+ {"current_steps": 396, "total_steps": 3906, "loss": 1.5445, "learning_rate": 3.916891572055724e-05, "epoch": 0.101376, "percentage": 10.14, "elapsed_time": "1:27:10", "remaining_time": "12:52:40"}
397
+ {"current_steps": 397, "total_steps": 3906, "loss": 1.5418, "learning_rate": 3.916427298730751e-05, "epoch": 0.101632, "percentage": 10.16, "elapsed_time": "1:27:23", "remaining_time": "12:52:26"}
398
+ {"current_steps": 398, "total_steps": 3906, "loss": 1.4997, "learning_rate": 3.9159617598876495e-05, "epoch": 0.101888, "percentage": 10.19, "elapsed_time": "1:27:36", "remaining_time": "12:52:13"}
399
+ {"current_steps": 399, "total_steps": 3906, "loss": 1.4576, "learning_rate": 3.9154949558338404e-05, "epoch": 0.102144, "percentage": 10.22, "elapsed_time": "1:27:50", "remaining_time": "12:52:00"}
400
+ {"current_steps": 400, "total_steps": 3906, "loss": 1.4819, "learning_rate": 3.915026886877578e-05, "epoch": 0.1024, "percentage": 10.24, "elapsed_time": "1:28:03", "remaining_time": "12:51:48"}
401
+ {"current_steps": 401, "total_steps": 3906, "loss": 1.4622, "learning_rate": 3.914557553327954e-05, "epoch": 0.102656, "percentage": 10.27, "elapsed_time": "1:28:33", "remaining_time": "12:53:59"}
402
+ {"current_steps": 402, "total_steps": 3906, "loss": 1.5045, "learning_rate": 3.914086955494893e-05, "epoch": 0.102912, "percentage": 10.29, "elapsed_time": "1:28:46", "remaining_time": "12:53:45"}
403
+ {"current_steps": 403, "total_steps": 3906, "loss": 1.5452, "learning_rate": 3.913615093689155e-05, "epoch": 0.103168, "percentage": 10.32, "elapsed_time": "1:28:59", "remaining_time": "12:53:32"}
404
+ {"current_steps": 404, "total_steps": 3906, "loss": 1.5164, "learning_rate": 3.9131419682223376e-05, "epoch": 0.103424, "percentage": 10.34, "elapsed_time": "1:29:12", "remaining_time": "12:53:18"}
405
+ {"current_steps": 405, "total_steps": 3906, "loss": 1.5413, "learning_rate": 3.9126675794068686e-05, "epoch": 0.10368, "percentage": 10.37, "elapsed_time": "1:29:25", "remaining_time": "12:53:05"}
406
+ {"current_steps": 406, "total_steps": 3906, "loss": 1.5168, "learning_rate": 3.912191927556013e-05, "epoch": 0.103936, "percentage": 10.39, "elapsed_time": "1:29:39", "remaining_time": "12:52:52"}
407
+ {"current_steps": 407, "total_steps": 3906, "loss": 1.5974, "learning_rate": 3.911715012983868e-05, "epoch": 0.104192, "percentage": 10.42, "elapsed_time": "1:29:52", "remaining_time": "12:52:38"}
408
+ {"current_steps": 408, "total_steps": 3906, "loss": 1.506, "learning_rate": 3.911236836005366e-05, "epoch": 0.104448, "percentage": 10.45, "elapsed_time": "1:30:05", "remaining_time": "12:52:25"}
409
+ {"current_steps": 409, "total_steps": 3906, "loss": 1.5209, "learning_rate": 3.910757396936273e-05, "epoch": 0.104704, "percentage": 10.47, "elapsed_time": "1:30:18", "remaining_time": "12:52:11"}
410
+ {"current_steps": 410, "total_steps": 3906, "loss": 1.4858, "learning_rate": 3.910276696093186e-05, "epoch": 0.10496, "percentage": 10.5, "elapsed_time": "1:30:32", "remaining_time": "12:51:57"}
411
+ {"current_steps": 411, "total_steps": 3906, "loss": 1.54, "learning_rate": 3.90979473379354e-05, "epoch": 0.105216, "percentage": 10.52, "elapsed_time": "1:30:45", "remaining_time": "12:51:43"}
412
+ {"current_steps": 412, "total_steps": 3906, "loss": 1.4793, "learning_rate": 3.909311510355598e-05, "epoch": 0.105472, "percentage": 10.55, "elapsed_time": "1:30:58", "remaining_time": "12:51:30"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:de81595e6f9c087c2ca2826609efaeae7a253ad6ec6f25717829ad9cd4d78995
3
+ size 7096
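The three added lines above are a Git LFS pointer, not the training_args.bin binary itself: they record the pointer spec version, the sha256 object id of the real file, and its size in bytes (7096). A minimal sketch, under the assumption that the pointer text and the fetched binary are saved at the local paths named below, for parsing such a pointer and checking a downloaded file against it:

import hashlib
from pathlib import Path

def parse_lfs_pointer(text: str) -> dict:
    # Each pointer line is "<key> <value>", e.g. "oid sha256:de8159...".
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

def matches_pointer(binary_path: str, pointer: dict) -> bool:
    # Compare the local file's byte size and sha256 digest with the pointer fields.
    data = Path(binary_path).read_bytes()
    expected = pointer["oid"].removeprefix("sha256:")
    return len(data) == int(pointer["size"]) and hashlib.sha256(data).hexdigest() == expected

if __name__ == "__main__":
    pointer = parse_lfs_pointer(Path("training_args.bin").read_text())  # assumed local pointer file
    print(pointer)
    # matches_pointer("training_args_downloaded.bin", pointer)  # assumed path to the fetched binary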