yoonyoon committed on
Commit 8af6fb5
1 Parent(s): e1acdf2
config.json ADDED
The diff for this file is too large to render. See raw diff
 
generation_config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.38.1",
+  "use_cache": false
+}
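
The added generation_config.json is the stock Transformers generation config exported from the model config: BOS id 1, EOS id 2, and use_cache disabled (common for a training checkpoint; it is normally re-enabled for inference). A minimal sketch of inspecting it, assuming this checkpoint is available locally as ./checkpoint (the commit does not name the repo):

```python
# Minimal sketch, assuming the checkpoint directory is available locally at "./checkpoint".
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("./checkpoint")
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id)  # 1 2
print(gen_cfg.use_cache)  # False in this checkpoint; usually set back to True for inference
```
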
model-00001-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ecf09a137dc9370ff2fdaccfd224f3cc76e740dbbc27d0e55b0dbd98e9b635f0
+size 4933725040
model-00002-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:17b803b1cdf74d084c58d00b9bafc7a144fbb6959b1e6b35923b450dccfa58e6
+size 4915916176
model-00003-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3d6de738bcfbb7540234185024060d183ff7c9998b328337791a6e7e065d692a
+size 4999819336
model-00004-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:21d58de47a649d1a263c2649a14ccad3841cecf942f0e00e94ee3581ec4b8fd0
+size 4915916184
model-00005-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ddbbe94a61f5b5a21bfea5438f1390ff9b764fb4b5aeafba4545679a927481b4
+size 1913728736
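
The five model-0000N-of-00005.safetensors entries are Git LFS pointer files: the shard itself lives in LFS storage, and the pointer in the repo records only its sha256 and size in bytes. A small sketch (file paths are assumptions) that parses such a pointer and checks a downloaded shard against it:

```python
# Sketch: parse a Git LFS pointer like the ones above and verify a downloaded shard.
# Paths are assumptions; the pointer text is what this commit actually stores.
import hashlib
from pathlib import Path

def parse_lfs_pointer(text: str) -> dict:
    # A pointer is three "key value" lines: version, oid sha256:<hex>, size <bytes>.
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {"oid": fields["oid"].removeprefix("sha256:"), "size": int(fields["size"])}

def sha256_of(path: Path, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with path.open("rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

pointer = parse_lfs_pointer(Path("model-00001-of-00005.safetensors.pointer").read_text())
shard = Path("model-00001-of-00005.safetensors")
ok = shard.stat().st_size == pointer["size"] and sha256_of(shard) == pointer["oid"]
print(ok)
```
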
model.safetensors.index.json ADDED
@@ -0,0 +1,442 @@
+{
+  "metadata": {
+    "total_size": 21679054848
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00005-of-00005.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.28.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.28.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.28.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.29.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.29.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.29.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.30.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.30.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.30.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.31.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.31.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.31.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.32.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.32.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.32.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.32.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.32.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.32.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.32.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.33.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.33.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.33.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.33.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.33.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.33.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.33.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.33.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.34.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.34.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.34.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.34.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.34.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.34.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.34.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.34.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.34.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.35.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.35.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.35.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.35.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.35.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.35.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.35.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.35.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.36.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.36.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.36.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.36.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.36.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.36.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.36.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.36.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.36.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.37.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.37.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.37.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.37.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.37.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.37.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.37.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.37.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.37.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.38.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.38.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.38.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.38.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.38.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.38.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.38.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.38.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.38.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.39.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.39.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.39.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.39.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.39.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.39.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.39.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.39.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.39.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.40.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.40.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.40.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.40.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.40.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.40.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.40.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.40.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.40.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.41.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.41.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.41.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.41.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.41.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.41.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.41.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.41.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.41.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.42.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.42.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.42.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.42.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.42.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.42.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.42.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.42.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.42.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.43.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.43.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.43.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.43.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.43.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.43.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.43.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.43.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.43.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.44.input_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.44.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.44.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.44.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.44.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.44.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.44.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.44.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.44.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.45.input_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.45.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.45.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.45.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.45.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.45.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.45.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.45.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.45.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.46.input_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.46.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.46.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.46.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.46.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.46.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.46.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.46.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.46.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.47.input_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.47.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.47.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.47.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.47.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.47.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.47.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.47.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.47.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.norm.weight": "model-00005-of-00005.safetensors"
+  }
+}
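
model.safetensors.index.json is what tells Transformers which shard holds each tensor: metadata.total_size is the combined tensor payload (about 21.7 GB, consistent with the five shard sizes above once safetensors headers are accounted for), and weight_map maps every parameter name to one of the five shard files. A short sketch (path assumed to be the checkpoint directory) that reads the index and summarizes the layout:

```python
# Sketch: inspect the shard index written above (path is an assumption).
import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])        # 21679054848 bytes of tensor data
print(index["weight_map"]["lm_head.weight"])  # model-00005-of-00005.safetensors

# How many tensors each shard holds
for shard, n in sorted(Counter(index["weight_map"].values()).items()):
    print(shard, n)
```
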
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f0ac66640a4707dbadff443d3e179e68b78039bc3349dece5d791b297ffd59d1
+size 1480592220
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:386fcc8cc1089aade9450d86fb239ea3483f455fd2d78d8378645feecfec9d69
+size 14244
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:da9ab3b1cd0eedf96c680680bd28b90c2100079af79744dc3cfc9e2f8b824c6f
+size 1064
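
optimizer.pt, rng_state.pth and scheduler.pt (together with trainer_state.json below) are the extra files the Transformers Trainer writes so that a run can be resumed exactly: optimizer moments, RNG snapshots, and LR-scheduler state. They are only needed to continue training, not for inference. A small sketch of peeking at them, with paths assumed and the exact contents depending on the optimizer/scheduler used:

```python
# Sketch: inspect the resume-state files saved above (paths are assumptions).
import torch

# weights_only=False is needed on newer torch because these are pickled Python
# objects, not plain tensors; only do this for checkpoints you trust.
sched_state = torch.load("scheduler.pt", map_location="cpu", weights_only=False)
print(sched_state)  # e.g. last_epoch / _last_lr; exact keys depend on the scheduler

rng_state = torch.load("rng_state.pth", map_location="cpu", weights_only=False)
print(type(rng_state))  # typically a dict of python/numpy/torch RNG states
```
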
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "</s>",
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fe1f1df46f5bbbe47211af84f7ec05e86f38a4e5651f5dc99de7d63e2b636b64
+size 750737
tokenizer_config.json ADDED
@@ -0,0 +1,43 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": true,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "legacy": true,
+  "model_max_length": 4096,
+  "pad_token": "</s>",
+  "padding_side": "left",
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
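
tokenizer_config.json declares a plain LlamaTokenizer over the SentencePiece model in tokenizer.model, with a 4096-token max length, left padding, and </s> doubling as the pad token (matching special_tokens_map.json above). A minimal sketch of loading it, with the directory path assumed:

```python
# Sketch: load the tokenizer files added in this commit (directory path is an assumption).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")
print(tok.bos_token, tok.eos_token, tok.unk_token)  # <s> </s> <unk>
print(tok.pad_token, tok.padding_side)              # </s> left
print(tok.model_max_length)                         # 4096

# With padding_side="left", pad ids (2, i.e. </s>) are prepended to the shorter sequence.
batch = tok(["hello", "a longer example sentence"], padding=True)
print(batch["input_ids"])
```
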
trainer_state.json ADDED
@@ -0,0 +1,2891 @@
+{
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 0.401616943554929,
+  "eval_steps": 2,
+  "global_step": 59040,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.0,
+      "grad_norm": 17.5,
+      "learning_rate": 3.264565858082068e-06,
+      "loss": 9.7328,
+      "step": 144
+    },
+    {
+      "epoch": 0.0,
+      "grad_norm": 19.75,
+      "learning_rate": 6.529131716164136e-06,
+      "loss": 8.8374,
+      "step": 288
+    },
+    {
+      "epoch": 0.0,
+      "grad_norm": 4.5625,
+      "learning_rate": 9.793697574246204e-06,
+      "loss": 7.1244,
+      "step": 432
+    },
+    {
+      "epoch": 0.0,
+      "grad_norm": 3.609375,
+      "learning_rate": 1.3058263432328271e-05,
+      "loss": 6.2564,
+      "step": 576
+    },
+    {
+      "epoch": 0.0,
+      "grad_norm": 2.59375,
+      "learning_rate": 1.6322829290410337e-05,
+      "loss": 5.7834,
+      "step": 720
+    },
+    {
+      "epoch": 0.01,
+      "grad_norm": 2.859375,
+      "learning_rate": 1.958739514849241e-05,
+      "loss": 5.3786,
+      "step": 864
+    },
+    {
+      "epoch": 0.01,
+      "grad_norm": 2.671875,
+      "learning_rate": 2.2851961006574474e-05,
+      "loss": 4.8869,
+      "step": 1008
+    },
+    {
+      "epoch": 0.01,
+      "grad_norm": 3.265625,
+      "learning_rate": 2.6116526864656543e-05,
+      "loss": 4.4858,
+      "step": 1152
+    },
+    {
+      "epoch": 0.01,
+      "grad_norm": 2.703125,
+      "learning_rate": 2.9381092722738608e-05,
+      "loss": 4.1009,
+      "step": 1296
+    },
+    {
+      "epoch": 0.01,
+      "grad_norm": 2.546875,
+      "learning_rate": 3.264565858082067e-05,
+      "loss": 3.7652,
+      "step": 1440
+    },
+    {
+      "epoch": 0.01,
+      "grad_norm": 2.65625,
+      "learning_rate": 3.5910224438902745e-05,
+      "loss": 3.4421,
+      "step": 1584
+    },
+    {
+      "epoch": 0.01,
+      "grad_norm": 3.09375,
+      "learning_rate": 3.917479029698482e-05,
+      "loss": 3.2573,
+      "step": 1728
+    },
+    {
+      "epoch": 0.01,
+      "grad_norm": 2.09375,
+      "learning_rate": 4.2439356155066876e-05,
+      "loss": 3.0644,
+      "step": 1872
+    },
+    {
+      "epoch": 0.01,
+      "grad_norm": 3.609375,
+      "learning_rate": 4.570392201314895e-05,
+      "loss": 2.9334,
+      "step": 2016
+    },
+    {
+      "epoch": 0.01,
+      "grad_norm": 2.125,
+      "learning_rate": 4.896848787123102e-05,
+      "loss": 2.8316,
+      "step": 2160
+    },
+    {
+      "epoch": 0.02,
+      "grad_norm": 2.109375,
+      "learning_rate": 5.2233053729313085e-05,
+      "loss": 2.7194,
+      "step": 2304
+    },
+    {
+      "epoch": 0.02,
+      "grad_norm": 2.046875,
+      "learning_rate": 5.549761958739516e-05,
+      "loss": 2.6336,
+      "step": 2448
+    },
+    {
+      "epoch": 0.02,
+      "grad_norm": 1.8125,
+      "learning_rate": 5.8762185445477216e-05,
+      "loss": 2.5866,
+      "step": 2592
+    },
+    {
+      "epoch": 0.02,
+      "grad_norm": 2.0625,
+      "learning_rate": 6.202675130355929e-05,
+      "loss": 2.5192,
+      "step": 2736
+    },
+    {
+      "epoch": 0.02,
+      "grad_norm": 6.15625,
+      "learning_rate": 6.529131716164135e-05,
+      "loss": 2.4692,
+      "step": 2880
+    },
+    {
+      "epoch": 0.02,
+      "grad_norm": 1.6328125,
+      "learning_rate": 6.855588301972342e-05,
+      "loss": 2.4297,
+      "step": 3024
+    },
+    {
+      "epoch": 0.02,
+      "grad_norm": 2.140625,
+      "learning_rate": 7.182044887780549e-05,
+      "loss": 2.4255,
+      "step": 3168
+    },
+    {
+      "epoch": 0.02,
+      "grad_norm": 2.09375,
+      "learning_rate": 7.508501473588756e-05,
+      "loss": 2.3646,
+      "step": 3312
+    },
+    {
+      "epoch": 0.02,
+      "grad_norm": 1.6328125,
+      "learning_rate": 7.834958059396963e-05,
+      "loss": 2.3059,
+      "step": 3456
+    },
+    {
+      "epoch": 0.02,
+      "grad_norm": 1.7578125,
+      "learning_rate": 8.161414645205169e-05,
+      "loss": 2.2926,
+      "step": 3600
+    },
+    {
+      "epoch": 0.03,
+      "grad_norm": 1.6171875,
+      "learning_rate": 8.487871231013375e-05,
+      "loss": 2.2704,
+      "step": 3744
+    },
+    {
+      "epoch": 0.03,
+      "grad_norm": 1.7578125,
+      "learning_rate": 8.814327816821582e-05,
+      "loss": 2.251,
+      "step": 3888
+    },
+    {
+      "epoch": 0.03,
+      "grad_norm": 2.078125,
+      "learning_rate": 9.14078440262979e-05,
+      "loss": 2.2393,
+      "step": 4032
+    },
+    {
+      "epoch": 0.03,
+      "grad_norm": 1.5859375,
+      "learning_rate": 9.467240988437997e-05,
+      "loss": 2.2282,
+      "step": 4176
+    },
+    {
+      "epoch": 0.03,
+      "grad_norm": 1.625,
+      "learning_rate": 9.793697574246204e-05,
+      "loss": 2.2133,
+      "step": 4320
+    },
+    {
+      "epoch": 0.03,
+      "grad_norm": 1.6484375,
+      "learning_rate": 9.999996591298748e-05,
+      "loss": 2.1821,
+      "step": 4464
+    },
+    {
+      "epoch": 0.03,
+      "grad_norm": 1.8203125,
+      "learning_rate": 9.99995290562682e-05,
+      "loss": 2.1851,
+      "step": 4608
+    },
+    {
+      "epoch": 0.03,
+      "grad_norm": 1.5703125,
+      "learning_rate": 9.999858894492333e-05,
+      "loss": 2.1736,
+      "step": 4752
+    },
+    {
+      "epoch": 0.03,
+      "grad_norm": 1.4296875,
+      "learning_rate": 9.99971455884153e-05,
+      "loss": 2.1474,
+      "step": 4896
+    },
+    {
+      "epoch": 0.03,
+      "grad_norm": 1.515625,
+      "learning_rate": 9.999519900127174e-05,
+      "loss": 2.144,
+      "step": 5040
+    },
+    {
+      "epoch": 0.04,
+      "grad_norm": 1.5,
+      "learning_rate": 9.999274920308544e-05,
+      "loss": 2.1284,
+      "step": 5184
+    },
+    {
+      "epoch": 0.04,
+      "grad_norm": 1.46875,
+      "learning_rate": 9.998979621851405e-05,
+      "loss": 2.1219,
+      "step": 5328
+    },
+    {
+      "epoch": 0.04,
+      "grad_norm": 1.28125,
+      "learning_rate": 9.998634007727992e-05,
+      "loss": 2.1004,
+      "step": 5472
+    },
+    {
+      "epoch": 0.04,
+      "grad_norm": 3.625,
+      "learning_rate": 9.998238081416977e-05,
+      "loss": 2.1148,
+      "step": 5616
+    },
+    {
+      "epoch": 0.04,
+      "grad_norm": 1.421875,
+      "learning_rate": 9.99779184690343e-05,
+      "loss": 2.119,
+      "step": 5760
+    },
+    {
+      "epoch": 0.04,
+      "grad_norm": 1.5859375,
+      "learning_rate": 9.997295308678789e-05,
+      "loss": 2.1017,
+      "step": 5904
+    },
+    {
+      "epoch": 0.04,
+      "grad_norm": 1.359375,
+      "learning_rate": 9.996748471740802e-05,
+      "loss": 2.098,
+      "step": 6048
+    },
+    {
+      "epoch": 0.04,
+      "grad_norm": 1.5234375,
+      "learning_rate": 9.996151341593482e-05,
+      "loss": 2.0888,
+      "step": 6192
+    },
+    {
+      "epoch": 0.04,
+      "grad_norm": 1.59375,
+      "learning_rate": 9.995503924247063e-05,
+      "loss": 2.0885,
+      "step": 6336
+    },
+    {
+      "epoch": 0.04,
+      "grad_norm": 1.4140625,
+      "learning_rate": 9.994806226217915e-05,
+      "loss": 2.0654,
+      "step": 6480
+    },
+    {
+      "epoch": 0.05,
+      "grad_norm": 1.3359375,
+      "learning_rate": 9.994058254528505e-05,
+      "loss": 2.0888,
+      "step": 6624
+    },
+    {
+      "epoch": 0.05,
+      "grad_norm": 1.3671875,
+      "learning_rate": 9.993260016707303e-05,
+      "loss": 2.0717,
+      "step": 6768
+    },
+    {
+      "epoch": 0.05,
+      "grad_norm": 1.4765625,
+      "learning_rate": 9.992411520788724e-05,
+      "loss": 2.0513,
+      "step": 6912
+    },
+    {
+      "epoch": 0.05,
+      "grad_norm": 1.34375,
+      "learning_rate": 9.99151277531304e-05,
+      "loss": 2.0695,
+      "step": 7056
+    },
+    {
+      "epoch": 0.05,
+      "grad_norm": 1.5546875,
+      "learning_rate": 9.990563789326291e-05,
+      "loss": 2.0612,
+      "step": 7200
+    },
+    {
+      "epoch": 0.05,
+      "grad_norm": 3.390625,
+      "learning_rate": 9.989564572380199e-05,
+      "loss": 2.0577,
+      "step": 7344
+    },
+    {
+      "epoch": 0.05,
+      "grad_norm": 1.6796875,
+      "learning_rate": 9.98851513453207e-05,
+      "loss": 2.0453,
+      "step": 7488
+    },
+    {
+      "epoch": 0.05,
+      "grad_norm": 1.5625,
+      "learning_rate": 9.987415486344694e-05,
+      "loss": 2.0381,
+      "step": 7632
+    },
+    {
+      "epoch": 0.05,
+      "grad_norm": 1.453125,
+      "learning_rate": 9.986265638886232e-05,
+      "loss": 2.0528,
+      "step": 7776
+    },
+    {
+      "epoch": 0.05,
+      "grad_norm": 1.8515625,
+      "learning_rate": 9.985065603730119e-05,
+      "loss": 2.0414,
+      "step": 7920
+    },
+    {
+      "epoch": 0.05,
+      "grad_norm": 3.296875,
+      "learning_rate": 9.98381539295493e-05,
+      "loss": 2.0388,
+      "step": 8064
+    },
+    {
+      "epoch": 0.06,
+      "grad_norm": 1.3828125,
+      "learning_rate": 9.98251501914427e-05,
+      "loss": 2.0096,
+      "step": 8208
+    },
+    {
+      "epoch": 0.06,
+      "grad_norm": 1.3984375,
+      "learning_rate": 9.981164495386647e-05,
+      "loss": 2.023,
+      "step": 8352
+    },
+    {
+      "epoch": 0.06,
+      "grad_norm": 1.1796875,
+      "learning_rate": 9.979763835275338e-05,
+      "loss": 2.0058,
+      "step": 8496
+    },
+    {
+      "epoch": 0.06,
+      "grad_norm": 1.375,
+      "learning_rate": 9.978313052908243e-05,
+      "loss": 2.0333,
+      "step": 8640
+    },
+    {
+      "epoch": 0.06,
+      "grad_norm": 1.34375,
+      "learning_rate": 9.976812162887765e-05,
+      "loss": 2.0387,
+      "step": 8784
+    },
+    {
+      "epoch": 0.06,
+      "grad_norm": 1.40625,
+      "learning_rate": 9.975261180320638e-05,
+      "loss": 2.0307,
+      "step": 8928
+    },
+    {
+      "epoch": 0.06,
+      "grad_norm": 1.484375,
+      "learning_rate": 9.973660120817796e-05,
+      "loss": 2.0153,
+      "step": 9072
+    },
+    {
+      "epoch": 0.06,
+      "grad_norm": 1.3203125,
+      "learning_rate": 9.9720090004942e-05,
+      "loss": 2.0324,
+      "step": 9216
+    },
+    {
+      "epoch": 0.06,
+      "grad_norm": 1.2890625,
+      "learning_rate": 9.970307835968687e-05,
+      "loss": 2.0118,
+      "step": 9360
+    },
+    {
+      "epoch": 0.06,
+      "grad_norm": 1.4375,
+      "learning_rate": 9.968556644363793e-05,
+      "loss": 2.0003,
+      "step": 9504
+    },
+    {
+      "epoch": 0.07,
+      "grad_norm": 1.1875,
+      "learning_rate": 9.966755443305595e-05,
+      "loss": 2.0019,
+      "step": 9648
+    },
+    {
+      "epoch": 0.07,
+      "grad_norm": 1.1796875,
+      "learning_rate": 9.964904250923517e-05,
+      "loss": 2.0126,
+      "step": 9792
+    },
+    {
+      "epoch": 0.07,
+      "grad_norm": 1.265625,
+      "learning_rate": 9.963003085850155e-05,
+      "loss": 1.9971,
+      "step": 9936
+    },
+    {
+      "epoch": 0.07,
+      "grad_norm": 1.1953125,
+      "learning_rate": 9.961051967221096e-05,
+      "loss": 1.9832,
+      "step": 10080
+    },
+    {
+      "epoch": 0.07,
+      "grad_norm": 1.1875,
+      "learning_rate": 9.959050914674708e-05,
+      "loss": 1.9898,
+      "step": 10224
+    },
+    {
+      "epoch": 0.07,
+      "grad_norm": 1.3125,
+      "learning_rate": 9.956999948351967e-05,
+      "loss": 1.9954,
+      "step": 10368
+    },
+    {
+      "epoch": 0.07,
+      "grad_norm": 1.4140625,
+      "learning_rate": 9.954899088896226e-05,
+      "loss": 1.9928,
+      "step": 10512
+    },
+    {
+      "epoch": 0.07,
+      "grad_norm": 1.2265625,
+      "learning_rate": 9.952748357453034e-05,
+      "loss": 1.9925,
+      "step": 10656
+    },
+    {
+      "epoch": 0.07,
+      "grad_norm": 1.78125,
+      "learning_rate": 9.950547775669903e-05,
+      "loss": 1.9898,
+      "step": 10800
+    },
+    {
+      "epoch": 0.07,
+      "grad_norm": 1.28125,
+      "learning_rate": 9.948297365696104e-05,
+      "loss": 2.0034,
+      "step": 10944
+    },
+    {
+      "epoch": 0.08,
+      "grad_norm": 1.265625,
+      "learning_rate": 9.945997150182432e-05,
+      "loss": 2.0083,
+      "step": 11088
+    },
+    {
+      "epoch": 0.08,
+      "grad_norm": 1.234375,
+      "learning_rate": 9.943647152280988e-05,
+      "loss": 1.9859,
+      "step": 11232
+    },
+    {
+      "epoch": 0.08,
+      "grad_norm": 1.2265625,
+      "learning_rate": 9.941247395644942e-05,
+      "loss": 1.9656,
+      "step": 11376
+    },
+    {
+      "epoch": 0.08,
+      "grad_norm": 1.671875,
+      "learning_rate": 9.938797904428294e-05,
+      "loss": 1.9793,
+      "step": 11520
+    },
+    {
+      "epoch": 0.08,
+      "grad_norm": 1.15625,
+      "learning_rate": 9.936298703285629e-05,
+      "loss": 1.9857,
+      "step": 11664
+    },
+    {
+      "epoch": 0.08,
+      "grad_norm": 1.2421875,
+      "learning_rate": 9.933749817371878e-05,
+      "loss": 1.9909,
+      "step": 11808
+    },
+    {
+      "epoch": 0.08,
+      "grad_norm": 1.515625,
+      "learning_rate": 9.931151272342054e-05,
+      "loss": 1.9626,
+      "step": 11952
+    },
+    {
+      "epoch": 0.08,
+      "grad_norm": 1.2109375,
+      "learning_rate": 9.928503094351e-05,
+      "loss": 1.9937,
+      "step": 12096
+    },
+    {
+      "epoch": 0.08,
+      "grad_norm": 1.1640625,
602
+ "learning_rate": 9.92580531005312e-05,
603
+ "loss": 1.9876,
604
+ "step": 12240
605
+ },
606
+ {
607
+ "epoch": 0.08,
608
+ "grad_norm": 1.53125,
609
+ "learning_rate": 9.923057946602125e-05,
610
+ "loss": 1.9967,
611
+ "step": 12384
612
+ },
613
+ {
614
+ "epoch": 0.09,
615
+ "grad_norm": 1.265625,
616
+ "learning_rate": 9.920261031650738e-05,
617
+ "loss": 1.9712,
618
+ "step": 12528
619
+ },
620
+ {
621
+ "epoch": 0.09,
622
+ "grad_norm": 2.0,
623
+ "learning_rate": 9.917414593350432e-05,
624
+ "loss": 1.9782,
625
+ "step": 12672
626
+ },
627
+ {
628
+ "epoch": 0.09,
629
+ "grad_norm": 1.28125,
630
+ "learning_rate": 9.914518660351142e-05,
631
+ "loss": 1.9822,
632
+ "step": 12816
633
+ },
634
+ {
635
+ "epoch": 0.09,
636
+ "grad_norm": 1.3671875,
637
+ "learning_rate": 9.911573261800977e-05,
638
+ "loss": 1.9826,
639
+ "step": 12960
640
+ },
641
+ {
642
+ "epoch": 0.09,
643
+ "grad_norm": 2.015625,
644
+ "learning_rate": 9.908578427345925e-05,
645
+ "loss": 1.9701,
646
+ "step": 13104
647
+ },
648
+ {
649
+ "epoch": 0.09,
650
+ "grad_norm": 6.625,
651
+ "learning_rate": 9.905534187129555e-05,
652
+ "loss": 1.9822,
653
+ "step": 13248
654
+ },
655
+ {
656
+ "epoch": 0.09,
657
+ "grad_norm": 1.21875,
658
+ "learning_rate": 9.902440571792714e-05,
659
+ "loss": 1.9728,
660
+ "step": 13392
661
+ },
662
+ {
663
+ "epoch": 0.09,
664
+ "grad_norm": 1.140625,
665
+ "learning_rate": 9.899297612473225e-05,
666
+ "loss": 1.968,
667
+ "step": 13536
668
+ },
669
+ {
670
+ "epoch": 0.09,
671
+ "grad_norm": 1.1953125,
672
+ "learning_rate": 9.896105340805557e-05,
673
+ "loss": 1.9723,
674
+ "step": 13680
675
+ },
676
+ {
677
+ "epoch": 0.09,
678
+ "grad_norm": 1.203125,
679
+ "learning_rate": 9.892863788920523e-05,
680
+ "loss": 1.9676,
681
+ "step": 13824
682
+ },
683
+ {
684
+ "epoch": 0.1,
685
+ "grad_norm": 1.125,
686
+ "learning_rate": 9.889572989444953e-05,
687
+ "loss": 1.9499,
688
+ "step": 13968
689
+ },
690
+ {
691
+ "epoch": 0.1,
692
+ "grad_norm": 1.3359375,
693
+ "learning_rate": 9.886232975501355e-05,
694
+ "loss": 1.985,
695
+ "step": 14112
696
+ },
697
+ {
698
+ "epoch": 0.1,
699
+ "grad_norm": 1.5625,
700
+ "learning_rate": 9.8828437807076e-05,
701
+ "loss": 1.9802,
702
+ "step": 14256
703
+ },
704
+ {
705
+ "epoch": 0.1,
706
+ "grad_norm": 1.171875,
707
+ "learning_rate": 9.879405439176566e-05,
708
+ "loss": 1.9356,
709
+ "step": 14400
710
+ },
711
+ {
712
+ "epoch": 0.1,
713
+ "grad_norm": 1.7109375,
714
+ "learning_rate": 9.875917985515805e-05,
715
+ "loss": 1.9518,
716
+ "step": 14544
717
+ },
718
+ {
719
+ "epoch": 0.1,
720
+ "grad_norm": 1.3359375,
721
+ "learning_rate": 9.872381454827191e-05,
722
+ "loss": 1.9851,
723
+ "step": 14688
724
+ },
725
+ {
726
+ "epoch": 0.1,
727
+ "grad_norm": 1.203125,
728
+ "learning_rate": 9.868795882706569e-05,
729
+ "loss": 1.9658,
730
+ "step": 14832
731
+ },
732
+ {
733
+ "epoch": 0.1,
734
+ "grad_norm": 1.8828125,
735
+ "learning_rate": 9.86516130524339e-05,
736
+ "loss": 1.9648,
737
+ "step": 14976
738
+ },
739
+ {
740
+ "epoch": 0.1,
741
+ "grad_norm": 1.3671875,
742
+ "learning_rate": 9.861477759020363e-05,
743
+ "loss": 1.9852,
744
+ "step": 15120
745
+ },
746
+ {
747
+ "epoch": 0.1,
748
+ "grad_norm": 1.140625,
749
+ "learning_rate": 9.857745281113069e-05,
750
+ "loss": 1.9768,
751
+ "step": 15264
752
+ },
753
+ {
754
+ "epoch": 0.1,
755
+ "grad_norm": 1.265625,
756
+ "learning_rate": 9.853963909089594e-05,
757
+ "loss": 1.958,
758
+ "step": 15408
759
+ },
760
+ {
761
+ "epoch": 0.11,
762
+ "grad_norm": 1.453125,
763
+ "learning_rate": 9.850133681010159e-05,
764
+ "loss": 1.9697,
765
+ "step": 15552
766
+ },
767
+ {
768
+ "epoch": 0.11,
769
+ "grad_norm": 1.1484375,
770
+ "learning_rate": 9.846254635426724e-05,
771
+ "loss": 1.976,
772
+ "step": 15696
773
+ },
774
+ {
775
+ "epoch": 0.11,
776
+ "grad_norm": 1.34375,
777
+ "learning_rate": 9.842326811382612e-05,
778
+ "loss": 1.9644,
779
+ "step": 15840
780
+ },
781
+ {
782
+ "epoch": 0.11,
783
+ "grad_norm": 1.2578125,
784
+ "learning_rate": 9.838350248412107e-05,
785
+ "loss": 1.9412,
786
+ "step": 15984
787
+ },
788
+ {
789
+ "epoch": 0.11,
790
+ "grad_norm": 1.140625,
791
+ "learning_rate": 9.83432498654006e-05,
792
+ "loss": 1.9672,
793
+ "step": 16128
794
+ },
795
+ {
796
+ "epoch": 0.11,
797
+ "grad_norm": 1.1484375,
798
+ "learning_rate": 9.830251066281489e-05,
799
+ "loss": 1.9348,
800
+ "step": 16272
801
+ },
802
+ {
803
+ "epoch": 0.11,
804
+ "grad_norm": 1.1875,
805
+ "learning_rate": 9.826128528641157e-05,
806
+ "loss": 1.9759,
807
+ "step": 16416
808
+ },
809
+ {
810
+ "epoch": 0.11,
811
+ "grad_norm": 1.2109375,
812
+ "learning_rate": 9.821957415113186e-05,
813
+ "loss": 1.9671,
814
+ "step": 16560
815
+ },
816
+ {
817
+ "epoch": 0.11,
818
+ "grad_norm": 1.1015625,
819
+ "learning_rate": 9.817737767680608e-05,
820
+ "loss": 1.9524,
821
+ "step": 16704
822
+ },
823
+ {
824
+ "epoch": 0.11,
825
+ "grad_norm": 2.640625,
826
+ "learning_rate": 9.813469628814969e-05,
827
+ "loss": 1.9456,
828
+ "step": 16848
829
+ },
830
+ {
831
+ "epoch": 0.12,
832
+ "grad_norm": 1.1640625,
833
+ "learning_rate": 9.809153041475883e-05,
834
+ "loss": 1.9576,
835
+ "step": 16992
836
+ },
837
+ {
838
+ "epoch": 0.12,
839
+ "grad_norm": 1.3046875,
840
+ "learning_rate": 9.804788049110615e-05,
841
+ "loss": 1.9802,
842
+ "step": 17136
843
+ },
844
+ {
845
+ "epoch": 0.12,
846
+ "grad_norm": 1.3828125,
847
+ "learning_rate": 9.800374695653627e-05,
848
+ "loss": 1.9562,
849
+ "step": 17280
850
+ },
851
+ {
852
+ "epoch": 0.12,
853
+ "grad_norm": 1.15625,
854
+ "learning_rate": 9.795913025526148e-05,
855
+ "loss": 1.9342,
856
+ "step": 17424
857
+ },
858
+ {
859
+ "epoch": 0.12,
860
+ "grad_norm": 1.265625,
861
+ "learning_rate": 9.791403083635726e-05,
862
+ "loss": 1.9553,
863
+ "step": 17568
864
+ },
865
+ {
866
+ "epoch": 0.12,
867
+ "grad_norm": 1.1484375,
868
+ "learning_rate": 9.786844915375768e-05,
869
+ "loss": 1.9813,
870
+ "step": 17712
871
+ },
872
+ {
873
+ "epoch": 0.12,
874
+ "grad_norm": 1.375,
875
+ "learning_rate": 9.782238566625094e-05,
876
+ "loss": 1.9396,
877
+ "step": 17856
878
+ },
879
+ {
880
+ "epoch": 0.12,
881
+ "grad_norm": 2.5625,
882
+ "learning_rate": 9.777584083747466e-05,
883
+ "loss": 1.9618,
884
+ "step": 18000
885
+ },
886
+ {
887
+ "epoch": 0.12,
888
+ "grad_norm": 1.5234375,
889
+ "learning_rate": 9.772881513591124e-05,
890
+ "loss": 1.9451,
891
+ "step": 18144
892
+ },
893
+ {
894
+ "epoch": 0.12,
895
+ "grad_norm": 2.171875,
896
+ "learning_rate": 9.768130903488322e-05,
897
+ "loss": 1.9672,
898
+ "step": 18288
899
+ },
900
+ {
901
+ "epoch": 0.13,
902
+ "grad_norm": 1.140625,
903
+ "learning_rate": 9.763332301254835e-05,
904
+ "loss": 1.9764,
905
+ "step": 18432
906
+ },
907
+ {
908
+ "epoch": 0.13,
909
+ "grad_norm": 1.3203125,
910
+ "learning_rate": 9.7584857551895e-05,
911
+ "loss": 1.9538,
912
+ "step": 18576
913
+ },
914
+ {
915
+ "epoch": 0.13,
916
+ "grad_norm": 1.171875,
917
+ "learning_rate": 9.753591314073705e-05,
918
+ "loss": 1.9473,
919
+ "step": 18720
920
+ },
921
+ {
922
+ "epoch": 0.13,
923
+ "grad_norm": 1.078125,
924
+ "learning_rate": 9.748649027170918e-05,
925
+ "loss": 1.9756,
926
+ "step": 18864
927
+ },
928
+ {
929
+ "epoch": 0.13,
930
+ "grad_norm": 1.078125,
931
+ "learning_rate": 9.743658944226185e-05,
932
+ "loss": 1.9427,
933
+ "step": 19008
934
+ },
935
+ {
936
+ "epoch": 0.13,
937
+ "grad_norm": 1.203125,
938
+ "learning_rate": 9.738621115465624e-05,
939
+ "loss": 1.9448,
940
+ "step": 19152
941
+ },
942
+ {
943
+ "epoch": 0.13,
944
+ "grad_norm": 1.1328125,
945
+ "learning_rate": 9.733535591595924e-05,
946
+ "loss": 1.9604,
947
+ "step": 19296
948
+ },
949
+ {
950
+ "epoch": 0.13,
951
+ "grad_norm": 1.09375,
952
+ "learning_rate": 9.728402423803836e-05,
953
+ "loss": 1.9488,
954
+ "step": 19440
955
+ },
956
+ {
957
+ "epoch": 0.13,
958
+ "grad_norm": 1.140625,
959
+ "learning_rate": 9.723221663755657e-05,
960
+ "loss": 1.9586,
961
+ "step": 19584
962
+ },
963
+ {
964
+ "epoch": 0.13,
965
+ "grad_norm": 1.171875,
966
+ "learning_rate": 9.717993363596705e-05,
967
+ "loss": 1.9509,
968
+ "step": 19728
969
+ },
970
+ {
971
+ "epoch": 0.14,
972
+ "grad_norm": 1.375,
973
+ "learning_rate": 9.712717575950802e-05,
974
+ "loss": 1.9435,
975
+ "step": 19872
976
+ },
977
+ {
978
+ "epoch": 0.14,
979
+ "grad_norm": 1.1796875,
980
+ "learning_rate": 9.707394353919737e-05,
981
+ "loss": 1.9581,
982
+ "step": 20016
983
+ },
984
+ {
985
+ "epoch": 0.14,
986
+ "grad_norm": 1.421875,
987
+ "learning_rate": 9.70202375108274e-05,
988
+ "loss": 1.9395,
989
+ "step": 20160
990
+ },
991
+ {
992
+ "epoch": 0.14,
993
+ "grad_norm": 1.1640625,
994
+ "learning_rate": 9.696605821495932e-05,
995
+ "loss": 1.9643,
996
+ "step": 20304
997
+ },
998
+ {
999
+ "epoch": 0.14,
1000
+ "grad_norm": 1.2421875,
1001
+ "learning_rate": 9.691140619691789e-05,
1002
+ "loss": 1.9318,
1003
+ "step": 20448
1004
+ },
1005
+ {
1006
+ "epoch": 0.14,
1007
+ "grad_norm": 1.40625,
1008
+ "learning_rate": 9.685628200678592e-05,
1009
+ "loss": 1.9463,
1010
+ "step": 20592
1011
+ },
1012
+ {
1013
+ "epoch": 0.14,
1014
+ "grad_norm": 1.28125,
1015
+ "learning_rate": 9.680068619939872e-05,
1016
+ "loss": 1.9573,
1017
+ "step": 20736
1018
+ },
1019
+ {
1020
+ "epoch": 0.14,
1021
+ "grad_norm": 1.2265625,
1022
+ "learning_rate": 9.674461933433849e-05,
1023
+ "loss": 1.9454,
1024
+ "step": 20880
1025
+ },
1026
+ {
1027
+ "epoch": 0.14,
1028
+ "grad_norm": 1.0703125,
1029
+ "learning_rate": 9.668808197592871e-05,
1030
+ "loss": 1.9413,
1031
+ "step": 21024
1032
+ },
1033
+ {
1034
+ "epoch": 0.14,
1035
+ "grad_norm": 1.1640625,
1036
+ "learning_rate": 9.663107469322852e-05,
1037
+ "loss": 1.9387,
1038
+ "step": 21168
1039
+ },
1040
+ {
1041
+ "epoch": 0.14,
1042
+ "grad_norm": 1.3828125,
1043
+ "learning_rate": 9.657359806002689e-05,
1044
+ "loss": 1.9389,
1045
+ "step": 21312
1046
+ },
1047
+ {
1048
+ "epoch": 0.15,
1049
+ "grad_norm": 1.6015625,
1050
+ "learning_rate": 9.651565265483687e-05,
1051
+ "loss": 1.9477,
1052
+ "step": 21456
1053
+ },
1054
+ {
1055
+ "epoch": 0.15,
1056
+ "grad_norm": 1.1328125,
1057
+ "learning_rate": 9.645723906088983e-05,
1058
+ "loss": 1.9358,
1059
+ "step": 21600
1060
+ },
1061
+ {
1062
+ "epoch": 0.15,
1063
+ "grad_norm": 1.25,
1064
+ "learning_rate": 9.639835786612957e-05,
1065
+ "loss": 1.9403,
1066
+ "step": 21744
1067
+ },
1068
+ {
1069
+ "epoch": 0.15,
1070
+ "grad_norm": 1.125,
1071
+ "learning_rate": 9.633900966320631e-05,
1072
+ "loss": 1.9274,
1073
+ "step": 21888
1074
+ },
1075
+ {
1076
+ "epoch": 0.15,
1077
+ "grad_norm": 1.1640625,
1078
+ "learning_rate": 9.627919504947084e-05,
1079
+ "loss": 1.9236,
1080
+ "step": 22032
1081
+ },
1082
+ {
1083
+ "epoch": 0.15,
1084
+ "grad_norm": 1.0703125,
1085
+ "learning_rate": 9.621891462696843e-05,
1086
+ "loss": 1.947,
1087
+ "step": 22176
1088
+ },
1089
+ {
1090
+ "epoch": 0.15,
1091
+ "grad_norm": 1.2265625,
1092
+ "learning_rate": 9.615816900243287e-05,
1093
+ "loss": 1.9404,
1094
+ "step": 22320
1095
+ },
1096
+ {
1097
+ "epoch": 0.15,
1098
+ "grad_norm": 1.5390625,
1099
+ "learning_rate": 9.60969587872802e-05,
1100
+ "loss": 1.9458,
1101
+ "step": 22464
1102
+ },
1103
+ {
1104
+ "epoch": 0.15,
1105
+ "grad_norm": 1.1640625,
1106
+ "learning_rate": 9.603528459760274e-05,
1107
+ "loss": 1.9203,
1108
+ "step": 22608
1109
+ },
1110
+ {
1111
+ "epoch": 0.15,
1112
+ "grad_norm": 1.1328125,
1113
+ "learning_rate": 9.597314705416274e-05,
1114
+ "loss": 1.943,
1115
+ "step": 22752
1116
+ },
1117
+ {
1118
+ "epoch": 0.16,
1119
+ "grad_norm": 1.109375,
1120
+ "learning_rate": 9.59105467823862e-05,
1121
+ "loss": 1.9428,
1122
+ "step": 22896
1123
+ },
1124
+ {
1125
+ "epoch": 0.16,
1126
+ "grad_norm": 1.0859375,
1127
+ "learning_rate": 9.584748441235663e-05,
1128
+ "loss": 1.9281,
1129
+ "step": 23040
1130
+ },
1131
+ {
1132
+ "epoch": 0.16,
1133
+ "grad_norm": 4.21875,
1134
+ "learning_rate": 9.578396057880854e-05,
1135
+ "loss": 1.9449,
1136
+ "step": 23184
1137
+ },
1138
+ {
1139
+ "epoch": 0.16,
1140
+ "grad_norm": 5.3125,
1141
+ "learning_rate": 9.571997592112126e-05,
1142
+ "loss": 1.9509,
1143
+ "step": 23328
1144
+ },
1145
+ {
1146
+ "epoch": 0.16,
1147
+ "grad_norm": 1.140625,
1148
+ "learning_rate": 9.565553108331231e-05,
1149
+ "loss": 1.9423,
1150
+ "step": 23472
1151
+ },
1152
+ {
1153
+ "epoch": 0.16,
1154
+ "grad_norm": 1.1328125,
1155
+ "learning_rate": 9.559062671403109e-05,
1156
+ "loss": 1.9272,
1157
+ "step": 23616
1158
+ },
1159
+ {
1160
+ "epoch": 0.16,
1161
+ "grad_norm": 1.1796875,
1162
+ "learning_rate": 9.552526346655222e-05,
1163
+ "loss": 1.9438,
1164
+ "step": 23760
1165
+ },
1166
+ {
1167
+ "epoch": 0.16,
1168
+ "grad_norm": 1.6953125,
1169
+ "learning_rate": 9.545944199876904e-05,
1170
+ "loss": 1.9523,
1171
+ "step": 23904
1172
+ },
1173
+ {
1174
+ "epoch": 0.16,
1175
+ "grad_norm": 1.1015625,
1176
+ "learning_rate": 9.539316297318695e-05,
1177
+ "loss": 1.9306,
1178
+ "step": 24048
1179
+ },
1180
+ {
1181
+ "epoch": 0.16,
1182
+ "grad_norm": 1.203125,
1183
+ "learning_rate": 9.532642705691674e-05,
1184
+ "loss": 1.9391,
1185
+ "step": 24192
1186
+ },
1187
+ {
1188
+ "epoch": 0.17,
1189
+ "grad_norm": 1.09375,
1190
+ "learning_rate": 9.525923492166792e-05,
1191
+ "loss": 1.9723,
1192
+ "step": 24336
1193
+ },
1194
+ {
1195
+ "epoch": 0.17,
1196
+ "grad_norm": 1.09375,
1197
+ "learning_rate": 9.519158724374193e-05,
1198
+ "loss": 1.9303,
1199
+ "step": 24480
1200
+ },
1201
+ {
1202
+ "epoch": 0.17,
1203
+ "grad_norm": 1.125,
1204
+ "learning_rate": 9.51234847040253e-05,
1205
+ "loss": 1.9424,
1206
+ "step": 24624
1207
+ },
1208
+ {
1209
+ "epoch": 0.17,
1210
+ "grad_norm": 1.265625,
1211
+ "learning_rate": 9.505492798798286e-05,
1212
+ "loss": 1.9572,
1213
+ "step": 24768
1214
+ },
1215
+ {
1216
+ "epoch": 0.17,
1217
+ "grad_norm": 1.28125,
1218
+ "learning_rate": 9.49859177856508e-05,
1219
+ "loss": 1.9425,
1220
+ "step": 24912
1221
+ },
1222
+ {
1223
+ "epoch": 0.17,
1224
+ "grad_norm": 3.96875,
1225
+ "learning_rate": 9.491645479162971e-05,
1226
+ "loss": 1.9466,
1227
+ "step": 25056
1228
+ },
1229
+ {
1230
+ "epoch": 0.17,
1231
+ "grad_norm": 1.0703125,
1232
+ "learning_rate": 9.484653970507766e-05,
1233
+ "loss": 1.9457,
1234
+ "step": 25200
1235
+ },
1236
+ {
1237
+ "epoch": 0.17,
1238
+ "grad_norm": 1.2890625,
1239
+ "learning_rate": 9.477617322970309e-05,
1240
+ "loss": 1.9162,
1241
+ "step": 25344
1242
+ },
1243
+ {
1244
+ "epoch": 0.17,
1245
+ "grad_norm": 1.1796875,
1246
+ "learning_rate": 9.470535607375775e-05,
1247
+ "loss": 1.9428,
1248
+ "step": 25488
1249
+ },
1250
+ {
1251
+ "epoch": 0.17,
1252
+ "grad_norm": 1.0703125,
1253
+ "learning_rate": 9.463408895002958e-05,
1254
+ "loss": 1.9496,
1255
+ "step": 25632
1256
+ },
1257
+ {
1258
+ "epoch": 0.18,
1259
+ "grad_norm": 1.140625,
1260
+ "learning_rate": 9.456237257583555e-05,
1261
+ "loss": 1.9268,
1262
+ "step": 25776
1263
+ },
1264
+ {
1265
+ "epoch": 0.18,
1266
+ "grad_norm": 3.140625,
1267
+ "learning_rate": 9.449020767301435e-05,
1268
+ "loss": 1.934,
1269
+ "step": 25920
1270
+ },
1271
+ {
1272
+ "epoch": 0.18,
1273
+ "grad_norm": 1.2421875,
1274
+ "learning_rate": 9.441759496791929e-05,
1275
+ "loss": 1.919,
1276
+ "step": 26064
1277
+ },
1278
+ {
1279
+ "epoch": 0.18,
1280
+ "grad_norm": 1.1796875,
1281
+ "learning_rate": 9.434453519141085e-05,
1282
+ "loss": 1.9358,
1283
+ "step": 26208
1284
+ },
1285
+ {
1286
+ "epoch": 0.18,
1287
+ "grad_norm": 1.15625,
1288
+ "learning_rate": 9.427102907884933e-05,
1289
+ "loss": 1.9278,
1290
+ "step": 26352
1291
+ },
1292
+ {
1293
+ "epoch": 0.18,
1294
+ "grad_norm": 1.125,
1295
+ "learning_rate": 9.419707737008754e-05,
1296
+ "loss": 1.9417,
1297
+ "step": 26496
1298
+ },
1299
+ {
1300
+ "epoch": 0.18,
1301
+ "grad_norm": 1.796875,
1302
+ "learning_rate": 9.412268080946327e-05,
1303
+ "loss": 1.9371,
1304
+ "step": 26640
1305
+ },
1306
+ {
1307
+ "epoch": 0.18,
1308
+ "grad_norm": 1.15625,
1309
+ "learning_rate": 9.404784014579186e-05,
1310
+ "loss": 1.9495,
1311
+ "step": 26784
1312
+ },
1313
+ {
1314
+ "epoch": 0.18,
1315
+ "grad_norm": 1.8203125,
1316
+ "learning_rate": 9.397255613235862e-05,
1317
+ "loss": 1.9303,
1318
+ "step": 26928
1319
+ },
1320
+ {
1321
+ "epoch": 0.18,
1322
+ "grad_norm": 1.8828125,
1323
+ "learning_rate": 9.38968295269112e-05,
1324
+ "loss": 1.9501,
1325
+ "step": 27072
1326
+ },
1327
+ {
1328
+ "epoch": 0.19,
1329
+ "grad_norm": 1.171875,
1330
+ "learning_rate": 9.38206610916521e-05,
1331
+ "loss": 1.9409,
1332
+ "step": 27216
1333
+ },
1334
+ {
1335
+ "epoch": 0.19,
1336
+ "grad_norm": 1.2578125,
1337
+ "learning_rate": 9.374405159323087e-05,
1338
+ "loss": 1.9442,
1339
+ "step": 27360
1340
+ },
1341
+ {
1342
+ "epoch": 0.19,
1343
+ "grad_norm": 1.8203125,
1344
+ "learning_rate": 9.36670018027365e-05,
1345
+ "loss": 1.9354,
1346
+ "step": 27504
1347
+ },
1348
+ {
1349
+ "epoch": 0.19,
1350
+ "grad_norm": 1.046875,
1351
+ "learning_rate": 9.358951249568952e-05,
1352
+ "loss": 1.9349,
1353
+ "step": 27648
1354
+ },
1355
+ {
1356
+ "epoch": 0.19,
1357
+ "grad_norm": 1.0859375,
1358
+ "learning_rate": 9.351158445203434e-05,
1359
+ "loss": 1.9287,
1360
+ "step": 27792
1361
+ },
1362
+ {
1363
+ "epoch": 0.19,
1364
+ "grad_norm": 1.28125,
1365
+ "learning_rate": 9.343321845613132e-05,
1366
+ "loss": 1.9313,
1367
+ "step": 27936
1368
+ },
1369
+ {
1370
+ "epoch": 0.19,
1371
+ "grad_norm": 1.171875,
1372
+ "learning_rate": 9.335441529674888e-05,
1373
+ "loss": 1.9414,
1374
+ "step": 28080
1375
+ },
1376
+ {
1377
+ "epoch": 0.19,
1378
+ "grad_norm": 1.2734375,
1379
+ "learning_rate": 9.327517576705558e-05,
1380
+ "loss": 1.9269,
1381
+ "step": 28224
1382
+ },
1383
+ {
1384
+ "epoch": 0.19,
1385
+ "grad_norm": 1.1875,
1386
+ "learning_rate": 9.319550066461214e-05,
1387
+ "loss": 1.9202,
1388
+ "step": 28368
1389
+ },
1390
+ {
1391
+ "epoch": 0.19,
1392
+ "grad_norm": 1.109375,
1393
+ "learning_rate": 9.311539079136336e-05,
1394
+ "loss": 1.9235,
1395
+ "step": 28512
1396
+ },
1397
+ {
1398
+ "epoch": 0.19,
1399
+ "grad_norm": 1.09375,
1400
+ "learning_rate": 9.303484695363016e-05,
1401
+ "loss": 1.9439,
1402
+ "step": 28656
1403
+ },
1404
+ {
1405
+ "epoch": 0.2,
1406
+ "grad_norm": 1.171875,
1407
+ "learning_rate": 9.295386996210133e-05,
1408
+ "loss": 1.8996,
1409
+ "step": 28800
1410
+ },
1411
+ {
1412
+ "epoch": 0.2,
1413
+ "grad_norm": 1.203125,
1414
+ "learning_rate": 9.287246063182546e-05,
1415
+ "loss": 1.9243,
1416
+ "step": 28944
1417
+ },
1418
+ {
1419
+ "epoch": 0.2,
1420
+ "grad_norm": 1.3203125,
1421
+ "learning_rate": 9.279061978220272e-05,
1422
+ "loss": 1.9174,
1423
+ "step": 29088
1424
+ },
1425
+ {
1426
+ "epoch": 0.2,
1427
+ "grad_norm": 1.515625,
1428
+ "learning_rate": 9.270834823697658e-05,
1429
+ "loss": 1.9144,
1430
+ "step": 29232
1431
+ },
1432
+ {
1433
+ "epoch": 0.2,
1434
+ "grad_norm": 1.1171875,
1435
+ "learning_rate": 9.262564682422554e-05,
1436
+ "loss": 1.9212,
1437
+ "step": 29376
1438
+ },
1439
+ {
1440
+ "epoch": 0.2,
1441
+ "grad_norm": 3.78125,
1442
+ "learning_rate": 9.254251637635485e-05,
1443
+ "loss": 1.9362,
1444
+ "step": 29520
1445
+ },
1446
+ {
1447
+ "epoch": 0.2,
1448
+ "grad_norm": 1.1328125,
1449
+ "learning_rate": 9.245895773008799e-05,
1450
+ "loss": 1.9275,
1451
+ "step": 29664
1452
+ },
1453
+ {
1454
+ "epoch": 0.2,
1455
+ "grad_norm": 1.1640625,
1456
+ "learning_rate": 9.237497172645843e-05,
1457
+ "loss": 1.9486,
1458
+ "step": 29808
1459
+ },
1460
+ {
1461
+ "epoch": 0.2,
1462
+ "grad_norm": 1.0625,
1463
+ "learning_rate": 9.229055921080101e-05,
1464
+ "loss": 1.9109,
1465
+ "step": 29952
1466
+ },
1467
+ {
1468
+ "epoch": 0.2,
1469
+ "grad_norm": 1.6328125,
1470
+ "learning_rate": 9.220572103274352e-05,
1471
+ "loss": 1.9268,
1472
+ "step": 30096
1473
+ },
1474
+ {
1475
+ "epoch": 0.21,
1476
+ "grad_norm": 1.0859375,
1477
+ "learning_rate": 9.212045804619809e-05,
1478
+ "loss": 1.9211,
1479
+ "step": 30240
1480
+ },
1481
+ {
1482
+ "epoch": 0.21,
1483
+ "grad_norm": 1.234375,
1484
+ "learning_rate": 9.203477110935267e-05,
1485
+ "loss": 1.9151,
1486
+ "step": 30384
1487
+ },
1488
+ {
1489
+ "epoch": 0.21,
1490
+ "grad_norm": 1.65625,
1491
+ "learning_rate": 9.194866108466233e-05,
1492
+ "loss": 1.928,
1493
+ "step": 30528
1494
+ },
1495
+ {
1496
+ "epoch": 0.21,
1497
+ "grad_norm": 1.09375,
1498
+ "learning_rate": 9.18621288388406e-05,
1499
+ "loss": 1.9282,
1500
+ "step": 30672
1501
+ },
1502
+ {
1503
+ "epoch": 0.21,
1504
+ "grad_norm": 1.1484375,
1505
+ "learning_rate": 9.177517524285074e-05,
1506
+ "loss": 1.9251,
1507
+ "step": 30816
1508
+ },
1509
+ {
1510
+ "epoch": 0.21,
1511
+ "grad_norm": 1.0625,
1512
+ "learning_rate": 9.168780117189696e-05,
1513
+ "loss": 1.9425,
1514
+ "step": 30960
1515
+ },
1516
+ {
1517
+ "epoch": 0.21,
1518
+ "grad_norm": 1.28125,
1519
+ "learning_rate": 9.160000750541569e-05,
1520
+ "loss": 1.9576,
1521
+ "step": 31104
1522
+ },
1523
+ {
1524
+ "epoch": 0.21,
1525
+ "grad_norm": 1.125,
1526
+ "learning_rate": 9.15117951270666e-05,
1527
+ "loss": 1.9337,
1528
+ "step": 31248
1529
+ },
1530
+ {
1531
+ "epoch": 0.21,
1532
+ "grad_norm": 1.21875,
1533
+ "learning_rate": 9.14231649247238e-05,
1534
+ "loss": 1.9047,
1535
+ "step": 31392
1536
+ },
1537
+ {
1538
+ "epoch": 0.21,
1539
+ "grad_norm": 1.140625,
1540
+ "learning_rate": 9.13341177904669e-05,
1541
+ "loss": 1.9391,
1542
+ "step": 31536
1543
+ },
1544
+ {
1545
+ "epoch": 0.22,
1546
+ "grad_norm": 1.3671875,
1547
+ "learning_rate": 9.124465462057196e-05,
1548
+ "loss": 1.9066,
1549
+ "step": 31680
1550
+ },
1551
+ {
1552
+ "epoch": 0.22,
1553
+ "grad_norm": 1.1328125,
1554
+ "learning_rate": 9.115477631550258e-05,
1555
+ "loss": 1.9169,
1556
+ "step": 31824
1557
+ },
1558
+ {
1559
+ "epoch": 0.22,
1560
+ "grad_norm": 1.1328125,
1561
+ "learning_rate": 9.106448377990068e-05,
1562
+ "loss": 1.9238,
1563
+ "step": 31968
1564
+ },
1565
+ {
1566
+ "epoch": 0.22,
1567
+ "grad_norm": 1.4453125,
1568
+ "learning_rate": 9.097377792257759e-05,
1569
+ "loss": 1.9373,
1570
+ "step": 32112
1571
+ },
1572
+ {
1573
+ "epoch": 0.22,
1574
+ "grad_norm": 1.1015625,
1575
+ "learning_rate": 9.088265965650476e-05,
1576
+ "loss": 1.9265,
1577
+ "step": 32256
1578
+ },
1579
+ {
1580
+ "epoch": 0.22,
1581
+ "grad_norm": 1.1484375,
1582
+ "learning_rate": 9.079112989880455e-05,
1583
+ "loss": 1.9111,
1584
+ "step": 32400
1585
+ },
1586
+ {
1587
+ "epoch": 0.22,
1588
+ "grad_norm": 1.1015625,
1589
+ "learning_rate": 9.069918957074118e-05,
1590
+ "loss": 1.9345,
1591
+ "step": 32544
1592
+ },
1593
+ {
1594
+ "epoch": 0.22,
1595
+ "grad_norm": 1.1953125,
1596
+ "learning_rate": 9.060683959771124e-05,
1597
+ "loss": 1.9157,
1598
+ "step": 32688
1599
+ },
1600
+ {
1601
+ "epoch": 0.22,
1602
+ "grad_norm": 1.09375,
1603
+ "learning_rate": 9.051408090923449e-05,
1604
+ "loss": 1.9033,
1605
+ "step": 32832
1606
+ },
1607
+ {
1608
+ "epoch": 0.22,
1609
+ "grad_norm": 3.515625,
1610
+ "learning_rate": 9.042091443894456e-05,
1611
+ "loss": 1.9159,
1612
+ "step": 32976
1613
+ },
1614
+ {
1615
+ "epoch": 0.23,
1616
+ "grad_norm": 1.3046875,
1617
+ "learning_rate": 9.032734112457937e-05,
1618
+ "loss": 1.9182,
1619
+ "step": 33120
1620
+ },
1621
+ {
1622
+ "epoch": 0.23,
1623
+ "grad_norm": 1.203125,
1624
+ "learning_rate": 9.02333619079719e-05,
1625
+ "loss": 1.9231,
1626
+ "step": 33264
1627
+ },
1628
+ {
1629
+ "epoch": 0.23,
1630
+ "grad_norm": 2.90625,
1631
+ "learning_rate": 9.013897773504052e-05,
1632
+ "loss": 1.9234,
1633
+ "step": 33408
1634
+ },
1635
+ {
1636
+ "epoch": 0.23,
1637
+ "grad_norm": 1.1875,
1638
+ "learning_rate": 9.004418955577965e-05,
1639
+ "loss": 1.9147,
1640
+ "step": 33552
1641
+ },
1642
+ {
1643
+ "epoch": 0.23,
1644
+ "grad_norm": 1.1640625,
1645
+ "learning_rate": 8.994899832425005e-05,
1646
+ "loss": 1.924,
1647
+ "step": 33696
1648
+ },
1649
+ {
1650
+ "epoch": 0.23,
1651
+ "grad_norm": 1.0859375,
1652
+ "learning_rate": 8.985340499856933e-05,
1653
+ "loss": 1.9249,
1654
+ "step": 33840
1655
+ },
1656
+ {
1657
+ "epoch": 0.23,
1658
+ "grad_norm": 1.078125,
1659
+ "learning_rate": 8.975741054090216e-05,
1660
+ "loss": 1.9018,
1661
+ "step": 33984
1662
+ },
1663
+ {
1664
+ "epoch": 0.23,
1665
+ "grad_norm": 1.28125,
1666
+ "learning_rate": 8.96610159174508e-05,
1667
+ "loss": 1.9359,
1668
+ "step": 34128
1669
+ },
1670
+ {
1671
+ "epoch": 0.23,
1672
+ "grad_norm": 1.1640625,
1673
+ "learning_rate": 8.956422209844514e-05,
1674
+ "loss": 1.9366,
1675
+ "step": 34272
1676
+ },
1677
+ {
1678
+ "epoch": 0.23,
1679
+ "grad_norm": 1.2265625,
1680
+ "learning_rate": 8.946703005813312e-05,
1681
+ "loss": 1.921,
1682
+ "step": 34416
1683
+ },
1684
+ {
1685
+ "epoch": 0.24,
1686
+ "grad_norm": 1.1640625,
1687
+ "learning_rate": 8.936944077477084e-05,
1688
+ "loss": 1.916,
1689
+ "step": 34560
1690
+ },
1691
+ {
1692
+ "epoch": 0.24,
1693
+ "grad_norm": 2.484375,
1694
+ "learning_rate": 8.92714552306127e-05,
1695
+ "loss": 1.9121,
1696
+ "step": 34704
1697
+ },
1698
+ {
1699
+ "epoch": 0.24,
1700
+ "grad_norm": 1.15625,
1701
+ "learning_rate": 8.917307441190158e-05,
1702
+ "loss": 1.907,
1703
+ "step": 34848
1704
+ },
1705
+ {
1706
+ "epoch": 0.24,
1707
+ "grad_norm": 1.203125,
1708
+ "learning_rate": 8.907429930885882e-05,
1709
+ "loss": 1.9058,
1710
+ "step": 34992
1711
+ },
1712
+ {
1713
+ "epoch": 0.24,
1714
+ "grad_norm": 1.1796875,
1715
+ "learning_rate": 8.897513091567435e-05,
1716
+ "loss": 1.924,
1717
+ "step": 35136
1718
+ },
1719
+ {
1720
+ "epoch": 0.24,
1721
+ "grad_norm": 1.515625,
1722
+ "learning_rate": 8.88755702304966e-05,
1723
+ "loss": 1.9397,
1724
+ "step": 35280
1725
+ },
1726
+ {
1727
+ "epoch": 0.24,
1728
+ "grad_norm": 1.1171875,
1729
+ "learning_rate": 8.877561825542256e-05,
1730
+ "loss": 1.9426,
1731
+ "step": 35424
1732
+ },
1733
+ {
1734
+ "epoch": 0.24,
1735
+ "grad_norm": 1.21875,
1736
+ "learning_rate": 8.867527599648755e-05,
1737
+ "loss": 1.9151,
1738
+ "step": 35568
1739
+ },
1740
+ {
1741
+ "epoch": 0.24,
1742
+ "grad_norm": 1.1015625,
1743
+ "learning_rate": 8.85745444636552e-05,
1744
+ "loss": 1.9062,
1745
+ "step": 35712
1746
+ },
1747
+ {
1748
+ "epoch": 0.24,
1749
+ "grad_norm": 1.1484375,
1750
+ "learning_rate": 8.847342467080729e-05,
1751
+ "loss": 1.9036,
1752
+ "step": 35856
1753
+ },
1754
+ {
1755
+ "epoch": 0.24,
1756
+ "grad_norm": 1.109375,
1757
+ "learning_rate": 8.837191763573343e-05,
1758
+ "loss": 1.9183,
1759
+ "step": 36000
1760
+ },
1761
+ {
1762
+ "epoch": 0.25,
1763
+ "grad_norm": 1.078125,
1764
+ "learning_rate": 8.827002438012095e-05,
1765
+ "loss": 1.9288,
1766
+ "step": 36144
1767
+ },
1768
+ {
1769
+ "epoch": 0.25,
1770
+ "grad_norm": 1.109375,
1771
+ "learning_rate": 8.816774592954458e-05,
1772
+ "loss": 1.9296,
1773
+ "step": 36288
1774
+ },
1775
+ {
1776
+ "epoch": 0.25,
1777
+ "grad_norm": 1.3359375,
1778
+ "learning_rate": 8.806508331345609e-05,
1779
+ "loss": 1.9343,
1780
+ "step": 36432
1781
+ },
1782
+ {
1783
+ "epoch": 0.25,
1784
+ "grad_norm": 1.171875,
1785
+ "learning_rate": 8.79620375651739e-05,
1786
+ "loss": 1.9268,
1787
+ "step": 36576
1788
+ },
1789
+ {
1790
+ "epoch": 0.25,
1791
+ "grad_norm": 1.078125,
1792
+ "learning_rate": 8.785860972187279e-05,
1793
+ "loss": 1.9331,
1794
+ "step": 36720
1795
+ },
1796
+ {
1797
+ "epoch": 0.25,
1798
+ "grad_norm": 1.140625,
1799
+ "learning_rate": 8.775480082457336e-05,
1800
+ "loss": 1.9028,
1801
+ "step": 36864
1802
+ },
1803
+ {
1804
+ "epoch": 0.25,
1805
+ "grad_norm": 1.1484375,
1806
+ "learning_rate": 8.765061191813165e-05,
1807
+ "loss": 1.9192,
1808
+ "step": 37008
1809
+ },
1810
+ {
1811
+ "epoch": 0.25,
1812
+ "grad_norm": 1.375,
1813
+ "learning_rate": 8.754604405122848e-05,
1814
+ "loss": 1.9208,
1815
+ "step": 37152
1816
+ },
1817
+ {
1818
+ "epoch": 0.25,
1819
+ "grad_norm": 1.1640625,
1820
+ "learning_rate": 8.744109827635902e-05,
1821
+ "loss": 1.9039,
1822
+ "step": 37296
1823
+ },
1824
+ {
1825
+ "epoch": 0.25,
1826
+ "grad_norm": 1.0859375,
1827
+ "learning_rate": 8.733577564982218e-05,
1828
+ "loss": 1.9271,
1829
+ "step": 37440
1830
+ },
1831
+ {
1832
+ "epoch": 0.26,
1833
+ "grad_norm": 1.21875,
1834
+ "learning_rate": 8.723007723170988e-05,
1835
+ "loss": 1.9236,
1836
+ "step": 37584
1837
+ },
1838
+ {
1839
+ "epoch": 0.26,
1840
+ "grad_norm": 1.1796875,
1841
+ "learning_rate": 8.712400408589654e-05,
1842
+ "loss": 1.9426,
1843
+ "step": 37728
1844
+ },
1845
+ {
1846
+ "epoch": 0.26,
1847
+ "grad_norm": 1.15625,
1848
+ "learning_rate": 8.701755728002823e-05,
1849
+ "loss": 1.9107,
1850
+ "step": 37872
1851
+ },
1852
+ {
1853
+ "epoch": 0.26,
1854
+ "grad_norm": 1.125,
1855
+ "learning_rate": 8.691073788551196e-05,
1856
+ "loss": 1.9313,
1857
+ "step": 38016
1858
+ },
1859
+ {
1860
+ "epoch": 0.26,
1861
+ "grad_norm": 1.3671875,
1862
+ "learning_rate": 8.680354697750499e-05,
1863
+ "loss": 1.9089,
1864
+ "step": 38160
1865
+ },
1866
+ {
1867
+ "epoch": 0.26,
1868
+ "grad_norm": 1.1328125,
1869
+ "learning_rate": 8.669598563490385e-05,
1870
+ "loss": 1.9294,
1871
+ "step": 38304
1872
+ },
1873
+ {
1874
+ "epoch": 0.26,
1875
+ "grad_norm": 1.0859375,
1876
+ "learning_rate": 8.658805494033365e-05,
1877
+ "loss": 1.9045,
1878
+ "step": 38448
1879
+ },
1880
+ {
1881
+ "epoch": 0.26,
1882
+ "grad_norm": 1.421875,
1883
+ "learning_rate": 8.647975598013698e-05,
1884
+ "loss": 1.9113,
1885
+ "step": 38592
1886
+ },
1887
+ {
1888
+ "epoch": 0.26,
1889
+ "grad_norm": 2.09375,
1890
+ "learning_rate": 8.63710898443632e-05,
1891
+ "loss": 1.9295,
1892
+ "step": 38736
1893
+ },
1894
+ {
1895
+ "epoch": 0.26,
1896
+ "grad_norm": 1.1484375,
1897
+ "learning_rate": 8.626205762675732e-05,
1898
+ "loss": 1.9194,
1899
+ "step": 38880
1900
+ },
1901
+ {
1902
+ "epoch": 0.27,
1903
+ "grad_norm": 1.1640625,
1904
+ "learning_rate": 8.615266042474904e-05,
1905
+ "loss": 1.9299,
1906
+ "step": 39024
1907
+ },
1908
+ {
1909
+ "epoch": 0.27,
1910
+ "grad_norm": 1.15625,
1911
+ "learning_rate": 8.604289933944165e-05,
1912
+ "loss": 1.9278,
1913
+ "step": 39168
1914
+ },
1915
+ {
1916
+ "epoch": 0.27,
1917
+ "grad_norm": 2.015625,
1918
+ "learning_rate": 8.593277547560108e-05,
1919
+ "loss": 1.9234,
1920
+ "step": 39312
1921
+ },
1922
+ {
1923
+ "epoch": 0.27,
1924
+ "grad_norm": 1.09375,
1925
+ "learning_rate": 8.582228994164463e-05,
1926
+ "loss": 1.9134,
1927
+ "step": 39456
1928
+ },
1929
+ {
1930
+ "epoch": 0.27,
1931
+ "grad_norm": 1.3515625,
1932
+ "learning_rate": 8.571144384962991e-05,
1933
+ "loss": 1.9048,
1934
+ "step": 39600
1935
+ },
1936
+ {
1937
+ "epoch": 0.27,
1938
+ "grad_norm": 1.4375,
1939
+ "learning_rate": 8.560023831524358e-05,
1940
+ "loss": 1.9368,
1941
+ "step": 39744
1942
+ },
1943
+ {
1944
+ "epoch": 0.27,
1945
+ "grad_norm": 1.203125,
1946
+ "learning_rate": 8.548867445779015e-05,
1947
+ "loss": 1.9274,
1948
+ "step": 39888
1949
+ },
1950
+ {
1951
+ "epoch": 0.27,
1952
+ "grad_norm": 1.203125,
1953
+ "learning_rate": 8.53767534001808e-05,
1954
+ "loss": 1.9237,
1955
+ "step": 40032
1956
+ },
1957
+ {
1958
+ "epoch": 0.27,
1959
+ "grad_norm": 1.2109375,
1960
+ "learning_rate": 8.52644762689219e-05,
1961
+ "loss": 1.9127,
1962
+ "step": 40176
1963
+ },
1964
+ {
1965
+ "epoch": 0.27,
1966
+ "grad_norm": 1.1796875,
1967
+ "learning_rate": 8.515184419410382e-05,
1968
+ "loss": 1.9343,
1969
+ "step": 40320
1970
+ },
1971
+ {
1972
+ "epoch": 0.28,
1973
+ "grad_norm": 1.21875,
1974
+ "learning_rate": 8.503885830938949e-05,
1975
+ "loss": 1.9175,
1976
+ "step": 40464
1977
+ },
1978
+ {
1979
+ "epoch": 0.28,
1980
+ "grad_norm": 1.4375,
1981
+ "learning_rate": 8.492551975200298e-05,
1982
+ "loss": 1.8913,
1983
+ "step": 40608
1984
+ },
1985
+ {
1986
+ "epoch": 0.28,
1987
+ "grad_norm": 1.328125,
1988
+ "learning_rate": 8.481182966271815e-05,
1989
+ "loss": 1.9123,
1990
+ "step": 40752
1991
+ },
1992
+ {
1993
+ "epoch": 0.28,
1994
+ "grad_norm": 1.125,
1995
+ "learning_rate": 8.469778918584701e-05,
1996
+ "loss": 1.8848,
1997
+ "step": 40896
1998
+ },
1999
+ {
2000
+ "epoch": 0.28,
2001
+ "grad_norm": 1.1171875,
2002
+ "learning_rate": 8.458339946922833e-05,
2003
+ "loss": 1.9092,
2004
+ "step": 41040
2005
+ },
2006
+ {
2007
+ "epoch": 0.28,
2008
+ "grad_norm": 1.4609375,
2009
+ "learning_rate": 8.4468661664216e-05,
2010
+ "loss": 1.9293,
2011
+ "step": 41184
2012
+ },
2013
+ {
2014
+ "epoch": 0.28,
2015
+ "grad_norm": 1.15625,
2016
+ "learning_rate": 8.435357692566758e-05,
2017
+ "loss": 1.8904,
2018
+ "step": 41328
2019
+ },
2020
+ {
2021
+ "epoch": 0.28,
2022
+ "grad_norm": 1.875,
2023
+ "learning_rate": 8.42381464119325e-05,
2024
+ "loss": 1.9033,
2025
+ "step": 41472
2026
+ },
2027
+ {
2028
+ "epoch": 0.28,
2029
+ "grad_norm": 1.296875,
2030
+ "learning_rate": 8.412237128484047e-05,
2031
+ "loss": 1.8981,
2032
+ "step": 41616
2033
+ },
2034
+ {
2035
+ "epoch": 0.28,
2036
+ "grad_norm": 1.125,
2037
+ "learning_rate": 8.400625270968987e-05,
2038
+ "loss": 1.9204,
2039
+ "step": 41760
2040
+ },
2041
+ {
2042
+ "epoch": 0.29,
2043
+ "grad_norm": 4.78125,
2044
+ "learning_rate": 8.388979185523589e-05,
2045
+ "loss": 1.918,
2046
+ "step": 41904
2047
+ },
2048
+ {
2049
+ "epoch": 0.29,
2050
+ "grad_norm": 1.109375,
2051
+ "learning_rate": 8.377298989367884e-05,
2052
+ "loss": 1.8975,
2053
+ "step": 42048
2054
+ },
2055
+ {
2056
+ "epoch": 0.29,
2057
+ "grad_norm": 1.2578125,
2058
+ "learning_rate": 8.365584800065236e-05,
2059
+ "loss": 1.9176,
2060
+ "step": 42192
2061
+ },
2062
+ {
2063
+ "epoch": 0.29,
2064
+ "grad_norm": 1.1328125,
2065
+ "learning_rate": 8.353836735521154e-05,
2066
+ "loss": 1.9063,
2067
+ "step": 42336
2068
+ },
2069
+ {
2070
+ "epoch": 0.29,
2071
+ "grad_norm": 1.1328125,
2072
+ "learning_rate": 8.342054913982107e-05,
2073
+ "loss": 1.9107,
2074
+ "step": 42480
2075
+ },
2076
+ {
2077
+ "epoch": 0.29,
2078
+ "grad_norm": 1.1484375,
2079
+ "learning_rate": 8.330239454034337e-05,
2080
+ "loss": 1.9103,
2081
+ "step": 42624
2082
+ },
2083
+ {
2084
+ "epoch": 0.29,
2085
+ "grad_norm": 1.109375,
2086
+ "learning_rate": 8.318390474602663e-05,
2087
+ "loss": 1.9036,
2088
+ "step": 42768
2089
+ },
2090
+ {
2091
+ "epoch": 0.29,
2092
+ "grad_norm": 1.71875,
2093
+ "learning_rate": 8.306508094949277e-05,
2094
+ "loss": 1.9126,
2095
+ "step": 42912
2096
+ },
2097
+ {
2098
+ "epoch": 0.29,
2099
+ "grad_norm": 1.171875,
2100
+ "learning_rate": 8.294592434672561e-05,
2101
+ "loss": 1.904,
2102
+ "step": 43056
2103
+ },
2104
+ {
2105
+ "epoch": 0.29,
2106
+ "grad_norm": 1.171875,
2107
+ "learning_rate": 8.282643613705867e-05,
2108
+ "loss": 1.9269,
2109
+ "step": 43200
2110
+ },
2111
+ {
2112
+ "epoch": 0.29,
2113
+ "grad_norm": 1.1484375,
2114
+ "learning_rate": 8.270661752316316e-05,
2115
+ "loss": 1.923,
2116
+ "step": 43344
2117
+ },
2118
+ {
2119
+ "epoch": 0.3,
2120
+ "grad_norm": 1.046875,
2121
+ "learning_rate": 8.258646971103584e-05,
2122
+ "loss": 1.9142,
2123
+ "step": 43488
2124
+ },
2125
+ {
2126
+ "epoch": 0.3,
2127
+ "grad_norm": 1.3984375,
2128
+ "learning_rate": 8.246599390998699e-05,
2129
+ "loss": 1.902,
2130
+ "step": 43632
2131
+ },
2132
+ {
2133
+ "epoch": 0.3,
2134
+ "grad_norm": 1.125,
2135
+ "learning_rate": 8.234519133262808e-05,
2136
+ "loss": 1.9054,
2137
+ "step": 43776
2138
+ },
2139
+ {
2140
+ "epoch": 0.3,
2141
+ "grad_norm": 1.7265625,
2142
+ "learning_rate": 8.222406319485971e-05,
2143
+ "loss": 1.934,
2144
+ "step": 43920
2145
+ },
2146
+ {
2147
+ "epoch": 0.3,
2148
+ "grad_norm": 1.234375,
2149
+ "learning_rate": 8.210261071585927e-05,
2150
+ "loss": 1.9144,
2151
+ "step": 44064
2152
+ },
2153
+ {
2154
+ "epoch": 0.3,
2155
+ "grad_norm": 13.6875,
2156
+ "learning_rate": 8.198083511806868e-05,
2157
+ "loss": 1.9006,
2158
+ "step": 44208
2159
+ },
2160
+ {
2161
+ "epoch": 0.3,
2162
+ "grad_norm": 1.0625,
2163
+ "learning_rate": 8.185873762718217e-05,
2164
+ "loss": 1.914,
2165
+ "step": 44352
2166
+ },
2167
+ {
2168
+ "epoch": 0.3,
2169
+ "grad_norm": 1.1171875,
2170
+ "learning_rate": 8.173631947213385e-05,
2171
+ "loss": 1.9198,
2172
+ "step": 44496
2173
+ },
2174
+ {
2175
+ "epoch": 0.3,
2176
+ "grad_norm": 1.125,
2177
+ "learning_rate": 8.161358188508538e-05,
2178
+ "loss": 1.928,
2179
+ "step": 44640
2180
+ },
2181
+ {
2182
+ "epoch": 0.3,
2183
+ "grad_norm": 1.0859375,
2184
+ "learning_rate": 8.149052610141357e-05,
2185
+ "loss": 1.9129,
2186
+ "step": 44784
2187
+ },
2188
+ {
2189
+ "epoch": 0.31,
2190
+ "grad_norm": 1.09375,
2191
+ "learning_rate": 8.136715335969792e-05,
2192
+ "loss": 1.9024,
2193
+ "step": 44928
2194
+ },
2195
+ {
2196
+ "epoch": 0.31,
2197
+ "grad_norm": 1.15625,
2198
+ "learning_rate": 8.12434649017082e-05,
2199
+ "loss": 1.9092,
2200
+ "step": 45072
2201
+ },
2202
+ {
2203
+ "epoch": 0.31,
2204
+ "grad_norm": 1.0703125,
2205
+ "learning_rate": 8.111946197239188e-05,
2206
+ "loss": 1.9193,
2207
+ "step": 45216
2208
+ },
2209
+ {
2210
+ "epoch": 0.31,
2211
+ "grad_norm": 1.3515625,
2212
+ "learning_rate": 8.099514581986171e-05,
2213
+ "loss": 1.9084,
2214
+ "step": 45360
2215
+ },
2216
+ {
2217
+ "epoch": 0.31,
2218
+ "grad_norm": 1.015625,
2219
+ "learning_rate": 8.087051769538304e-05,
2220
+ "loss": 1.9034,
2221
+ "step": 45504
2222
+ },
2223
+ {
2224
+ "epoch": 0.31,
2225
+ "grad_norm": 1.203125,
2226
+ "learning_rate": 8.074557885336127e-05,
2227
+ "loss": 1.9067,
2228
+ "step": 45648
2229
+ },
2230
+ {
2231
+ "epoch": 0.31,
2232
+ "grad_norm": 1.046875,
2233
+ "learning_rate": 8.062033055132927e-05,
2234
+ "loss": 1.9127,
2235
+ "step": 45792
2236
+ },
2237
+ {
2238
+ "epoch": 0.31,
2239
+ "grad_norm": 1.390625,
2240
+ "learning_rate": 8.049477404993464e-05,
2241
+ "loss": 1.9125,
2242
+ "step": 45936
2243
+ },
2244
+ {
2245
+ "epoch": 0.31,
2246
+ "grad_norm": 1.140625,
2247
+ "learning_rate": 8.036891061292709e-05,
2248
+ "loss": 1.9074,
2249
+ "step": 46080
2250
+ },
2251
+ {
2252
+ "epoch": 0.31,
2253
+ "grad_norm": 1.09375,
2254
+ "learning_rate": 8.02427415071457e-05,
2255
+ "loss": 1.9195,
2256
+ "step": 46224
2257
+ },
2258
+ {
2259
+ "epoch": 0.32,
2260
+ "grad_norm": 1.2265625,
2261
+ "learning_rate": 8.011626800250615e-05,
2262
+ "loss": 1.915,
2263
+ "step": 46368
2264
+ },
2265
+ {
2266
+ "epoch": 0.32,
2267
+ "grad_norm": 1.0390625,
2268
+ "learning_rate": 7.998949137198795e-05,
2269
+ "loss": 1.9123,
2270
+ "step": 46512
2271
+ },
2272
+ {
2273
+ "epoch": 0.32,
2274
+ "grad_norm": 1.0859375,
2275
+ "learning_rate": 7.986241289162162e-05,
2276
+ "loss": 1.9281,
2277
+ "step": 46656
2278
+ },
2279
+ {
2280
+ "epoch": 0.32,
2281
+ "grad_norm": 1.109375,
2282
+ "learning_rate": 7.97350338404759e-05,
2283
+ "loss": 1.8771,
2284
+ "step": 46800
2285
+ },
2286
+ {
2287
+ "epoch": 0.32,
2288
+ "grad_norm": 1.1015625,
2289
+ "learning_rate": 7.960735550064478e-05,
2290
+ "loss": 1.9318,
2291
+ "step": 46944
2292
+ },
2293
+ {
2294
+ "epoch": 0.32,
2295
+ "grad_norm": 1.0703125,
2296
+ "learning_rate": 7.947937915723466e-05,
2297
+ "loss": 1.9139,
2298
+ "step": 47088
2299
+ },
2300
+ {
2301
+ "epoch": 0.32,
2302
+ "grad_norm": 15.125,
2303
+ "learning_rate": 7.935110609835142e-05,
2304
+ "loss": 1.9126,
2305
+ "step": 47232
2306
+ },
2307
+ {
2308
+ "epoch": 0.32,
2309
+ "grad_norm": 1.125,
2310
+ "learning_rate": 7.922253761508743e-05,
2311
+ "loss": 1.9131,
2312
+ "step": 47376
2313
+ },
2314
+ {
2315
+ "epoch": 0.32,
2316
+ "grad_norm": 1.125,
2317
+ "learning_rate": 7.909367500150856e-05,
2318
+ "loss": 1.9272,
2319
+ "step": 47520
2320
+ },
2321
+ {
2322
+ "epoch": 0.32,
2323
+ "grad_norm": 1.09375,
2324
+ "learning_rate": 7.89645195546411e-05,
2325
+ "loss": 1.9072,
2326
+ "step": 47664
2327
+ },
2328
+ {
2329
+ "epoch": 0.33,
2330
+ "grad_norm": 1.1953125,
2331
+ "learning_rate": 7.88350725744589e-05,
2332
+ "loss": 1.8913,
2333
+ "step": 47808
2334
+ },
2335
+ {
2336
+ "epoch": 0.33,
2337
+ "grad_norm": 1.078125,
2338
+ "learning_rate": 7.870533536386999e-05,
2339
+ "loss": 1.9215,
2340
+ "step": 47952
2341
+ },
2342
+ {
2343
+ "epoch": 0.33,
2344
+ "grad_norm": 1.1796875,
2345
+ "learning_rate": 7.857530922870376e-05,
2346
+ "loss": 1.9269,
2347
+ "step": 48096
2348
+ },
2349
+ {
2350
+ "epoch": 0.33,
2351
+ "grad_norm": 1.2578125,
2352
+ "learning_rate": 7.844499547769754e-05,
2353
+ "loss": 1.9072,
2354
+ "step": 48240
2355
+ },
2356
+ {
2357
+ "epoch": 0.33,
2358
+ "grad_norm": 1.15625,
2359
+ "learning_rate": 7.831439542248372e-05,
2360
+ "loss": 1.9031,
2361
+ "step": 48384
2362
+ },
2363
+ {
2364
+ "epoch": 0.33,
2365
+ "grad_norm": 2.796875,
2366
+ "learning_rate": 7.818351037757627e-05,
2367
+ "loss": 1.9148,
2368
+ "step": 48528
2369
+ },
2370
+ {
2371
+ "epoch": 0.33,
2372
+ "grad_norm": 2.546875,
2373
+ "learning_rate": 7.805234166035771e-05,
2374
+ "loss": 1.9211,
2375
+ "step": 48672
2376
+ },
2377
+ {
2378
+ "epoch": 0.33,
2379
+ "grad_norm": 1.140625,
2380
+ "learning_rate": 7.792089059106574e-05,
2381
+ "loss": 1.9307,
2382
+ "step": 48816
2383
+ },
2384
+ {
2385
+ "epoch": 0.33,
2386
+ "grad_norm": 1.2109375,
2387
+ "learning_rate": 7.778915849277997e-05,
2388
+ "loss": 1.9168,
2389
+ "step": 48960
2390
+ },
2391
+ {
2392
+ "epoch": 0.33,
2393
+ "grad_norm": 1.2109375,
2394
+ "learning_rate": 7.765714669140869e-05,
2395
+ "loss": 1.9243,
2396
+ "step": 49104
2397
+ },
2398
+ {
2399
+ "epoch": 0.34,
2400
+ "grad_norm": 1.65625,
2401
+ "learning_rate": 7.752485651567539e-05,
2402
+ "loss": 1.9008,
2403
+ "step": 49248
2404
+ },
2405
+ {
2406
+ "epoch": 0.34,
2407
+ "grad_norm": 1.109375,
2408
+ "learning_rate": 7.739228929710544e-05,
2409
+ "loss": 1.9126,
2410
+ "step": 49392
2411
+ },
2412
+ {
2413
+ "epoch": 0.34,
2414
+ "grad_norm": 1.1171875,
2415
+ "learning_rate": 7.725944637001277e-05,
2416
+ "loss": 1.9219,
2417
+ "step": 49536
2418
+ },
2419
+ {
2420
+ "epoch": 0.34,
2421
+ "grad_norm": 1.140625,
2422
+ "learning_rate": 7.71263290714863e-05,
2423
+ "loss": 1.9154,
2424
+ "step": 49680
2425
+ },
2426
+ {
2427
+ "epoch": 0.34,
2428
+ "grad_norm": 1.453125,
2429
+ "learning_rate": 7.699293874137657e-05,
2430
+ "loss": 1.9111,
2431
+ "step": 49824
2432
+ },
2433
+ {
2434
+ "epoch": 0.34,
2435
+ "grad_norm": 1.109375,
2436
+ "learning_rate": 7.685927672228226e-05,
2437
+ "loss": 1.9028,
2438
+ "step": 49968
2439
+ },
2440
+ {
2441
+ "epoch": 0.34,
2442
+ "grad_norm": 1.5703125,
2443
+ "learning_rate": 7.672534435953662e-05,
2444
+ "loss": 1.9026,
2445
+ "step": 50112
2446
+ },
2447
+ {
2448
+ "epoch": 0.34,
2449
+ "grad_norm": 1.234375,
2450
+ "learning_rate": 7.659114300119395e-05,
2451
+ "loss": 1.9071,
2452
+ "step": 50256
2453
+ },
2454
+ {
2455
+ "epoch": 0.34,
2456
+ "grad_norm": 1.296875,
2457
+ "learning_rate": 7.645667399801609e-05,
2458
+ "loss": 1.921,
2459
+ "step": 50400
2460
+ },
2461
+ {
2462
+ "epoch": 0.34,
2463
+ "grad_norm": 1.046875,
2464
+ "learning_rate": 7.632193870345872e-05,
2465
+ "loss": 1.8933,
2466
+ "step": 50544
2467
+ },
2468
+ {
2469
+ "epoch": 0.34,
2470
+ "grad_norm": 2.796875,
2471
+ "learning_rate": 7.618693847365784e-05,
2472
+ "loss": 1.9139,
2473
+ "step": 50688
2474
+ },
2475
+ {
2476
+ "epoch": 0.35,
2477
+ "grad_norm": 1.125,
2478
+ "learning_rate": 7.605167466741603e-05,
2479
+ "loss": 1.921,
2480
+ "step": 50832
2481
+ },
2482
+ {
2483
+ "epoch": 0.35,
2484
+ "grad_norm": 1.1953125,
2485
+ "learning_rate": 7.591614864618883e-05,
2486
+ "loss": 1.9422,
2487
+ "step": 50976
2488
+ },
2489
+ {
2490
+ "epoch": 0.35,
2491
+ "grad_norm": 1.0546875,
2492
+ "learning_rate": 7.578036177407109e-05,
2493
+ "loss": 1.8973,
2494
+ "step": 51120
2495
+ },
2496
+ {
2497
+ "epoch": 0.35,
2498
+ "grad_norm": 1.875,
2499
+ "learning_rate": 7.564431541778302e-05,
2500
+ "loss": 1.9229,
2501
+ "step": 51264
2502
+ },
2503
+ {
2504
+ "epoch": 0.35,
2505
+ "grad_norm": 1.0625,
2506
+ "learning_rate": 7.550801094665673e-05,
2507
+ "loss": 1.8958,
2508
+ "step": 51408
2509
+ },
2510
+ {
2511
+ "epoch": 0.35,
2512
+ "grad_norm": 1.1328125,
2513
+ "learning_rate": 7.537144973262228e-05,
2514
+ "loss": 1.9134,
2515
+ "step": 51552
2516
+ },
2517
+ {
2518
+ "epoch": 0.35,
2519
+ "grad_norm": 1.7890625,
2520
+ "learning_rate": 7.52346331501938e-05,
2521
+ "loss": 1.8897,
2522
+ "step": 51696
2523
+ },
2524
+ {
2525
+ "epoch": 0.35,
2526
+ "grad_norm": 1.546875,
2527
+ "learning_rate": 7.509756257645591e-05,
2528
+ "loss": 1.9281,
2529
+ "step": 51840
2530
+ },
2531
+ {
2532
+ "epoch": 0.35,
2533
+ "grad_norm": 1.09375,
2534
+ "learning_rate": 7.496023939104952e-05,
2535
+ "loss": 1.9152,
2536
+ "step": 51984
2537
+ },
2538
+ {
2539
+ "epoch": 0.35,
2540
+ "grad_norm": 1.1484375,
2541
+ "learning_rate": 7.482266497615827e-05,
2542
+ "loss": 1.9152,
2543
+ "step": 52128
2544
+ },
2545
+ {
2546
+ "epoch": 0.36,
2547
+ "grad_norm": 1.0703125,
2548
+ "learning_rate": 7.468484071649439e-05,
2549
+ "loss": 1.9176,
2550
+ "step": 52272
2551
+ },
2552
+ {
2553
+ "epoch": 0.36,
2554
+ "grad_norm": 1.4609375,
2555
+ "learning_rate": 7.454676799928488e-05,
2556
+ "loss": 1.9076,
2557
+ "step": 52416
2558
+ },
2559
+ {
2560
+ "epoch": 0.36,
2561
+ "grad_norm": 1.234375,
2562
+ "learning_rate": 7.44084482142575e-05,
2563
+ "loss": 1.8938,
2564
+ "step": 52560
2565
+ },
2566
+ {
2567
+ "epoch": 0.36,
2568
+ "grad_norm": 1.140625,
2569
+ "learning_rate": 7.426988275362679e-05,
2570
+ "loss": 1.9025,
2571
+ "step": 52704
2572
+ },
2573
+ {
2574
+ "epoch": 0.36,
2575
+ "grad_norm": 1.1953125,
2576
+ "learning_rate": 7.413107301208009e-05,
2577
+ "loss": 1.9007,
2578
+ "step": 52848
2579
+ },
2580
+ {
2581
+ "epoch": 0.36,
2582
+ "grad_norm": 1.1015625,
2583
+ "learning_rate": 7.399202038676342e-05,
2584
+ "loss": 1.9081,
2585
+ "step": 52992
2586
+ },
2587
+ {
2588
+ "epoch": 0.36,
2589
+ "grad_norm": 1.109375,
2590
+ "learning_rate": 7.38527262772675e-05,
2591
+ "loss": 1.9028,
2592
+ "step": 53136
2593
+ },
2594
+ {
2595
+ "epoch": 0.36,
2596
+ "grad_norm": 1.265625,
2597
+ "learning_rate": 7.371319208561365e-05,
2598
+ "loss": 1.9027,
2599
+ "step": 53280
2600
+ },
2601
+ {
2602
+ "epoch": 0.36,
2603
+ "grad_norm": 1.6640625,
2604
+ "learning_rate": 7.357341921623965e-05,
2605
+ "loss": 1.9105,
2606
+ "step": 53424
2607
+ },
2608
+ {
2609
+ "epoch": 0.36,
2610
+ "grad_norm": 1.2265625,
2611
+ "learning_rate": 7.343340907598561e-05,
2612
+ "loss": 1.907,
2613
+ "step": 53568
2614
+ },
2615
+ {
2616
+ "epoch": 0.37,
2617
+ "grad_norm": 1.875,
2618
+ "learning_rate": 7.329316307407981e-05,
2619
+ "loss": 1.9045,
2620
+ "step": 53712
2621
+ },
2622
+ {
2623
+ "epoch": 0.37,
2624
+ "grad_norm": 1.171875,
2625
+ "learning_rate": 7.315268262212451e-05,
2626
+ "loss": 1.9191,
2627
+ "step": 53856
2628
+ },
2629
+ {
2630
+ "epoch": 0.37,
2631
+ "grad_norm": 1.109375,
2632
+ "learning_rate": 7.301196913408182e-05,
2633
+ "loss": 1.9195,
2634
+ "step": 54000
2635
+ },
2636
+ {
2637
+ "epoch": 0.37,
2638
+ "grad_norm": 1.109375,
2639
+ "learning_rate": 7.287102402625931e-05,
2640
+ "loss": 1.881,
2641
+ "step": 54144
2642
+ },
2643
+ {
2644
+ "epoch": 0.37,
2645
+ "grad_norm": 1.09375,
2646
+ "learning_rate": 7.27298487172959e-05,
2647
+ "loss": 1.918,
2648
+ "step": 54288
2649
+ },
2650
+ {
2651
+ "epoch": 0.37,
2652
+ "grad_norm": 1.421875,
2653
+ "learning_rate": 7.258844462814755e-05,
2654
+ "loss": 1.9015,
2655
+ "step": 54432
2656
+ },
2657
+ {
2658
+ "epoch": 0.37,
2659
+ "grad_norm": 1.1640625,
2660
+ "learning_rate": 7.244681318207287e-05,
2661
+ "loss": 1.91,
2662
+ "step": 54576
2663
+ },
2664
+ {
2665
+ "epoch": 0.37,
2666
+ "grad_norm": 1.203125,
2667
+ "learning_rate": 7.23049558046189e-05,
2668
+ "loss": 1.8926,
2669
+ "step": 54720
2670
+ },
2671
+ {
2672
+ "epoch": 0.37,
2673
+ "grad_norm": 1.34375,
2674
+ "learning_rate": 7.216287392360674e-05,
2675
+ "loss": 1.916,
2676
+ "step": 54864
2677
+ },
2678
+ {
2679
+ "epoch": 0.37,
2680
+ "grad_norm": 1.28125,
2681
+ "learning_rate": 7.20205689691171e-05,
2682
+ "loss": 1.9072,
2683
+ "step": 55008
2684
+ },
2685
+ {
2686
+ "epoch": 0.38,
2687
+ "grad_norm": 1.3046875,
2688
+ "learning_rate": 7.187804237347603e-05,
2689
+ "loss": 1.895,
2690
+ "step": 55152
2691
+ },
2692
+ {
2693
+ "epoch": 0.38,
2694
+ "grad_norm": 1.09375,
2695
+ "learning_rate": 7.173529557124042e-05,
2696
+ "loss": 1.9027,
2697
+ "step": 55296
2698
+ },
2699
+ {
2700
+ "epoch": 0.38,
2701
+ "grad_norm": 1.109375,
2702
+ "learning_rate": 7.159232999918353e-05,
2703
+ "loss": 1.9182,
2704
+ "step": 55440
2705
+ },
2706
+ {
2707
+ "epoch": 0.38,
2708
+ "grad_norm": 1.5234375,
2709
+ "learning_rate": 7.144914709628066e-05,
2710
+ "loss": 1.9177,
2711
+ "step": 55584
2712
+ },
2713
+ {
2714
+ "epoch": 0.38,
2715
+ "grad_norm": 1.125,
2716
+ "learning_rate": 7.130574830369454e-05,
2717
+ "loss": 1.9248,
2718
+ "step": 55728
2719
+ },
2720
+ {
2721
+ "epoch": 0.38,
2722
+ "grad_norm": 1.171875,
2723
+ "learning_rate": 7.116213506476087e-05,
2724
+ "loss": 1.9145,
2725
+ "step": 55872
2726
+ },
2727
+ {
2728
+ "epoch": 0.38,
2729
+ "grad_norm": 1.28125,
2730
+ "learning_rate": 7.101830882497382e-05,
2731
+ "loss": 1.8965,
2732
+ "step": 56016
2733
+ },
2734
+ {
2735
+ "epoch": 0.38,
2736
+ "grad_norm": 1.1484375,
2737
+ "learning_rate": 7.08742710319714e-05,
2738
+ "loss": 1.9228,
2739
+ "step": 56160
2740
+ },
2741
+ {
2742
+ "epoch": 0.38,
2743
+ "grad_norm": 1.0859375,
2744
+ "learning_rate": 7.0730023135521e-05,
2745
+ "loss": 1.9277,
2746
+ "step": 56304
2747
+ },
2748
+ {
2749
+ "epoch": 0.38,
2750
+ "grad_norm": 1.25,
2751
+ "learning_rate": 7.058556658750471e-05,
2752
+ "loss": 1.9116,
2753
+ "step": 56448
2754
+ },
2755
+ {
2756
+ "epoch": 0.38,
2757
+ "grad_norm": 1.25,
2758
+ "learning_rate": 7.044090284190476e-05,
2759
+ "loss": 1.9069,
2760
+ "step": 56592
2761
+ },
2762
+ {
2763
+ "epoch": 0.39,
2764
+ "grad_norm": 1.0703125,
2765
+ "learning_rate": 7.029603335478883e-05,
2766
+ "loss": 1.9087,
2767
+ "step": 56736
2768
+ },
2769
+ {
2770
+ "epoch": 0.39,
2771
+ "grad_norm": 1.1484375,
2772
+ "learning_rate": 7.015095958429544e-05,
2773
+ "loss": 1.9116,
2774
+ "step": 56880
2775
+ },
2776
+ {
2777
+ "epoch": 0.39,
2778
+ "grad_norm": 1.2890625,
2779
+ "learning_rate": 7.000568299061929e-05,
2780
+ "loss": 1.9132,
2781
+ "step": 57024
2782
+ },
2783
+ {
2784
+ "epoch": 0.39,
2785
+ "grad_norm": 1.34375,
2786
+ "learning_rate": 6.986020503599651e-05,
2787
+ "loss": 1.9259,
2788
+ "step": 57168
2789
+ },
2790
+ {
2791
+ "epoch": 0.39,
2792
+ "grad_norm": 1.09375,
2793
+ "learning_rate": 6.971452718468991e-05,
2794
+ "loss": 1.9268,
2795
+ "step": 57312
2796
+ },
2797
+ {
2798
+ "epoch": 0.39,
2799
+ "grad_norm": 1.203125,
2800
+ "learning_rate": 6.956865090297439e-05,
2801
+ "loss": 1.9186,
2802
+ "step": 57456
2803
+ },
2804
+ {
2805
+ "epoch": 0.39,
2806
+ "grad_norm": 1.171875,
2807
+ "learning_rate": 6.942257765912206e-05,
2808
+ "loss": 1.891,
2809
+ "step": 57600
2810
+ },
2811
+ {
2812
+ "epoch": 0.39,
2813
+ "grad_norm": 1.65625,
2814
+ "learning_rate": 6.927630892338746e-05,
2815
+ "loss": 1.9136,
2816
+ "step": 57744
2817
+ },
2818
+ {
2819
+ "epoch": 0.39,
2820
+ "grad_norm": 1.1015625,
2821
+ "learning_rate": 6.912984616799283e-05,
2822
+ "loss": 1.9042,
2823
+ "step": 57888
2824
+ },
2825
+ {
2826
+ "epoch": 0.39,
2827
+ "grad_norm": 1.3671875,
2828
+ "learning_rate": 6.898319086711321e-05,
2829
+ "loss": 1.8964,
2830
+ "step": 58032
2831
+ },
2832
+ {
2833
+ "epoch": 0.4,
2834
+ "grad_norm": 1.0859375,
2835
+ "learning_rate": 6.88363444968617e-05,
2836
+ "loss": 1.9057,
2837
+ "step": 58176
2838
+ },
2839
+ {
2840
+ "epoch": 0.4,
2841
+ "grad_norm": 1.2421875,
2842
+ "learning_rate": 6.868930853527452e-05,
2843
+ "loss": 1.8916,
2844
+ "step": 58320
2845
+ },
2846
+ {
2847
+ "epoch": 0.4,
2848
+ "grad_norm": 1.3046875,
2849
+ "learning_rate": 6.854208446229616e-05,
2850
+ "loss": 1.9167,
2851
+ "step": 58464
2852
+ },
2853
+ {
2854
+ "epoch": 0.4,
2855
+ "grad_norm": 1.09375,
2856
+ "learning_rate": 6.839467375976449e-05,
2857
+ "loss": 1.8944,
2858
+ "step": 58608
2859
+ },
2860
+ {
2861
+ "epoch": 0.4,
2862
+ "grad_norm": 1.1484375,
2863
+ "learning_rate": 6.824707791139584e-05,
2864
+ "loss": 1.9114,
2865
+ "step": 58752
2866
+ },
2867
+ {
2868
+ "epoch": 0.4,
2869
+ "grad_norm": 1.1328125,
2870
+ "learning_rate": 6.809929840277008e-05,
2871
+ "loss": 1.8976,
2872
+ "step": 58896
2873
+ },
2874
+ {
2875
+ "epoch": 0.4,
2876
+ "grad_norm": 1.0625,
2877
+ "learning_rate": 6.795133672131564e-05,
2878
+ "loss": 1.9072,
2879
+ "step": 59040
2880
+ }
2881
+ ],
2882
+ "logging_steps": 144,
2883
+ "max_steps": 147005,
2884
+ "num_input_tokens_seen": 0,
2885
+ "num_train_epochs": 1,
2886
+ "save_steps": 1440,
2887
+ "total_flos": 2.4734817975942513e+20,
2888
+ "train_batch_size": 4,
2889
+ "trial_name": null,
2890
+ "trial_params": null
2891
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8e7f668272e1e3c39c7953a047fd5f2806fb8c3611ae881516e8311e33ea8373
3
+ size 4920
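For reference, the repeated `epoch` / `grad_norm` / `learning_rate` / `loss` / `step` records in the diff above form a training log sampled every 144 optimizer steps (`logging_steps`). Below is a minimal sketch for inspecting that log locally after downloading the checkpoint; it assumes the file is a standard Trainer state saved as `trainer_state.json` with the records under a `log_history` key (both the filename and the key are assumptions, not something this diff confirms).

```python
import json

import matplotlib.pyplot as plt

# Assumed local path to the downloaded trainer state file; adjust as needed.
STATE_PATH = "trainer_state.json"

with open(STATE_PATH, "r", encoding="utf-8") as f:
    state = json.load(f)

# Each logged record carries epoch, grad_norm, learning_rate, loss and step;
# keep only entries that actually contain a loss value.
history = [e for e in state.get("log_history", []) if "loss" in e]

steps = [e["step"] for e in history]
losses = [e["loss"] for e in history]
lrs = [e["learning_rate"] for e in history]

fig, (ax_loss, ax_lr) = plt.subplots(1, 2, figsize=(10, 4))

# Training loss versus optimizer step.
ax_loss.plot(steps, losses)
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("training loss")

# Learning-rate schedule versus optimizer step.
ax_lr.plot(steps, lrs)
ax_lr.set_xlabel("step")
ax_lr.set_ylabel("learning rate")

fig.tight_layout()
fig.savefig("training_curves.png", dpi=150)
```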