preemware committed
Commit 1fbb158
1 Parent(s): 5bc608f

Upload folder using huggingface_hub

added_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "<|im_end|>": 32000,
+   "<|im_start|>": 32001
+ }
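
The two ChatML markers above extend the base Mistral vocabulary (32000 tokens) to 32002. As a quick sanity check, a minimal sketch assuming the standard transformers AutoTokenizer API and a local checkout of this repo (the "./" path is illustrative):

from transformers import AutoTokenizer

# Load the tokenizer shipped in this commit (tokenizer.model plus the JSON configs).
tok = AutoTokenizer.from_pretrained("./")

# The ChatML markers should resolve to the IDs recorded in added_tokens.json.
print(tok.convert_tokens_to_ids("<|im_end|>"))    # expected: 32000
print(tok.convert_tokens_to_ids("<|im_start|>"))  # expected: 32001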
config.json ADDED
@@ -0,0 +1,26 @@
+ {
+   "_name_or_path": "alpindale/Mistral-7B-v0.2-hf",
+   "architectures": [
+     "MistralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 32000,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 14336,
+   "max_position_embeddings": 32768,
+   "model_type": "mistral",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 8,
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.40.0.dev0",
+   "use_cache": false,
+   "vocab_size": 32002
+ }
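
config.json describes a 32-layer Mistral with hidden_size 4096, 8 KV heads, rope_theta 1e6, no sliding window, and the vocabulary grown to 32002 for the two added tokens. A minimal loading sketch, assuming the standard transformers API and a local copy of this repo (path illustrative):

import torch
from transformers import AutoConfig, AutoModelForCausalLM

cfg = AutoConfig.from_pretrained("./")  # reads config.json
assert cfg.model_type == "mistral" and cfg.vocab_size == 32002

# bfloat16 matches the torch_dtype recorded above.
model = AutoModelForCausalLM.from_pretrained("./", torch_dtype=torch.bfloat16)
print(model.get_input_embeddings().weight.shape)  # expected: torch.Size([32002, 4096])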
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "do_sample": true,
+   "eos_token_id": 2,
+   "transformers_version": "4.40.0.dev0"
+ }
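
Note the mismatch: generation_config.json sets eos_token_id to 2 (</s>), while config.json sets 32000 (<|im_end|>), so generations may run past the ChatML end marker depending on which value the loader applies. A hedged sketch of pinning the stop token at generation time, continuing from the model and tokenizer loaded above (the prompt text is illustrative):

inputs = tok("<|im_start|>user\nHi<|im_end|>\n<|im_start|>assistant\n",
             return_tensors="pt")
out = model.generate(
    **inputs,
    max_new_tokens=64,
    do_sample=True,  # matches generation_config.json
    eos_token_id=tok.convert_tokens_to_ids("<|im_end|>"),  # 32000, overriding the default of 2
)
print(tok.decode(out[0], skip_special_tokens=False))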
model-00001-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00223efab0d26506448b5fd217f1bebdd658227dee59c10abf83fd1271cc3c75
+ size 4943178720
model-00002-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff1500bf47a4bb0567f06f397c79467d3d7097ac6ac6c732fe11abf5bb8cdcbb
+ size 4999819336
model-00003-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b0a56c231d8801d62f0ccae7b8c0e8df8dd9a4094bd6db85e0c36de7b94e9d46
+ size 4540532728
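
The three shard entries above (and tokenizer.model below) are Git LFS pointer files: the repo tracks only an oid and size, and the roughly 14 GB of actual weights live in LFS storage. A small sketch for verifying a downloaded shard against the sha256 in its pointer (file names as above; assumes the shards were already fetched, e.g. with huggingface_hub or git lfs pull):

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream in 1 MiB chunks so a ~5 GB shard never has to fit in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# oid taken from the model-00001-of-00003.safetensors pointer above.
expected = "00223efab0d26506448b5fd217f1bebdd658227dee59c10abf83fd1271cc3c75"
assert sha256_of("model-00001-of-00003.safetensors") == expected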
model.safetensors.index.json ADDED
@@ -0,0 +1,298 @@
1
+ {
2
+ "metadata": {
3
+ "total_size": 14483496960
4
+ },
5
+ "weight_map": {
6
+ "lm_head.weight": "model-00003-of-00003.safetensors",
7
+ "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
8
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
9
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
10
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
11
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
12
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
13
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
14
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
15
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
16
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
17
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
18
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
19
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
20
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
21
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
22
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
23
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
24
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
25
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
26
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00003.safetensors",
27
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
28
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
29
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
30
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
31
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
32
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
33
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
34
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
35
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
36
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
37
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
38
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
39
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
40
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
41
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
42
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
43
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
44
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
45
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
46
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
47
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
48
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
49
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
50
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
51
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
52
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
53
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
54
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
55
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
56
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
57
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
58
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
59
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
60
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
61
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
62
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
63
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
64
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
65
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
66
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
67
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
68
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
69
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
70
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
71
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
72
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
73
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
74
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
75
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
76
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
77
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
78
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
79
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
80
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
81
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
82
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
83
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
84
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
85
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
86
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
87
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
88
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
89
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
90
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
91
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
92
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
93
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
94
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
95
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
96
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
97
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
98
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
99
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
100
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
101
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
102
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
103
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
104
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
105
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
106
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
107
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
108
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
109
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
110
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
111
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
112
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
113
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
114
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
115
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
116
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
117
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
118
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
119
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
120
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
121
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
122
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
123
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
124
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
125
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
126
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
127
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
128
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
129
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
130
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
131
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
132
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
133
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
134
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
135
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
136
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
137
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
138
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
139
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
140
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
141
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
142
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
143
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00003.safetensors",
144
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
145
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
146
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
147
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
148
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
149
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
150
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
151
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
152
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
153
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
154
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
155
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
156
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
157
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
158
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
159
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
160
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
161
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
162
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
163
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
164
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
165
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
166
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
167
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
168
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
169
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
170
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
171
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
172
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
173
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
174
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
175
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
176
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
177
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
178
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
179
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
180
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
181
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
182
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
183
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
184
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
185
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
186
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
187
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
188
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
189
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
190
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
191
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
192
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
193
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
194
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
195
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
196
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
197
+ "model.layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
198
+ "model.layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
199
+ "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
200
+ "model.layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
201
+ "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
202
+ "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
203
+ "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
204
+ "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
205
+ "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
206
+ "model.layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
207
+ "model.layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
208
+ "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
209
+ "model.layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
210
+ "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
211
+ "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
212
+ "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
213
+ "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
214
+ "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
215
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
216
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
217
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
218
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
219
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
220
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
221
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
222
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
223
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
224
+ "model.layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
225
+ "model.layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
226
+ "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
227
+ "model.layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
228
+ "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
229
+ "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
230
+ "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
231
+ "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
232
+ "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
233
+ "model.layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
234
+ "model.layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
235
+ "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
236
+ "model.layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
237
+ "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
238
+ "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
239
+ "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
240
+ "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
241
+ "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
242
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
243
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
244
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
245
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
246
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
247
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
248
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
249
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
250
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
251
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
252
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
253
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
254
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
255
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
256
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
257
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
258
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
259
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
260
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
261
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
262
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
263
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
264
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
265
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
266
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
267
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
268
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
269
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
270
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
271
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
272
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
273
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
274
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
275
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
276
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
277
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
278
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
279
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
280
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
281
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
282
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
283
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
284
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
285
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
286
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
287
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
288
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
289
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
290
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
291
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
292
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
293
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
294
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
295
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
296
+ "model.norm.weight": "model-00003-of-00003.safetensors"
297
+ }
298
+ }
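
The index file maps every tensor name to the shard that holds it (total_size 14483496960 bytes across the three shards above). A minimal sketch of pulling a single tensor via that map without loading the whole model, assuming the safetensors package and a local copy of the shards:

import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.0.mlp.down_proj.weight"
shard = index["weight_map"][name]  # "model-00001-of-00003.safetensors" per the map above

with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(tensor.shape, tensor.dtype)  # expected: torch.Size([4096, 14336]) torch.bfloat16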
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "</s>",
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
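
special_tokens_map.json is what makes the loaded tokenizer report <|im_end|> as EOS while reusing </s> for padding. A quick check, continuing from the tokenizer loaded earlier (expected values are read off the files above):

print(tok.bos_token, tok.bos_token_id)  # expected: <s> 1
print(tok.eos_token, tok.eos_token_id)  # expected: <|im_end|> 32000
print(tok.pad_token, tok.pad_token_id)  # expected: </s> 2
print(tok.unk_token, tok.unk_token_id)  # expected: <unk> 0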
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+ size 493443
tokenizer_config.json ADDED
@@ -0,0 +1,60 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "add_prefix_space": true,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32000": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32001": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "bos_token": "<s>",
+   "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|im_end|>",
+   "legacy": true,
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "</s>",
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": false,
+   "use_fast": true
+ }
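
The chat_template above bakes the ChatML format into the tokenizer, so prompts can be built with apply_chat_template instead of hand-writing the <|im_start|>/<|im_end|> markers. A minimal sketch, continuing from the tokenizer loaded earlier (the messages are illustrative):

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Summarize this commit in one sentence."},
]

# Renders the Jinja template from tokenizer_config.json; add_generation_prompt
# appends the opening "<|im_start|>assistant\n" for the model to complete.
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Summarize this commit in one sentence.<|im_end|>
# <|im_start|>assistant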
trainer_state.json ADDED
@@ -0,0 +1,3297 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 3.9423076923076925,
5
+ "eval_steps": 500,
6
+ "global_step": 468,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.01,
13
+ "grad_norm": 26.249136076838056,
14
+ "learning_rate": 5.000000000000001e-07,
15
+ "loss": 1.1102,
16
+ "step": 1
17
+ },
18
+ {
19
+ "epoch": 0.02,
20
+ "grad_norm": 26.110264576346324,
21
+ "learning_rate": 1.0000000000000002e-06,
22
+ "loss": 1.0997,
23
+ "step": 2
24
+ },
25
+ {
26
+ "epoch": 0.03,
27
+ "grad_norm": 25.94374396489923,
28
+ "learning_rate": 1.5e-06,
29
+ "loss": 1.1239,
30
+ "step": 3
31
+ },
32
+ {
33
+ "epoch": 0.03,
34
+ "grad_norm": 22.37475129701464,
35
+ "learning_rate": 2.0000000000000003e-06,
36
+ "loss": 1.1059,
37
+ "step": 4
38
+ },
39
+ {
40
+ "epoch": 0.04,
41
+ "grad_norm": 16.25191401266457,
42
+ "learning_rate": 2.5e-06,
43
+ "loss": 1.0405,
44
+ "step": 5
45
+ },
46
+ {
47
+ "epoch": 0.05,
48
+ "grad_norm": 22.679130338310745,
49
+ "learning_rate": 3e-06,
50
+ "loss": 1.0213,
51
+ "step": 6
52
+ },
53
+ {
54
+ "epoch": 0.06,
55
+ "grad_norm": 15.916959043580418,
56
+ "learning_rate": 3.5e-06,
57
+ "loss": 1.0104,
58
+ "step": 7
59
+ },
60
+ {
61
+ "epoch": 0.07,
62
+ "grad_norm": 9.482985220910942,
63
+ "learning_rate": 4.000000000000001e-06,
64
+ "loss": 0.9937,
65
+ "step": 8
66
+ },
67
+ {
68
+ "epoch": 0.08,
69
+ "grad_norm": 6.965472899289267,
70
+ "learning_rate": 4.5e-06,
71
+ "loss": 0.9693,
72
+ "step": 9
73
+ },
74
+ {
75
+ "epoch": 0.09,
76
+ "grad_norm": 12.298531856803667,
77
+ "learning_rate": 5e-06,
78
+ "loss": 0.962,
79
+ "step": 10
80
+ },
81
+ {
82
+ "epoch": 0.09,
83
+ "grad_norm": 6.569727331935135,
84
+ "learning_rate": 4.999941186489917e-06,
85
+ "loss": 0.9374,
86
+ "step": 11
87
+ },
88
+ {
89
+ "epoch": 0.1,
90
+ "grad_norm": 5.377638735854951,
91
+ "learning_rate": 4.999764748726891e-06,
92
+ "loss": 0.9315,
93
+ "step": 12
94
+ },
95
+ {
96
+ "epoch": 0.11,
97
+ "grad_norm": 4.601213320961148,
98
+ "learning_rate": 4.999470695012462e-06,
99
+ "loss": 0.9317,
100
+ "step": 13
101
+ },
102
+ {
103
+ "epoch": 0.12,
104
+ "grad_norm": 6.018739227425751,
105
+ "learning_rate": 4.999059039182093e-06,
106
+ "loss": 0.9246,
107
+ "step": 14
108
+ },
109
+ {
110
+ "epoch": 0.13,
111
+ "grad_norm": 5.719715824372754,
112
+ "learning_rate": 4.998529800604525e-06,
113
+ "loss": 0.9136,
114
+ "step": 15
115
+ },
116
+ {
117
+ "epoch": 0.14,
118
+ "grad_norm": 3.9341753543248505,
119
+ "learning_rate": 4.99788300418086e-06,
120
+ "loss": 0.9221,
121
+ "step": 16
122
+ },
123
+ {
124
+ "epoch": 0.15,
125
+ "grad_norm": 4.060460637107745,
126
+ "learning_rate": 4.997118680343392e-06,
127
+ "loss": 0.9021,
128
+ "step": 17
129
+ },
130
+ {
131
+ "epoch": 0.15,
132
+ "grad_norm": 3.576152525840241,
133
+ "learning_rate": 4.996236865054177e-06,
134
+ "loss": 0.8804,
135
+ "step": 18
136
+ },
137
+ {
138
+ "epoch": 0.16,
139
+ "grad_norm": 3.2384463037464157,
140
+ "learning_rate": 4.995237599803336e-06,
141
+ "loss": 0.8864,
142
+ "step": 19
143
+ },
144
+ {
145
+ "epoch": 0.17,
146
+ "grad_norm": 2.961735655562835,
147
+ "learning_rate": 4.994120931607106e-06,
148
+ "loss": 0.884,
149
+ "step": 20
150
+ },
151
+ {
152
+ "epoch": 0.18,
153
+ "grad_norm": 3.715236432103334,
154
+ "learning_rate": 4.992886913005628e-06,
155
+ "loss": 0.8803,
156
+ "step": 21
157
+ },
158
+ {
159
+ "epoch": 0.19,
160
+ "grad_norm": 2.526746110251263,
161
+ "learning_rate": 4.991535602060475e-06,
162
+ "loss": 0.8856,
163
+ "step": 22
164
+ },
165
+ {
166
+ "epoch": 0.2,
167
+ "grad_norm": 2.663980149991846,
168
+ "learning_rate": 4.9900670623519185e-06,
169
+ "loss": 0.8613,
170
+ "step": 23
171
+ },
172
+ {
173
+ "epoch": 0.21,
174
+ "grad_norm": 2.4521755603079787,
175
+ "learning_rate": 4.988481362975939e-06,
176
+ "loss": 0.8692,
177
+ "step": 24
178
+ },
179
+ {
180
+ "epoch": 0.21,
181
+ "grad_norm": 2.414349491574281,
182
+ "learning_rate": 4.986778578540973e-06,
183
+ "loss": 0.8602,
184
+ "step": 25
185
+ },
186
+ {
187
+ "epoch": 0.22,
188
+ "grad_norm": 2.8508832900360477,
189
+ "learning_rate": 4.984958789164404e-06,
190
+ "loss": 0.8526,
191
+ "step": 26
192
+ },
193
+ {
194
+ "epoch": 0.23,
195
+ "grad_norm": 3.196516977267481,
196
+ "learning_rate": 4.983022080468794e-06,
197
+ "loss": 0.8416,
198
+ "step": 27
199
+ },
200
+ {
201
+ "epoch": 0.24,
202
+ "grad_norm": 3.6095238921057553,
203
+ "learning_rate": 4.980968543577849e-06,
204
+ "loss": 0.8477,
205
+ "step": 28
206
+ },
207
+ {
208
+ "epoch": 0.25,
209
+ "grad_norm": 2.2205063108737506,
210
+ "learning_rate": 4.978798275112142e-06,
211
+ "loss": 0.8502,
212
+ "step": 29
213
+ },
214
+ {
215
+ "epoch": 0.26,
216
+ "grad_norm": 4.453254154090838,
217
+ "learning_rate": 4.976511377184557e-06,
218
+ "loss": 0.8468,
219
+ "step": 30
220
+ },
221
+ {
222
+ "epoch": 0.26,
223
+ "grad_norm": 2.9249988861975766,
224
+ "learning_rate": 4.97410795739549e-06,
225
+ "loss": 0.8391,
226
+ "step": 31
227
+ },
228
+ {
229
+ "epoch": 0.27,
230
+ "grad_norm": 4.462696000038617,
231
+ "learning_rate": 4.971588128827783e-06,
232
+ "loss": 0.8436,
233
+ "step": 32
234
+ },
235
+ {
236
+ "epoch": 0.28,
237
+ "grad_norm": 3.587837288732032,
238
+ "learning_rate": 4.968952010041408e-06,
239
+ "loss": 0.8564,
240
+ "step": 33
241
+ },
242
+ {
243
+ "epoch": 0.29,
244
+ "grad_norm": 4.2433648029783635,
245
+ "learning_rate": 4.966199725067883e-06,
246
+ "loss": 0.8501,
247
+ "step": 34
248
+ },
249
+ {
250
+ "epoch": 0.3,
251
+ "grad_norm": 3.55480062205993,
252
+ "learning_rate": 4.96333140340444e-06,
253
+ "loss": 0.8474,
254
+ "step": 35
255
+ },
256
+ {
257
+ "epoch": 0.31,
258
+ "grad_norm": 3.430958497804354,
259
+ "learning_rate": 4.960347180007932e-06,
260
+ "loss": 0.8343,
261
+ "step": 36
262
+ },
263
+ {
264
+ "epoch": 0.32,
265
+ "grad_norm": 3.0559248054083428,
266
+ "learning_rate": 4.957247195288479e-06,
267
+ "loss": 0.8358,
268
+ "step": 37
269
+ },
270
+ {
271
+ "epoch": 0.32,
272
+ "grad_norm": 3.2032433802521147,
273
+ "learning_rate": 4.9540315951028695e-06,
274
+ "loss": 0.8538,
275
+ "step": 38
276
+ },
277
+ {
278
+ "epoch": 0.33,
279
+ "grad_norm": 2.757473867230571,
280
+ "learning_rate": 4.9507005307476894e-06,
281
+ "loss": 0.8488,
282
+ "step": 39
283
+ },
284
+ {
285
+ "epoch": 0.34,
286
+ "grad_norm": 3.045305839090202,
287
+ "learning_rate": 4.947254158952209e-06,
288
+ "loss": 0.8463,
289
+ "step": 40
290
+ },
291
+ {
292
+ "epoch": 0.35,
293
+ "grad_norm": 2.9629285351308554,
294
+ "learning_rate": 4.943692641871005e-06,
295
+ "loss": 0.828,
296
+ "step": 41
297
+ },
298
+ {
299
+ "epoch": 0.36,
300
+ "grad_norm": 2.7499225905634037,
301
+ "learning_rate": 4.940016147076337e-06,
302
+ "loss": 0.835,
303
+ "step": 42
304
+ },
305
+ {
306
+ "epoch": 0.37,
307
+ "grad_norm": 2.606092501631258,
308
+ "learning_rate": 4.9362248475502515e-06,
309
+ "loss": 0.8269,
310
+ "step": 43
311
+ },
312
+ {
313
+ "epoch": 0.38,
314
+ "grad_norm": 2.674180949875197,
315
+ "learning_rate": 4.932318921676458e-06,
316
+ "loss": 0.8417,
317
+ "step": 44
318
+ },
319
+ {
320
+ "epoch": 0.38,
321
+ "grad_norm": 2.7285602808097336,
322
+ "learning_rate": 4.928298553231924e-06,
323
+ "loss": 0.8142,
324
+ "step": 45
325
+ },
326
+ {
327
+ "epoch": 0.39,
328
+ "grad_norm": 2.7015909165553387,
329
+ "learning_rate": 4.924163931378233e-06,
330
+ "loss": 0.8323,
331
+ "step": 46
332
+ },
333
+ {
334
+ "epoch": 0.4,
335
+ "grad_norm": 2.5415943230444498,
336
+ "learning_rate": 4.919915250652686e-06,
337
+ "loss": 0.8244,
338
+ "step": 47
339
+ },
340
+ {
341
+ "epoch": 0.41,
342
+ "grad_norm": 2.776100967798618,
343
+ "learning_rate": 4.9155527109591435e-06,
344
+ "loss": 0.8516,
345
+ "step": 48
346
+ },
347
+ {
348
+ "epoch": 0.42,
349
+ "grad_norm": 2.711552634387251,
350
+ "learning_rate": 4.911076517558623e-06,
351
+ "loss": 0.8313,
352
+ "step": 49
353
+ },
354
+ {
355
+ "epoch": 0.43,
356
+ "grad_norm": 2.905341213972799,
357
+ "learning_rate": 4.906486881059641e-06,
358
+ "loss": 0.827,
359
+ "step": 50
360
+ },
361
+ {
362
+ "epoch": 0.44,
363
+ "grad_norm": 2.6533492618702206,
364
+ "learning_rate": 4.901784017408303e-06,
365
+ "loss": 0.8298,
366
+ "step": 51
367
+ },
368
+ {
369
+ "epoch": 0.44,
370
+ "grad_norm": 2.3477688431170414,
371
+ "learning_rate": 4.896968147878146e-06,
372
+ "loss": 0.8014,
373
+ "step": 52
374
+ },
375
+ {
376
+ "epoch": 0.45,
377
+ "grad_norm": 2.8384104093830587,
378
+ "learning_rate": 4.892039499059721e-06,
379
+ "loss": 0.8116,
380
+ "step": 53
381
+ },
382
+ {
383
+ "epoch": 0.46,
384
+ "grad_norm": 2.432850021289229,
385
+ "learning_rate": 4.886998302849938e-06,
386
+ "loss": 0.8156,
387
+ "step": 54
388
+ },
389
+ {
390
+ "epoch": 0.47,
391
+ "grad_norm": 2.804790940572451,
392
+ "learning_rate": 4.881844796441153e-06,
393
+ "loss": 0.8159,
394
+ "step": 55
395
+ },
396
+ {
397
+ "epoch": 0.48,
398
+ "grad_norm": 2.3348703819353926,
399
+ "learning_rate": 4.876579222310007e-06,
400
+ "loss": 0.8096,
401
+ "step": 56
402
+ },
403
+ {
404
+ "epoch": 0.49,
405
+ "grad_norm": 3.1901608706880134,
406
+ "learning_rate": 4.8712018282060165e-06,
407
+ "loss": 0.811,
408
+ "step": 57
409
+ },
410
+ {
411
+ "epoch": 0.5,
412
+ "grad_norm": 2.660868627279477,
413
+ "learning_rate": 4.86571286713992e-06,
414
+ "loss": 0.818,
415
+ "step": 58
416
+ },
417
+ {
418
+ "epoch": 0.5,
419
+ "grad_norm": 2.7869506219362514,
420
+ "learning_rate": 4.860112597371772e-06,
421
+ "loss": 0.8267,
422
+ "step": 59
423
+ },
424
+ {
425
+ "epoch": 0.51,
426
+ "grad_norm": 2.3781426665180727,
427
+ "learning_rate": 4.85440128239879e-06,
428
+ "loss": 0.8112,
429
+ "step": 60
430
+ },
431
+ {
432
+ "epoch": 0.52,
433
+ "grad_norm": 3.015345568347538,
434
+ "learning_rate": 4.8485791909429575e-06,
435
+ "loss": 0.8151,
436
+ "step": 61
437
+ },
438
+ {
439
+ "epoch": 0.53,
440
+ "grad_norm": 2.4423049063633546,
441
+ "learning_rate": 4.842646596938383e-06,
442
+ "loss": 0.8282,
443
+ "step": 62
444
+ },
445
+ {
446
+ "epoch": 0.54,
447
+ "grad_norm": 2.6311610579830345,
448
+ "learning_rate": 4.8366037795184086e-06,
449
+ "loss": 0.8224,
450
+ "step": 63
451
+ },
452
+ {
453
+ "epoch": 0.55,
454
+ "grad_norm": 2.2899705695712282,
455
+ "learning_rate": 4.830451023002477e-06,
456
+ "loss": 0.8249,
457
+ "step": 64
458
+ },
459
+ {
460
+ "epoch": 0.56,
461
+ "grad_norm": 2.734019031576332,
462
+ "learning_rate": 4.824188616882754e-06,
463
+ "loss": 0.8136,
464
+ "step": 65
465
+ },
466
+ {
467
+ "epoch": 0.56,
468
+ "grad_norm": 2.0962707496285153,
469
+ "learning_rate": 4.817816855810507e-06,
470
+ "loss": 0.8184,
471
+ "step": 66
472
+ },
473
+ {
474
+ "epoch": 0.57,
475
+ "grad_norm": 2.9665475220491966,
476
+ "learning_rate": 4.811336039582244e-06,
477
+ "loss": 0.817,
478
+ "step": 67
479
+ },
480
+ {
481
+ "epoch": 0.58,
482
+ "grad_norm": 2.3700279096809824,
483
+ "learning_rate": 4.804746473125605e-06,
484
+ "loss": 0.81,
485
+ "step": 68
486
+ },
487
+ {
488
+ "epoch": 0.59,
489
+ "grad_norm": 3.027450051648286,
490
+ "learning_rate": 4.798048466485018e-06,
491
+ "loss": 0.8219,
492
+ "step": 69
493
+ },
494
+ {
495
+ "epoch": 0.6,
496
+ "grad_norm": 2.752180087601986,
497
+ "learning_rate": 4.791242334807106e-06,
498
+ "loss": 0.8101,
499
+ "step": 70
500
+ },
501
+ {
502
+ "epoch": 0.61,
503
+ "grad_norm": 2.573031098084336,
504
+ "learning_rate": 4.784328398325866e-06,
505
+ "loss": 0.8099,
506
+ "step": 71
507
+ },
508
+ {
509
+ "epoch": 0.62,
510
+ "grad_norm": 2.7123062400387576,
511
+ "learning_rate": 4.7773069823475945e-06,
512
+ "loss": 0.8088,
513
+ "step": 72
514
+ },
515
+ {
516
+ "epoch": 0.62,
517
+ "grad_norm": 2.474407154924412,
518
+ "learning_rate": 4.770178417235589e-06,
519
+ "loss": 0.8232,
520
+ "step": 73
521
+ },
522
+ {
523
+ "epoch": 0.63,
524
+ "grad_norm": 2.4442790557938965,
525
+ "learning_rate": 4.762943038394597e-06,
526
+ "loss": 0.8051,
527
+ "step": 74
528
+ },
529
+ {
530
+ "epoch": 0.64,
531
+ "grad_norm": 2.1553005222889583,
532
+ "learning_rate": 4.755601186255041e-06,
533
+ "loss": 0.825,
534
+ "step": 75
535
+ },
536
+ {
537
+ "epoch": 0.65,
538
+ "grad_norm": 2.312089317706644,
539
+ "learning_rate": 4.7481532062569945e-06,
540
+ "loss": 0.8168,
541
+ "step": 76
542
+ },
543
+ {
544
+ "epoch": 0.66,
545
+ "grad_norm": 2.2731409969922693,
546
+ "learning_rate": 4.7405994488339375e-06,
547
+ "loss": 0.8095,
548
+ "step": 77
549
+ },
550
+ {
551
+ "epoch": 0.67,
552
+ "grad_norm": 2.444830999943097,
553
+ "learning_rate": 4.732940269396259e-06,
554
+ "loss": 0.8108,
555
+ "step": 78
556
+ },
557
+ {
558
+ "epoch": 0.68,
559
+ "grad_norm": 2.315315922207351,
560
+ "learning_rate": 4.725176028314541e-06,
561
+ "loss": 0.8072,
562
+ "step": 79
563
+ },
564
+ {
565
+ "epoch": 0.68,
566
+ "grad_norm": 2.231139324893734,
567
+ "learning_rate": 4.7173070909026015e-06,
568
+ "loss": 0.8093,
569
+ "step": 80
570
+ },
571
+ {
572
+ "epoch": 0.69,
573
+ "grad_norm": 2.3706185711850956,
574
+ "learning_rate": 4.7093338274003035e-06,
575
+ "loss": 0.8011,
576
+ "step": 81
577
+ },
578
+ {
579
+ "epoch": 0.7,
580
+ "grad_norm": 2.2002695782644905,
581
+ "learning_rate": 4.701256612956137e-06,
582
+ "loss": 0.8136,
583
+ "step": 82
584
+ },
585
+ {
586
+ "epoch": 0.71,
587
+ "grad_norm": 2.704162588410486,
588
+ "learning_rate": 4.693075827609569e-06,
589
+ "loss": 0.8161,
590
+ "step": 83
591
+ },
592
+ {
593
+ "epoch": 0.72,
594
+ "grad_norm": 1.9269420606373808,
595
+ "learning_rate": 4.684791856273161e-06,
596
+ "loss": 0.8023,
597
+ "step": 84
598
+ },
599
+ {
600
+ "epoch": 0.73,
601
+ "grad_norm": 2.4468707403860037,
602
+ "learning_rate": 4.676405088714458e-06,
603
+ "loss": 0.8019,
604
+ "step": 85
605
+ },
606
+ {
607
+ "epoch": 0.74,
608
+ "grad_norm": 2.07255184003701,
609
+ "learning_rate": 4.667915919537651e-06,
610
+ "loss": 0.8155,
611
+ "step": 86
612
+ },
613
+ {
614
+ "epoch": 0.74,
615
+ "grad_norm": 2.2036583211359746,
616
+ "learning_rate": 4.6593247481650105e-06,
617
+ "loss": 0.8175,
618
+ "step": 87
619
+ },
620
+ {
621
+ "epoch": 0.75,
622
+ "grad_norm": 1.8140377607105893,
623
+ "learning_rate": 4.65063197881809e-06,
624
+ "loss": 0.8047,
625
+ "step": 88
626
+ },
627
+ {
628
+ "epoch": 0.76,
629
+ "grad_norm": 2.1515853560068243,
630
+ "learning_rate": 4.641838020498713e-06,
631
+ "loss": 0.8185,
632
+ "step": 89
633
+ },
634
+ {
635
+ "epoch": 0.77,
636
+ "grad_norm": 2.422760543528869,
637
+ "learning_rate": 4.632943286969724e-06,
638
+ "loss": 0.8053,
639
+ "step": 90
640
+ },
641
+ {
642
+ "epoch": 0.78,
643
+ "grad_norm": 2.0829660459092776,
644
+ "learning_rate": 4.6239481967355226e-06,
645
+ "loss": 0.8016,
646
+ "step": 91
647
+ },
648
+ {
649
+ "epoch": 0.79,
650
+ "grad_norm": 2.1797861749114857,
651
+ "learning_rate": 4.614853173022374e-06,
652
+ "loss": 0.8068,
653
+ "step": 92
654
+ },
655
+ {
656
+ "epoch": 0.79,
657
+ "grad_norm": 2.6076944835409135,
658
+ "learning_rate": 4.605658643758492e-06,
659
+ "loss": 0.8121,
660
+ "step": 93
661
+ },
662
+ {
663
+ "epoch": 0.8,
664
+ "grad_norm": 1.8970468662695665,
665
+ "learning_rate": 4.59636504155391e-06,
666
+ "loss": 0.8146,
667
+ "step": 94
668
+ },
669
+ {
670
+ "epoch": 0.81,
671
+ "grad_norm": 2.195985971319255,
672
+ "learning_rate": 4.586972803680119e-06,
673
+ "loss": 0.7956,
674
+ "step": 95
675
+ },
676
+ {
677
+ "epoch": 0.82,
678
+ "grad_norm": 2.7905344729119324,
679
+ "learning_rate": 4.577482372049503e-06,
680
+ "loss": 0.7953,
681
+ "step": 96
682
+ },
683
+ {
684
+ "epoch": 0.83,
685
+ "grad_norm": 1.940825819357636,
686
+ "learning_rate": 4.567894193194538e-06,
687
+ "loss": 0.8047,
688
+ "step": 97
689
+ },
690
+ {
691
+ "epoch": 0.84,
692
+ "grad_norm": 1.7851389603760988,
693
+ "learning_rate": 4.558208718246787e-06,
694
+ "loss": 0.8105,
695
+ "step": 98
696
+ },
697
+ {
698
+ "epoch": 0.85,
699
+ "grad_norm": 2.4544053049998884,
700
+ "learning_rate": 4.548426402915674e-06,
701
+ "loss": 0.8007,
702
+ "step": 99
703
+ },
704
+ {
705
+ "epoch": 0.85,
706
+ "grad_norm": 2.1082597379545165,
707
+ "learning_rate": 4.538547707467038e-06,
708
+ "loss": 0.805,
709
+ "step": 100
710
+ },
711
+ {
712
+ "epoch": 0.86,
713
+ "grad_norm": 1.9122304736142002,
714
+ "learning_rate": 4.528573096701484e-06,
715
+ "loss": 0.8067,
716
+ "step": 101
717
+ },
718
+ {
719
+ "epoch": 0.87,
720
+ "grad_norm": 1.990095091929317,
721
+ "learning_rate": 4.5185030399325085e-06,
722
+ "loss": 0.8025,
723
+ "step": 102
724
+ },
725
+ {
726
+ "epoch": 0.88,
727
+ "grad_norm": 2.029120797101969,
728
+ "learning_rate": 4.508338010964419e-06,
729
+ "loss": 0.8051,
730
+ "step": 103
731
+ },
732
+ {
733
+ "epoch": 0.89,
734
+ "grad_norm": 1.9232157433946324,
735
+ "learning_rate": 4.498078488070044e-06,
736
+ "loss": 0.8078,
737
+ "step": 104
738
+ },
739
+ {
740
+ "epoch": 0.9,
741
+ "grad_norm": 2.026639315850326,
742
+ "learning_rate": 4.4877249539682235e-06,
743
+ "loss": 0.7957,
744
+ "step": 105
745
+ },
746
+ {
747
+ "epoch": 0.91,
748
+ "grad_norm": 2.321397841878117,
749
+ "learning_rate": 4.477277895801105e-06,
750
+ "loss": 0.8039,
751
+ "step": 106
752
+ },
753
+ {
754
+ "epoch": 0.91,
755
+ "grad_norm": 2.1058946605532656,
756
+ "learning_rate": 4.466737805111218e-06,
757
+ "loss": 0.7921,
758
+ "step": 107
759
+ },
760
+ {
761
+ "epoch": 0.92,
762
+ "grad_norm": 2.9367272096486814,
763
+ "learning_rate": 4.456105177818345e-06,
764
+ "loss": 0.7922,
765
+ "step": 108
766
+ },
767
+ {
768
+ "epoch": 0.93,
769
+ "grad_norm": 2.5699051489574605,
770
+ "learning_rate": 4.445380514196192e-06,
771
+ "loss": 0.7984,
772
+ "step": 109
773
+ },
774
+ {
775
+ "epoch": 0.94,
776
+ "grad_norm": 2.0519065594178003,
777
+ "learning_rate": 4.434564318848851e-06,
778
+ "loss": 0.784,
779
+ "step": 110
780
+ },
781
+ {
782
+ "epoch": 0.95,
783
+ "grad_norm": 2.809445653695483,
784
+ "learning_rate": 4.423657100687051e-06,
785
+ "loss": 0.7835,
786
+ "step": 111
787
+ },
788
+ {
789
+ "epoch": 0.96,
790
+ "grad_norm": 2.170793808104346,
791
+ "learning_rate": 4.41265937290422e-06,
792
+ "loss": 0.8039,
793
+ "step": 112
794
+ },
795
+ {
796
+ "epoch": 0.97,
797
+ "grad_norm": 2.612499453872831,
798
+ "learning_rate": 4.401571652952338e-06,
799
+ "loss": 0.8099,
800
+ "step": 113
801
+ },
802
+ {
803
+ "epoch": 0.97,
804
+ "grad_norm": 2.1820432279807718,
805
+ "learning_rate": 4.390394462517589e-06,
806
+ "loss": 0.7929,
807
+ "step": 114
808
+ },
809
+ {
810
+ "epoch": 0.98,
811
+ "grad_norm": 2.335653384387635,
812
+ "learning_rate": 4.379128327495813e-06,
813
+ "loss": 0.7999,
814
+ "step": 115
815
+ },
816
+ {
817
+ "epoch": 0.99,
818
+ "grad_norm": 1.9923005143051236,
819
+ "learning_rate": 4.367773777967769e-06,
820
+ "loss": 0.8123,
821
+ "step": 116
822
+ },
823
+ {
824
+ "epoch": 1.0,
825
+ "grad_norm": 2.1550829328561787,
826
+ "learning_rate": 4.3563313481741855e-06,
827
+ "loss": 0.7905,
828
+ "step": 117
829
+ },
830
+ {
831
+ "epoch": 1.01,
832
+ "grad_norm": 1.8608335678809116,
833
+ "learning_rate": 4.344801576490631e-06,
834
+ "loss": 0.8051,
835
+ "step": 118
836
+ },
837
+ {
838
+ "epoch": 1.02,
839
+ "grad_norm": 2.1245561127414923,
840
+ "learning_rate": 4.3331850054021806e-06,
841
+ "loss": 0.7979,
842
+ "step": 119
843
+ },
844
+ {
845
+ "epoch": 1.01,
846
+ "grad_norm": 3.0733225632670127,
847
+ "learning_rate": 4.321482181477891e-06,
848
+ "loss": 0.7712,
849
+ "step": 120
850
+ },
851
+ {
852
+ "epoch": 1.01,
853
+ "grad_norm": 3.2998040554493366,
854
+ "learning_rate": 4.309693655345084e-06,
855
+ "loss": 0.7523,
856
+ "step": 121
857
+ },
858
+ {
859
+ "epoch": 1.02,
860
+ "grad_norm": 3.0325218749275624,
861
+ "learning_rate": 4.29781998166344e-06,
862
+ "loss": 0.7591,
863
+ "step": 122
864
+ },
865
+ {
866
+ "epoch": 1.03,
867
+ "grad_norm": 3.696913587445061,
868
+ "learning_rate": 4.2858617190989e-06,
869
+ "loss": 0.7447,
870
+ "step": 123
871
+ },
872
+ {
873
+ "epoch": 1.04,
874
+ "grad_norm": 2.852432566856283,
875
+ "learning_rate": 4.273819430297382e-06,
876
+ "loss": 0.7557,
877
+ "step": 124
878
+ },
879
+ {
880
+ "epoch": 1.05,
881
+ "grad_norm": 4.684715882065128,
882
+ "learning_rate": 4.261693681858306e-06,
883
+ "loss": 0.7596,
884
+ "step": 125
885
+ },
886
+ {
887
+ "epoch": 1.06,
888
+ "grad_norm": 3.5092399399988343,
889
+ "learning_rate": 4.2494850443079305e-06,
890
+ "loss": 0.7467,
891
+ "step": 126
892
+ },
893
+ {
894
+ "epoch": 1.07,
895
+ "grad_norm": 3.851985488385657,
896
+ "learning_rate": 4.237194092072521e-06,
897
+ "loss": 0.7475,
898
+ "step": 127
899
+ },
900
+ {
901
+ "epoch": 1.07,
902
+ "grad_norm": 3.328297635562109,
903
+ "learning_rate": 4.2248214034513114e-06,
904
+ "loss": 0.7442,
905
+ "step": 128
906
+ },
907
+ {
908
+ "epoch": 1.08,
909
+ "grad_norm": 3.9102386032965373,
910
+ "learning_rate": 4.212367560589299e-06,
911
+ "loss": 0.7651,
912
+ "step": 129
913
+ },
914
+ {
915
+ "epoch": 1.09,
916
+ "grad_norm": 3.06457511968988,
917
+ "learning_rate": 4.199833149449853e-06,
918
+ "loss": 0.7418,
919
+ "step": 130
920
+ },
921
+ {
922
+ "epoch": 1.1,
923
+ "grad_norm": 3.2168853165359175,
924
+ "learning_rate": 4.187218759787148e-06,
925
+ "loss": 0.743,
926
+ "step": 131
927
+ },
928
+ {
929
+ "epoch": 1.11,
930
+ "grad_norm": 2.787806998778304,
931
+ "learning_rate": 4.174524985118411e-06,
932
+ "loss": 0.7583,
933
+ "step": 132
934
+ },
935
+ {
936
+ "epoch": 1.12,
937
+ "grad_norm": 3.9052093067187443,
938
+ "learning_rate": 4.161752422695995e-06,
939
+ "loss": 0.7657,
940
+ "step": 133
941
+ },
942
+ {
943
+ "epoch": 1.13,
944
+ "grad_norm": 3.0182785590623173,
945
+ "learning_rate": 4.148901673479285e-06,
946
+ "loss": 0.7362,
947
+ "step": 134
948
+ },
949
+ {
950
+ "epoch": 1.13,
951
+ "grad_norm": 3.344873463157497,
952
+ "learning_rate": 4.135973342106416e-06,
953
+ "loss": 0.7558,
954
+ "step": 135
955
+ },
956
+ {
957
+ "epoch": 1.14,
958
+ "grad_norm": 2.859789074330451,
959
+ "learning_rate": 4.122968036865827e-06,
960
+ "loss": 0.7486,
961
+ "step": 136
962
+ },
963
+ {
964
+ "epoch": 1.15,
965
+ "grad_norm": 3.228835212527119,
966
+ "learning_rate": 4.109886369667636e-06,
967
+ "loss": 0.7655,
968
+ "step": 137
969
+ },
970
+ {
971
+ "epoch": 1.16,
972
+ "grad_norm": 2.731179147211052,
973
+ "learning_rate": 4.096728956014857e-06,
974
+ "loss": 0.7528,
975
+ "step": 138
976
+ },
977
+ {
978
+ "epoch": 1.17,
979
+ "grad_norm": 2.652548225590943,
980
+ "learning_rate": 4.083496414974434e-06,
981
+ "loss": 0.7448,
982
+ "step": 139
983
+ },
984
+ {
985
+ "epoch": 1.18,
986
+ "grad_norm": 2.318475213880663,
987
+ "learning_rate": 4.070189369148117e-06,
988
+ "loss": 0.7577,
989
+ "step": 140
990
+ },
991
+ {
992
+ "epoch": 1.19,
993
+ "grad_norm": 2.63738412652813,
994
+ "learning_rate": 4.056808444643163e-06,
995
+ "loss": 0.7475,
996
+ "step": 141
997
+ },
998
+ {
999
+ "epoch": 1.19,
1000
+ "grad_norm": 2.557947896210419,
1001
+ "learning_rate": 4.043354271042884e-06,
1002
+ "loss": 0.7368,
1003
+ "step": 142
1004
+ },
1005
+ {
1006
+ "epoch": 1.2,
1007
+ "grad_norm": 2.555902551567185,
1008
+ "learning_rate": 4.02982748137702e-06,
1009
+ "loss": 0.7434,
1010
+ "step": 143
1011
+ },
1012
+ {
1013
+ "epoch": 1.21,
1014
+ "grad_norm": 2.331216526916747,
1015
+ "learning_rate": 4.0162287120919545e-06,
1016
+ "loss": 0.7592,
1017
+ "step": 144
1018
+ },
1019
+ {
1020
+ "epoch": 1.22,
1021
+ "grad_norm": 2.298359286001758,
1022
+ "learning_rate": 4.002558603020772e-06,
1023
+ "loss": 0.7407,
1024
+ "step": 145
1025
+ },
1026
+ {
1027
+ "epoch": 1.23,
1028
+ "grad_norm": 2.2941501459555806,
1029
+ "learning_rate": 3.988817797353149e-06,
1030
+ "loss": 0.7534,
1031
+ "step": 146
1032
+ },
1033
+ {
1034
+ "epoch": 1.24,
1035
+ "grad_norm": 2.195437955425272,
1036
+ "learning_rate": 3.975006941605099e-06,
1037
+ "loss": 0.7501,
1038
+ "step": 147
1039
+ },
1040
+ {
1041
+ "epoch": 1.25,
1042
+ "grad_norm": 2.2354482392933583,
1043
+ "learning_rate": 3.961126685588541e-06,
1044
+ "loss": 0.7628,
1045
+ "step": 148
1046
+ },
1047
+ {
1048
+ "epoch": 1.25,
1049
+ "grad_norm": 2.271851901257072,
1050
+ "learning_rate": 3.947177682380738e-06,
1051
+ "loss": 0.7559,
1052
+ "step": 149
1053
+ },
1054
+ {
1055
+ "epoch": 1.26,
1056
+ "grad_norm": 2.06190688358819,
1057
+ "learning_rate": 3.933160588293564e-06,
1058
+ "loss": 0.7381,
1059
+ "step": 150
1060
+ },
1061
+ {
1062
+ "epoch": 1.27,
1063
+ "grad_norm": 2.1082422406253145,
1064
+ "learning_rate": 3.9190760628426225e-06,
1065
+ "loss": 0.761,
1066
+ "step": 151
1067
+ },
1068
+ {
1069
+ "epoch": 1.28,
1070
+ "grad_norm": 2.257445076263947,
1071
+ "learning_rate": 3.904924768716216e-06,
1072
+ "loss": 0.7339,
1073
+ "step": 152
1074
+ },
1075
+ {
1076
+ "epoch": 1.29,
1077
+ "grad_norm": 2.0575089260504065,
1078
+ "learning_rate": 3.890707371744169e-06,
1079
+ "loss": 0.7481,
1080
+ "step": 153
1081
+ },
1082
+ {
1083
+ "epoch": 1.3,
1084
+ "grad_norm": 2.2929887801738205,
1085
+ "learning_rate": 3.8764245408664964e-06,
1086
+ "loss": 0.7378,
1087
+ "step": 154
1088
+ },
1089
+ {
1090
+ "epoch": 1.31,
1091
+ "grad_norm": 2.0516588845869244,
1092
+ "learning_rate": 3.862076948101934e-06,
1093
+ "loss": 0.7565,
1094
+ "step": 155
1095
+ },
1096
+ {
1097
+ "epoch": 1.31,
1098
+ "grad_norm": 2.265288203536537,
1099
+ "learning_rate": 3.847665268516314e-06,
1100
+ "loss": 0.7489,
1101
+ "step": 156
1102
+ },
1103
+ {
1104
+ "epoch": 1.32,
1105
+ "grad_norm": 2.3008837342067174,
1106
+ "learning_rate": 3.833190180190808e-06,
1107
+ "loss": 0.7387,
1108
+ "step": 157
1109
+ },
1110
+ {
1111
+ "epoch": 1.33,
1112
+ "grad_norm": 2.303244798174658,
1113
+ "learning_rate": 3.818652364190018e-06,
1114
+ "loss": 0.7456,
1115
+ "step": 158
1116
+ },
1117
+ {
1118
+ "epoch": 1.34,
1119
+ "grad_norm": 2.266318666547602,
1120
+ "learning_rate": 3.8040525045299337e-06,
1121
+ "loss": 0.7574,
1122
+ "step": 159
1123
+ },
1124
+ {
1125
+ "epoch": 1.35,
1126
+ "grad_norm": 2.12834726687523,
1127
+ "learning_rate": 3.7893912881457505e-06,
1128
+ "loss": 0.7444,
1129
+ "step": 160
1130
+ },
1131
+ {
1132
+ "epoch": 1.36,
1133
+ "grad_norm": 2.3349048004783968,
1134
+ "learning_rate": 3.7746694048595458e-06,
1135
+ "loss": 0.7576,
1136
+ "step": 161
1137
+ },
1138
+ {
1139
+ "epoch": 1.37,
1140
+ "grad_norm": 2.3189890726535025,
1141
+ "learning_rate": 3.759887547347825e-06,
1142
+ "loss": 0.7661,
1143
+ "step": 162
1144
+ },
1145
+ {
1146
+ "epoch": 1.37,
1147
+ "grad_norm": 2.4796894123137703,
1148
+ "learning_rate": 3.745046411108928e-06,
1149
+ "loss": 0.75,
1150
+ "step": 163
1151
+ },
1152
+ {
1153
+ "epoch": 1.38,
1154
+ "grad_norm": 2.3870417794754446,
1155
+ "learning_rate": 3.730146694430308e-06,
1156
+ "loss": 0.7553,
1157
+ "step": 164
1158
+ },
1159
+ {
1160
+ "epoch": 1.39,
1161
+ "grad_norm": 2.0568742731615752,
1162
+ "learning_rate": 3.7151890983556747e-06,
1163
+ "loss": 0.7493,
1164
+ "step": 165
1165
+ },
1166
+ {
1167
+ "epoch": 1.4,
1168
+ "grad_norm": 2.4078734682098344,
1169
+ "learning_rate": 3.700174326652011e-06,
1170
+ "loss": 0.7413,
1171
+ "step": 166
1172
+ },
1173
+ {
1174
+ "epoch": 1.41,
1175
+ "grad_norm": 2.1293229041387853,
1176
+ "learning_rate": 3.685103085776457e-06,
1177
+ "loss": 0.7467,
1178
+ "step": 167
1179
+ },
1180
+ {
1181
+ "epoch": 1.42,
1182
+ "grad_norm": 2.117068360526989,
1183
+ "learning_rate": 3.6699760848430753e-06,
1184
+ "loss": 0.7396,
1185
+ "step": 168
1186
+ },
1187
+ {
1188
+ "epoch": 1.43,
1189
+ "grad_norm": 1.9883016666638709,
1190
+ "learning_rate": 3.654794035589484e-06,
1191
+ "loss": 0.7491,
1192
+ "step": 169
1193
+ },
1194
+ {
1195
+ "epoch": 1.43,
1196
+ "grad_norm": 2.2563683164117534,
1197
+ "learning_rate": 3.6395576523433672e-06,
1198
+ "loss": 0.7518,
1199
+ "step": 170
1200
+ },
1201
+ {
1202
+ "epoch": 1.44,
1203
+ "grad_norm": 2.3064779080171744,
1204
+ "learning_rate": 3.6242676519888693e-06,
1205
+ "loss": 0.7565,
1206
+ "step": 171
1207
+ },
1208
+ {
1209
+ "epoch": 1.45,
1210
+ "grad_norm": 1.9222922356745977,
1211
+ "learning_rate": 3.608924753932862e-06,
1212
+ "loss": 0.7353,
1213
+ "step": 172
1214
+ },
1215
+ {
1216
+ "epoch": 1.46,
1217
+ "grad_norm": 2.2598801622305547,
1218
+ "learning_rate": 3.593529680071097e-06,
1219
+ "loss": 0.7466,
1220
+ "step": 173
1221
+ },
1222
+ {
1223
+ "epoch": 1.47,
1224
+ "grad_norm": 2.0105134715596082,
1225
+ "learning_rate": 3.578083154754241e-06,
1226
+ "loss": 0.7427,
1227
+ "step": 174
1228
+ },
1229
+ {
1230
+ "epoch": 1.48,
1231
+ "grad_norm": 2.482181765049527,
1232
+ "learning_rate": 3.5625859047537904e-06,
1233
+ "loss": 0.7531,
1234
+ "step": 175
1235
+ },
1236
+ {
1237
+ "epoch": 1.49,
1238
+ "grad_norm": 1.989275137459208,
1239
+ "learning_rate": 3.547038659227881e-06,
1240
+ "loss": 0.7458,
1241
+ "step": 176
1242
+ },
1243
+ {
1244
+ "epoch": 1.49,
1245
+ "grad_norm": 2.375903859138452,
1246
+ "learning_rate": 3.5314421496869777e-06,
1247
+ "loss": 0.7497,
1248
+ "step": 177
1249
+ },
1250
+ {
1251
+ "epoch": 1.5,
1252
+ "grad_norm": 1.818443971455023,
1253
+ "learning_rate": 3.515797109959458e-06,
1254
+ "loss": 0.7416,
1255
+ "step": 178
1256
+ },
1257
+ {
1258
+ "epoch": 1.51,
1259
+ "grad_norm": 2.1798526485495127,
1260
+ "learning_rate": 3.500104276157083e-06,
1261
+ "loss": 0.7477,
1262
+ "step": 179
1263
+ },
1264
+ {
1265
+ "epoch": 1.52,
1266
+ "grad_norm": 1.8954837732640064,
1267
+ "learning_rate": 3.484364386640365e-06,
1268
+ "loss": 0.7511,
1269
+ "step": 180
1270
+ },
1271
+ {
1272
+ "epoch": 1.53,
1273
+ "grad_norm": 2.1614293408819023,
1274
+ "learning_rate": 3.4685781819838233e-06,
1275
+ "loss": 0.7517,
1276
+ "step": 181
1277
+ },
1278
+ {
1279
+ "epoch": 1.54,
1280
+ "grad_norm": 2.1506646824589497,
1281
+ "learning_rate": 3.452746404941143e-06,
1282
+ "loss": 0.7355,
1283
+ "step": 182
1284
+ },
1285
+ {
1286
+ "epoch": 1.54,
1287
+ "grad_norm": 2.169292733116705,
1288
+ "learning_rate": 3.4368698004102284e-06,
1289
+ "loss": 0.74,
1290
+ "step": 183
1291
+ },
1292
+ {
1293
+ "epoch": 1.55,
1294
+ "grad_norm": 2.418141938844683,
1295
+ "learning_rate": 3.420949115398151e-06,
1296
+ "loss": 0.7503,
1297
+ "step": 184
1298
+ },
1299
+ {
1300
+ "epoch": 1.56,
1301
+ "grad_norm": 2.0599479007715886,
1302
+ "learning_rate": 3.404985098986007e-06,
1303
+ "loss": 0.7569,
1304
+ "step": 185
1305
+ },
1306
+ {
1307
+ "epoch": 1.57,
1308
+ "grad_norm": 1.9525184362879087,
1309
+ "learning_rate": 3.388978502293666e-06,
1310
+ "loss": 0.7354,
1311
+ "step": 186
1312
+ },
1313
+ {
1314
+ "epoch": 1.58,
1315
+ "grad_norm": 2.639288273379809,
1316
+ "learning_rate": 3.372930078444439e-06,
1317
+ "loss": 0.7567,
1318
+ "step": 187
1319
+ },
1320
+ {
1321
+ "epoch": 1.59,
1322
+ "grad_norm": 2.0510480212606232,
1323
+ "learning_rate": 3.3568405825296355e-06,
1324
+ "loss": 0.7433,
1325
+ "step": 188
1326
+ },
1327
+ {
1328
+ "epoch": 1.6,
1329
+ "grad_norm": 2.1578817124425735,
1330
+ "learning_rate": 3.34071077157304e-06,
1331
+ "loss": 0.7417,
1332
+ "step": 189
1333
+ },
1334
+ {
1335
+ "epoch": 1.6,
1336
+ "grad_norm": 2.5268212959295893,
1337
+ "learning_rate": 3.3245414044952927e-06,
1338
+ "loss": 0.7485,
1339
+ "step": 190
1340
+ },
1341
+ {
1342
+ "epoch": 1.61,
1343
+ "grad_norm": 2.159749201380735,
1344
+ "learning_rate": 3.308333242078179e-06,
1345
+ "loss": 0.7507,
1346
+ "step": 191
1347
+ },
1348
+ {
1349
+ "epoch": 1.62,
1350
+ "grad_norm": 2.308409227797682,
1351
+ "learning_rate": 3.292087046928838e-06,
1352
+ "loss": 0.752,
1353
+ "step": 192
1354
+ },
1355
+ {
1356
+ "epoch": 1.63,
1357
+ "grad_norm": 2.6229465595278616,
1358
+ "learning_rate": 3.2758035834438804e-06,
1359
+ "loss": 0.7348,
1360
+ "step": 193
1361
+ },
1362
+ {
1363
+ "epoch": 1.64,
1364
+ "grad_norm": 2.055730524359484,
1365
+ "learning_rate": 3.2594836177734208e-06,
1366
+ "loss": 0.7421,
1367
+ "step": 194
1368
+ },
1369
+ {
1370
+ "epoch": 1.65,
1371
+ "grad_norm": 2.0176570132314837,
1372
+ "learning_rate": 3.2431279177850317e-06,
1373
+ "loss": 0.7536,
1374
+ "step": 195
1375
+ },
1376
+ {
1377
+ "epoch": 1.66,
1378
+ "grad_norm": 2.403883281288115,
1379
+ "learning_rate": 3.226737253027614e-06,
1380
+ "loss": 0.7279,
1381
+ "step": 196
1382
+ },
1383
+ {
1384
+ "epoch": 1.66,
1385
+ "grad_norm": 2.000280638267734,
1386
+ "learning_rate": 3.210312394695189e-06,
1387
+ "loss": 0.7437,
1388
+ "step": 197
1389
+ },
1390
+ {
1391
+ "epoch": 1.67,
1392
+ "grad_norm": 2.220127089726233,
1393
+ "learning_rate": 3.1938541155906146e-06,
1394
+ "loss": 0.7477,
1395
+ "step": 198
1396
+ },
1397
+ {
1398
+ "epoch": 1.68,
1399
+ "grad_norm": 2.1887426336139875,
1400
+ "learning_rate": 3.177363190089221e-06,
1401
+ "loss": 0.7474,
1402
+ "step": 199
1403
+ },
1404
+ {
1405
+ "epoch": 1.69,
1406
+ "grad_norm": 1.981302078482481,
1407
+ "learning_rate": 3.1608403941023793e-06,
1408
+ "loss": 0.7396,
1409
+ "step": 200
1410
+ },
1411
+ {
1412
+ "epoch": 1.7,
1413
+ "grad_norm": 2.264086468679179,
1414
+ "learning_rate": 3.144286505040992e-06,
1415
+ "loss": 0.7391,
1416
+ "step": 201
1417
+ },
1418
+ {
1419
+ "epoch": 1.71,
1420
+ "grad_norm": 2.125610702261824,
1421
+ "learning_rate": 3.1277023017789166e-06,
1422
+ "loss": 0.7449,
1423
+ "step": 202
1424
+ },
1425
+ {
1426
+ "epoch": 1.72,
1427
+ "grad_norm": 2.167346271778115,
1428
+ "learning_rate": 3.111088564616317e-06,
1429
+ "loss": 0.741,
1430
+ "step": 203
1431
+ },
1432
+ {
1433
+ "epoch": 1.72,
1434
+ "grad_norm": 2.375363723765046,
1435
+ "learning_rate": 3.094446075242952e-06,
1436
+ "loss": 0.7469,
1437
+ "step": 204
1438
+ },
1439
+ {
1440
+ "epoch": 1.73,
1441
+ "grad_norm": 1.9942074170938597,
1442
+ "learning_rate": 3.0777756167013946e-06,
1443
+ "loss": 0.7431,
1444
+ "step": 205
1445
+ },
1446
+ {
1447
+ "epoch": 1.74,
1448
+ "grad_norm": 2.146743351563113,
1449
+ "learning_rate": 3.0610779733501904e-06,
1450
+ "loss": 0.752,
1451
+ "step": 206
1452
+ },
1453
+ {
1454
+ "epoch": 1.75,
1455
+ "grad_norm": 2.1525241003649174,
1456
+ "learning_rate": 3.044353930826952e-06,
1457
+ "loss": 0.7345,
1458
+ "step": 207
1459
+ },
1460
+ {
1461
+ "epoch": 1.76,
1462
+ "grad_norm": 1.8638029080202694,
1463
+ "learning_rate": 3.0276042760113937e-06,
1464
+ "loss": 0.7343,
1465
+ "step": 208
1466
+ },
1467
+ {
1468
+ "epoch": 1.77,
1469
+ "grad_norm": 1.8356317644401954,
1470
+ "learning_rate": 3.0108297969883105e-06,
1471
+ "loss": 0.7336,
1472
+ "step": 209
1473
+ },
1474
+ {
1475
+ "epoch": 1.78,
1476
+ "grad_norm": 2.0787011804537583,
1477
+ "learning_rate": 2.9940312830104936e-06,
1478
+ "loss": 0.7505,
1479
+ "step": 210
1480
+ },
1481
+ {
1482
+ "epoch": 1.78,
1483
+ "grad_norm": 1.8672332467108457,
1484
+ "learning_rate": 2.977209524461601e-06,
1485
+ "loss": 0.7479,
1486
+ "step": 211
1487
+ },
1488
+ {
1489
+ "epoch": 1.79,
1490
+ "grad_norm": 2.0236026898201063,
1491
+ "learning_rate": 2.960365312818967e-06,
1492
+ "loss": 0.7563,
1493
+ "step": 212
1494
+ },
1495
+ {
1496
+ "epoch": 1.8,
1497
+ "grad_norm": 1.9813232623431425,
1498
+ "learning_rate": 2.9434994406163574e-06,
1499
+ "loss": 0.7405,
1500
+ "step": 213
1501
+ },
1502
+ {
1503
+ "epoch": 1.81,
1504
+ "grad_norm": 2.009874379662123,
1505
+ "learning_rate": 2.9266127014066905e-06,
1506
+ "loss": 0.7576,
1507
+ "step": 214
1508
+ },
1509
+ {
1510
+ "epoch": 1.82,
1511
+ "grad_norm": 1.9182181733436605,
1512
+ "learning_rate": 2.9097058897246904e-06,
1513
+ "loss": 0.732,
1514
+ "step": 215
1515
+ },
1516
+ {
1517
+ "epoch": 1.83,
1518
+ "grad_norm": 2.2713302836846836,
1519
+ "learning_rate": 2.8927798010495095e-06,
1520
+ "loss": 0.7557,
1521
+ "step": 216
1522
+ },
1523
+ {
1524
+ "epoch": 1.84,
1525
+ "grad_norm": 1.8499354567551494,
1526
+ "learning_rate": 2.875835231767297e-06,
1527
+ "loss": 0.746,
1528
+ "step": 217
1529
+ },
1530
+ {
1531
+ "epoch": 1.84,
1532
+ "grad_norm": 2.056857824508339,
1533
+ "learning_rate": 2.8588729791337298e-06,
1534
+ "loss": 0.7327,
1535
+ "step": 218
1536
+ },
1537
+ {
1538
+ "epoch": 1.85,
1539
+ "grad_norm": 2.0366392068105768,
1540
+ "learning_rate": 2.8418938412365016e-06,
1541
+ "loss": 0.7649,
1542
+ "step": 219
1543
+ },
1544
+ {
1545
+ "epoch": 1.86,
1546
+ "grad_norm": 2.075283194301643,
1547
+ "learning_rate": 2.8248986169577697e-06,
1548
+ "loss": 0.7498,
1549
+ "step": 220
1550
+ },
1551
+ {
1552
+ "epoch": 1.87,
1553
+ "grad_norm": 1.8137465567608382,
1554
+ "learning_rate": 2.807888105936571e-06,
1555
+ "loss": 0.7426,
1556
+ "step": 221
1557
+ },
1558
+ {
1559
+ "epoch": 1.88,
1560
+ "grad_norm": 1.9713775183042563,
1561
+ "learning_rate": 2.7908631085311933e-06,
1562
+ "loss": 0.7573,
1563
+ "step": 222
1564
+ },
1565
+ {
1566
+ "epoch": 1.89,
1567
+ "grad_norm": 1.9347350716569554,
1568
+ "learning_rate": 2.7738244257815234e-06,
1569
+ "loss": 0.739,
1570
+ "step": 223
1571
+ },
1572
+ {
1573
+ "epoch": 1.9,
1574
+ "grad_norm": 2.04529529738096,
1575
+ "learning_rate": 2.756772859371351e-06,
1576
+ "loss": 0.743,
1577
+ "step": 224
1578
+ },
1579
+ {
1580
+ "epoch": 1.9,
1581
+ "grad_norm": 2.1971388123428754,
1582
+ "learning_rate": 2.7397092115906554e-06,
1583
+ "loss": 0.7471,
1584
+ "step": 225
1585
+ },
1586
+ {
1587
+ "epoch": 1.91,
1588
+ "grad_norm": 1.7934500775315345,
1589
+ "learning_rate": 2.7226342852978542e-06,
1590
+ "loss": 0.7443,
1591
+ "step": 226
1592
+ },
1593
+ {
1594
+ "epoch": 1.92,
1595
+ "grad_norm": 2.268705081511345,
1596
+ "learning_rate": 2.7055488838820266e-06,
1597
+ "loss": 0.7414,
1598
+ "step": 227
1599
+ },
1600
+ {
1601
+ "epoch": 1.93,
1602
+ "grad_norm": 1.9139342117735205,
1603
+ "learning_rate": 2.6884538112251147e-06,
1604
+ "loss": 0.7406,
1605
+ "step": 228
1606
+ },
1607
+ {
1608
+ "epoch": 1.94,
1609
+ "grad_norm": 2.3588112901077714,
1610
+ "learning_rate": 2.6713498716641017e-06,
1611
+ "loss": 0.7575,
1612
+ "step": 229
1613
+ },
1614
+ {
1615
+ "epoch": 1.95,
1616
+ "grad_norm": 1.7941800288489695,
1617
+ "learning_rate": 2.6542378699531645e-06,
1618
+ "loss": 0.7459,
1619
+ "step": 230
1620
+ },
1621
+ {
1622
+ "epoch": 1.96,
1623
+ "grad_norm": 2.5891645782274955,
1624
+ "learning_rate": 2.6371186112258118e-06,
1625
+ "loss": 0.7472,
1626
+ "step": 231
1627
+ },
1628
+ {
1629
+ "epoch": 1.96,
1630
+ "grad_norm": 1.8268045992021926,
1631
+ "learning_rate": 2.6199929009570003e-06,
1632
+ "loss": 0.7489,
1633
+ "step": 232
1634
+ },
1635
+ {
1636
+ "epoch": 1.97,
1637
+ "grad_norm": 2.703784770062622,
1638
+ "learning_rate": 2.602861544925236e-06,
1639
+ "loss": 0.7272,
1640
+ "step": 233
1641
+ },
1642
+ {
1643
+ "epoch": 1.98,
1644
+ "grad_norm": 2.00621810320303,
1645
+ "learning_rate": 2.5857253491746646e-06,
1646
+ "loss": 0.7434,
1647
+ "step": 234
1648
+ },
1649
+ {
1650
+ "epoch": 1.99,
1651
+ "grad_norm": 3.0770514249566507,
1652
+ "learning_rate": 2.568585119977142e-06,
1653
+ "loss": 0.7547,
1654
+ "step": 235
1655
+ },
1656
+ {
1657
+ "epoch": 2.0,
1658
+ "grad_norm": 2.199087929149206,
1659
+ "learning_rate": 2.551441663794304e-06,
1660
+ "loss": 0.7362,
1661
+ "step": 236
1662
+ },
1663
+ {
1664
+ "epoch": 2.01,
1665
+ "grad_norm": 2.609728798751417,
1666
+ "learning_rate": 2.5342957872396156e-06,
1667
+ "loss": 0.7387,
1668
+ "step": 237
1669
+ },
1670
+ {
1671
+ "epoch": 2.01,
1672
+ "grad_norm": 2.4167509521439583,
1673
+ "learning_rate": 2.5171482970404244e-06,
1674
+ "loss": 0.7291,
1675
+ "step": 238
1676
+ },
1677
+ {
1678
+ "epoch": 2.0,
1679
+ "grad_norm": 2.272061225763508,
1680
+ "learning_rate": 2.5e-06,
1681
+ "loss": 0.7054,
1682
+ "step": 239
1683
+ },
1684
+ {
1685
+ "epoch": 2.01,
1686
+ "grad_norm": 2.824272633709913,
1687
+ "learning_rate": 2.482851702959577e-06,
1688
+ "loss": 0.6944,
1689
+ "step": 240
1690
+ },
1691
+ {
1692
+ "epoch": 2.02,
1693
+ "grad_norm": 3.5666822575384316,
1694
+ "learning_rate": 2.4657042127603853e-06,
1695
+ "loss": 0.6993,
1696
+ "step": 241
1697
+ },
1698
+ {
1699
+ "epoch": 2.03,
1700
+ "grad_norm": 2.3931138116624724,
1701
+ "learning_rate": 2.4485583362056975e-06,
1702
+ "loss": 0.7047,
1703
+ "step": 242
1704
+ },
1705
+ {
1706
+ "epoch": 2.04,
1707
+ "grad_norm": 2.9320600754571795,
1708
+ "learning_rate": 2.4314148800228584e-06,
1709
+ "loss": 0.6925,
1710
+ "step": 243
1711
+ },
1712
+ {
1713
+ "epoch": 2.05,
1714
+ "grad_norm": 2.8973919990299133,
1715
+ "learning_rate": 2.4142746508253367e-06,
1716
+ "loss": 0.6965,
1717
+ "step": 244
1718
+ },
1719
+ {
1720
+ "epoch": 2.06,
1721
+ "grad_norm": 2.625815226251557,
1722
+ "learning_rate": 2.3971384550747644e-06,
1723
+ "loss": 0.683,
1724
+ "step": 245
1725
+ },
1726
+ {
1727
+ "epoch": 2.06,
1728
+ "grad_norm": 2.4155664260482714,
1729
+ "learning_rate": 2.3800070990430006e-06,
1730
+ "loss": 0.6994,
1731
+ "step": 246
1732
+ },
1733
+ {
1734
+ "epoch": 2.07,
1735
+ "grad_norm": 2.411022689524766,
1736
+ "learning_rate": 2.3628813887741882e-06,
1737
+ "loss": 0.6894,
1738
+ "step": 247
1739
+ },
1740
+ {
1741
+ "epoch": 2.08,
1742
+ "grad_norm": 2.28077143295534,
1743
+ "learning_rate": 2.345762130046836e-06,
1744
+ "loss": 0.7023,
1745
+ "step": 248
1746
+ },
1747
+ {
1748
+ "epoch": 2.09,
1749
+ "grad_norm": 2.401968274277045,
1750
+ "learning_rate": 2.3286501283358987e-06,
1751
+ "loss": 0.6759,
1752
+ "step": 249
1753
+ },
1754
+ {
1755
+ "epoch": 2.1,
1756
+ "grad_norm": 2.378798265143013,
1757
+ "learning_rate": 2.311546188774886e-06,
1758
+ "loss": 0.6958,
1759
+ "step": 250
1760
+ },
1761
+ {
1762
+ "epoch": 2.11,
1763
+ "grad_norm": 2.3673961258869904,
1764
+ "learning_rate": 2.2944511161179743e-06,
1765
+ "loss": 0.6838,
1766
+ "step": 251
1767
+ },
1768
+ {
1769
+ "epoch": 2.12,
1770
+ "grad_norm": 2.516008730553284,
1771
+ "learning_rate": 2.2773657147021466e-06,
1772
+ "loss": 0.6909,
1773
+ "step": 252
1774
+ },
1775
+ {
1776
+ "epoch": 2.12,
1777
+ "grad_norm": 2.2281402280211466,
1778
+ "learning_rate": 2.2602907884093454e-06,
1779
+ "loss": 0.6811,
1780
+ "step": 253
1781
+ },
1782
+ {
1783
+ "epoch": 2.13,
1784
+ "grad_norm": 2.1439630603489315,
1785
+ "learning_rate": 2.24322714062865e-06,
1786
+ "loss": 0.6952,
1787
+ "step": 254
1788
+ },
1789
+ {
1790
+ "epoch": 2.14,
1791
+ "grad_norm": 2.1634818483654077,
1792
+ "learning_rate": 2.2261755742184783e-06,
1793
+ "loss": 0.698,
1794
+ "step": 255
1795
+ },
1796
+ {
1797
+ "epoch": 2.15,
1798
+ "grad_norm": 2.196698584094087,
1799
+ "learning_rate": 2.2091368914688067e-06,
1800
+ "loss": 0.6791,
1801
+ "step": 256
1802
+ },
1803
+ {
1804
+ "epoch": 2.16,
1805
+ "grad_norm": 2.146209315784236,
1806
+ "learning_rate": 2.19211189406343e-06,
1807
+ "loss": 0.7023,
1808
+ "step": 257
1809
+ },
1810
+ {
1811
+ "epoch": 2.17,
1812
+ "grad_norm": 2.1405757699513766,
1813
+ "learning_rate": 2.1751013830422303e-06,
1814
+ "loss": 0.6977,
1815
+ "step": 258
1816
+ },
1817
+ {
1818
+ "epoch": 2.18,
1819
+ "grad_norm": 1.9616924770190525,
1820
+ "learning_rate": 2.1581061587634992e-06,
1821
+ "loss": 0.6812,
1822
+ "step": 259
1823
+ },
1824
+ {
1825
+ "epoch": 2.18,
1826
+ "grad_norm": 2.23295032851048,
1827
+ "learning_rate": 2.14112702086627e-06,
1828
+ "loss": 0.685,
1829
+ "step": 260
1830
+ },
1831
+ {
1832
+ "epoch": 2.19,
1833
+ "grad_norm": 2.279103072547858,
1834
+ "learning_rate": 2.1241647682327037e-06,
1835
+ "loss": 0.6954,
1836
+ "step": 261
1837
+ },
1838
+ {
1839
+ "epoch": 2.2,
1840
+ "grad_norm": 2.1829812242717317,
1841
+ "learning_rate": 2.1072201989504914e-06,
1842
+ "loss": 0.6934,
1843
+ "step": 262
1844
+ },
1845
+ {
1846
+ "epoch": 2.21,
1847
+ "grad_norm": 2.2223700306607537,
1848
+ "learning_rate": 2.09029411027531e-06,
1849
+ "loss": 0.6886,
1850
+ "step": 263
1851
+ },
1852
+ {
1853
+ "epoch": 2.22,
1854
+ "grad_norm": 2.0785442261456337,
1855
+ "learning_rate": 2.073387298593311e-06,
1856
+ "loss": 0.6994,
1857
+ "step": 264
1858
+ },
1859
+ {
1860
+ "epoch": 2.23,
1861
+ "grad_norm": 2.266610094629974,
1862
+ "learning_rate": 2.0565005593836434e-06,
1863
+ "loss": 0.6969,
1864
+ "step": 265
1865
+ },
1866
+ {
1867
+ "epoch": 2.24,
1868
+ "grad_norm": 2.132447543935174,
1869
+ "learning_rate": 2.0396346871810347e-06,
1870
+ "loss": 0.6773,
1871
+ "step": 266
1872
+ },
1873
+ {
1874
+ "epoch": 2.24,
1875
+ "grad_norm": 2.0884173463715228,
1876
+ "learning_rate": 2.0227904755383985e-06,
1877
+ "loss": 0.6945,
1878
+ "step": 267
1879
+ },
1880
+ {
1881
+ "epoch": 2.25,
1882
+ "grad_norm": 2.2085913230241396,
1883
+ "learning_rate": 2.005968716989507e-06,
1884
+ "loss": 0.6949,
1885
+ "step": 268
1886
+ },
1887
+ {
1888
+ "epoch": 2.26,
1889
+ "grad_norm": 2.0268180328155507,
1890
+ "learning_rate": 1.98917020301169e-06,
1891
+ "loss": 0.7027,
1892
+ "step": 269
1893
+ },
1894
+ {
1895
+ "epoch": 2.27,
1896
+ "grad_norm": 2.4747960228822037,
1897
+ "learning_rate": 1.9723957239886067e-06,
1898
+ "loss": 0.6794,
1899
+ "step": 270
1900
+ },
1901
+ {
1902
+ "epoch": 2.28,
1903
+ "grad_norm": 2.095701576576396,
1904
+ "learning_rate": 1.955646069173048e-06,
1905
+ "loss": 0.6961,
1906
+ "step": 271
1907
+ },
1908
+ {
1909
+ "epoch": 2.29,
1910
+ "grad_norm": 2.0823665858025313,
1911
+ "learning_rate": 1.93892202664981e-06,
1912
+ "loss": 0.6786,
1913
+ "step": 272
1914
+ },
1915
+ {
1916
+ "epoch": 2.29,
1917
+ "grad_norm": 2.245218571211113,
1918
+ "learning_rate": 1.922224383298606e-06,
1919
+ "loss": 0.6974,
1920
+ "step": 273
1921
+ },
1922
+ {
1923
+ "epoch": 2.3,
1924
+ "grad_norm": 2.158681117909884,
1925
+ "learning_rate": 1.905553924757049e-06,
1926
+ "loss": 0.7002,
1927
+ "step": 274
1928
+ },
1929
+ {
1930
+ "epoch": 2.31,
1931
+ "grad_norm": 2.146126951984283,
1932
+ "learning_rate": 1.888911435383684e-06,
1933
+ "loss": 0.6843,
1934
+ "step": 275
1935
+ },
1936
+ {
1937
+ "epoch": 2.32,
1938
+ "grad_norm": 2.1238895111610048,
1939
+ "learning_rate": 1.8722976982210845e-06,
1940
+ "loss": 0.684,
1941
+ "step": 276
1942
+ },
1943
+ {
1944
+ "epoch": 2.33,
1945
+ "grad_norm": 2.1081813807297984,
1946
+ "learning_rate": 1.8557134949590087e-06,
1947
+ "loss": 0.6868,
1948
+ "step": 277
1949
+ },
1950
+ {
1951
+ "epoch": 2.34,
1952
+ "grad_norm": 2.0759325520644096,
1953
+ "learning_rate": 1.8391596058976214e-06,
1954
+ "loss": 0.69,
1955
+ "step": 278
1956
+ },
1957
+ {
1958
+ "epoch": 2.35,
1959
+ "grad_norm": 2.087216621474724,
1960
+ "learning_rate": 1.8226368099107793e-06,
1961
+ "loss": 0.6923,
1962
+ "step": 279
1963
+ },
1964
+ {
1965
+ "epoch": 2.35,
1966
+ "grad_norm": 2.4018313648831113,
1967
+ "learning_rate": 1.806145884409386e-06,
1968
+ "loss": 0.6931,
1969
+ "step": 280
1970
+ },
1971
+ {
1972
+ "epoch": 2.36,
1973
+ "grad_norm": 2.0013309281300216,
1974
+ "learning_rate": 1.7896876053048112e-06,
1975
+ "loss": 0.6893,
1976
+ "step": 281
1977
+ },
1978
+ {
1979
+ "epoch": 2.37,
1980
+ "grad_norm": 2.059546352986111,
1981
+ "learning_rate": 1.7732627469723868e-06,
1982
+ "loss": 0.6867,
1983
+ "step": 282
1984
+ },
1985
+ {
1986
+ "epoch": 2.38,
1987
+ "grad_norm": 2.000325940508461,
1988
+ "learning_rate": 1.756872082214969e-06,
1989
+ "loss": 0.6914,
1990
+ "step": 283
1991
+ },
1992
+ {
1993
+ "epoch": 2.39,
1994
+ "grad_norm": 2.379361502001129,
1995
+ "learning_rate": 1.7405163822265803e-06,
1996
+ "loss": 0.6906,
1997
+ "step": 284
1998
+ },
1999
+ {
2000
+ "epoch": 2.4,
2001
+ "grad_norm": 1.8960887672873148,
2002
+ "learning_rate": 1.7241964165561204e-06,
2003
+ "loss": 0.6673,
2004
+ "step": 285
2005
+ },
2006
+ {
2007
+ "epoch": 2.41,
2008
+ "grad_norm": 2.172181466809878,
2009
+ "learning_rate": 1.707912953071163e-06,
2010
+ "loss": 0.6781,
2011
+ "step": 286
2012
+ },
2013
+ {
2014
+ "epoch": 2.41,
2015
+ "grad_norm": 2.015019267745016,
2016
+ "learning_rate": 1.6916667579218216e-06,
2017
+ "loss": 0.6963,
2018
+ "step": 287
2019
+ },
2020
+ {
2021
+ "epoch": 2.42,
2022
+ "grad_norm": 2.0355415165674846,
2023
+ "learning_rate": 1.6754585955047081e-06,
2024
+ "loss": 0.6779,
2025
+ "step": 288
2026
+ },
2027
+ {
2028
+ "epoch": 2.43,
2029
+ "grad_norm": 2.1086130667226977,
2030
+ "learning_rate": 1.6592892284269597e-06,
2031
+ "loss": 0.6998,
2032
+ "step": 289
2033
+ },
2034
+ {
2035
+ "epoch": 2.44,
2036
+ "grad_norm": 2.059886916939569,
2037
+ "learning_rate": 1.6431594174703647e-06,
2038
+ "loss": 0.6802,
2039
+ "step": 290
2040
+ },
2041
+ {
2042
+ "epoch": 2.45,
2043
+ "grad_norm": 2.0641741782118332,
2044
+ "learning_rate": 1.6270699215555608e-06,
2045
+ "loss": 0.6854,
2046
+ "step": 291
2047
+ },
2048
+ {
2049
+ "epoch": 2.46,
2050
+ "grad_norm": 2.1806834552623444,
2051
+ "learning_rate": 1.6110214977063345e-06,
2052
+ "loss": 0.6987,
2053
+ "step": 292
2054
+ },
2055
+ {
2056
+ "epoch": 2.47,
2057
+ "grad_norm": 2.158353312239409,
2058
+ "learning_rate": 1.5950149010139938e-06,
2059
+ "loss": 0.6823,
2060
+ "step": 293
2061
+ },
2062
+ {
2063
+ "epoch": 2.47,
2064
+ "grad_norm": 1.921092711707764,
2065
+ "learning_rate": 1.5790508846018493e-06,
2066
+ "loss": 0.6941,
2067
+ "step": 294
2068
+ },
2069
+ {
2070
+ "epoch": 2.48,
2071
+ "grad_norm": 2.3977437347525594,
2072
+ "learning_rate": 1.563130199589773e-06,
2073
+ "loss": 0.6915,
2074
+ "step": 295
2075
+ },
2076
+ {
2077
+ "epoch": 2.49,
2078
+ "grad_norm": 2.0303949213498167,
2079
+ "learning_rate": 1.5472535950588575e-06,
2080
+ "loss": 0.6971,
2081
+ "step": 296
2082
+ },
2083
+ {
2084
+ "epoch": 2.5,
2085
+ "grad_norm": 2.176759402835286,
2086
+ "learning_rate": 1.5314218180161783e-06,
2087
+ "loss": 0.6809,
2088
+ "step": 297
2089
+ },
2090
+ {
2091
+ "epoch": 2.51,
2092
+ "grad_norm": 1.9729876407553733,
2093
+ "learning_rate": 1.5156356133596356e-06,
2094
+ "loss": 0.6933,
2095
+ "step": 298
2096
+ },
2097
+ {
2098
+ "epoch": 2.52,
2099
+ "grad_norm": 2.168577615246608,
2100
+ "learning_rate": 1.4998957238429173e-06,
2101
+ "loss": 0.6873,
2102
+ "step": 299
2103
+ },
2104
+ {
2105
+ "epoch": 2.53,
2106
+ "grad_norm": 2.0839917041722704,
2107
+ "learning_rate": 1.4842028900405422e-06,
2108
+ "loss": 0.6984,
2109
+ "step": 300
2110
+ },
2111
+ {
2112
+ "epoch": 2.53,
2113
+ "grad_norm": 1.893739609530612,
2114
+ "learning_rate": 1.4685578503130227e-06,
2115
+ "loss": 0.6922,
2116
+ "step": 301
2117
+ },
2118
+ {
2119
+ "epoch": 2.54,
2120
+ "grad_norm": 2.0857317964888193,
2121
+ "learning_rate": 1.4529613407721193e-06,
2122
+ "loss": 0.6908,
2123
+ "step": 302
2124
+ },
2125
+ {
2126
+ "epoch": 2.55,
2127
+ "grad_norm": 2.0026826285563564,
2128
+ "learning_rate": 1.4374140952462109e-06,
2129
+ "loss": 0.6752,
2130
+ "step": 303
2131
+ },
2132
+ {
2133
+ "epoch": 2.56,
2134
+ "grad_norm": 1.914005315845231,
2135
+ "learning_rate": 1.4219168452457593e-06,
2136
+ "loss": 0.6988,
2137
+ "step": 304
2138
+ },
2139
+ {
2140
+ "epoch": 2.57,
2141
+ "grad_norm": 2.0223609197099552,
2142
+ "learning_rate": 1.4064703199289038e-06,
2143
+ "loss": 0.6842,
2144
+ "step": 305
2145
+ },
2146
+ {
2147
+ "epoch": 2.58,
2148
+ "grad_norm": 2.1096738426378407,
2149
+ "learning_rate": 1.391075246067139e-06,
2150
+ "loss": 0.6823,
2151
+ "step": 306
2152
+ },
2153
+ {
2154
+ "epoch": 2.59,
2155
+ "grad_norm": 2.0768301164703438,
2156
+ "learning_rate": 1.375732348011132e-06,
2157
+ "loss": 0.6898,
2158
+ "step": 307
2159
+ },
2160
+ {
2161
+ "epoch": 2.59,
2162
+ "grad_norm": 2.123519591919823,
2163
+ "learning_rate": 1.3604423476566342e-06,
2164
+ "loss": 0.6732,
2165
+ "step": 308
2166
+ },
2167
+ {
2168
+ "epoch": 2.6,
2169
+ "grad_norm": 2.1761224742449934,
2170
+ "learning_rate": 1.3452059644105174e-06,
2171
+ "loss": 0.6915,
2172
+ "step": 309
2173
+ },
2174
+ {
2175
+ "epoch": 2.61,
2176
+ "grad_norm": 2.1481136545780246,
2177
+ "learning_rate": 1.3300239151569251e-06,
2178
+ "loss": 0.6942,
2179
+ "step": 310
2180
+ },
2181
+ {
2182
+ "epoch": 2.62,
2183
+ "grad_norm": 2.194600040469437,
2184
+ "learning_rate": 1.3148969142235436e-06,
2185
+ "loss": 0.6788,
2186
+ "step": 311
2187
+ },
2188
+ {
2189
+ "epoch": 2.63,
2190
+ "grad_norm": 2.1089291541456223,
2191
+ "learning_rate": 1.2998256733479896e-06,
2192
+ "loss": 0.7013,
2193
+ "step": 312
2194
+ },
2195
+ {
2196
+ "epoch": 2.64,
2197
+ "grad_norm": 2.19121425222603,
2198
+ "learning_rate": 1.2848109016443255e-06,
2199
+ "loss": 0.6897,
2200
+ "step": 313
2201
+ },
2202
+ {
2203
+ "epoch": 2.65,
2204
+ "grad_norm": 1.9319788219662473,
2205
+ "learning_rate": 1.2698533055696926e-06,
2206
+ "loss": 0.6976,
2207
+ "step": 314
2208
+ },
2209
+ {
2210
+ "epoch": 2.65,
2211
+ "grad_norm": 2.275808654157748,
2212
+ "learning_rate": 1.254953588891073e-06,
2213
+ "loss": 0.6839,
2214
+ "step": 315
2215
+ },
2216
+ {
2217
+ "epoch": 2.66,
2218
+ "grad_norm": 2.198386215375118,
2219
+ "learning_rate": 1.2401124526521763e-06,
2220
+ "loss": 0.6946,
2221
+ "step": 316
2222
+ },
2223
+ {
2224
+ "epoch": 2.67,
2225
+ "grad_norm": 1.8790492694109773,
2226
+ "learning_rate": 1.225330595140455e-06,
2227
+ "loss": 0.676,
2228
+ "step": 317
2229
+ },
2230
+ {
2231
+ "epoch": 2.68,
2232
+ "grad_norm": 1.9926892667726497,
2233
+ "learning_rate": 1.2106087118542504e-06,
2234
+ "loss": 0.6824,
2235
+ "step": 318
2236
+ },
2237
+ {
2238
+ "epoch": 2.69,
2239
+ "grad_norm": 2.0144848585141206,
2240
+ "learning_rate": 1.1959474954700667e-06,
2241
+ "loss": 0.6906,
2242
+ "step": 319
2243
+ },
2244
+ {
2245
+ "epoch": 2.7,
2246
+ "grad_norm": 2.0949703627379446,
2247
+ "learning_rate": 1.1813476358099824e-06,
2248
+ "loss": 0.6952,
2249
+ "step": 320
2250
+ },
2251
+ {
2252
+ "epoch": 2.71,
2253
+ "grad_norm": 1.878706080567921,
2254
+ "learning_rate": 1.166809819809192e-06,
2255
+ "loss": 0.6846,
2256
+ "step": 321
2257
+ },
2258
+ {
2259
+ "epoch": 2.71,
2260
+ "grad_norm": 2.106975261880749,
2261
+ "learning_rate": 1.1523347314836857e-06,
2262
+ "loss": 0.6916,
2263
+ "step": 322
2264
+ },
2265
+ {
2266
+ "epoch": 2.72,
2267
+ "grad_norm": 2.0193667358417486,
2268
+ "learning_rate": 1.1379230518980663e-06,
2269
+ "loss": 0.695,
2270
+ "step": 323
2271
+ },
2272
+ {
2273
+ "epoch": 2.73,
2274
+ "grad_norm": 1.8829454822076184,
2275
+ "learning_rate": 1.123575459133504e-06,
2276
+ "loss": 0.6856,
2277
+ "step": 324
2278
+ },
2279
+ {
2280
+ "epoch": 2.74,
2281
+ "grad_norm": 1.9885667241669744,
2282
+ "learning_rate": 1.109292628255832e-06,
2283
+ "loss": 0.6849,
2284
+ "step": 325
2285
+ },
2286
+ {
2287
+ "epoch": 2.75,
2288
+ "grad_norm": 1.9038960696049037,
2289
+ "learning_rate": 1.0950752312837846e-06,
2290
+ "loss": 0.6901,
2291
+ "step": 326
2292
+ },
2293
+ {
2294
+ "epoch": 2.76,
2295
+ "grad_norm": 2.0104842271889467,
2296
+ "learning_rate": 1.0809239371573779e-06,
2297
+ "loss": 0.7014,
2298
+ "step": 327
2299
+ },
2300
+ {
2301
+ "epoch": 2.76,
2302
+ "grad_norm": 1.9066116631636623,
2303
+ "learning_rate": 1.0668394117064365e-06,
2304
+ "loss": 0.6798,
2305
+ "step": 328
2306
+ },
2307
+ {
2308
+ "epoch": 2.77,
2309
+ "grad_norm": 1.9713999361623535,
2310
+ "learning_rate": 1.0528223176192618e-06,
2311
+ "loss": 0.6979,
2312
+ "step": 329
2313
+ },
2314
+ {
2315
+ "epoch": 2.78,
2316
+ "grad_norm": 1.8465153504391632,
2317
+ "learning_rate": 1.0388733144114605e-06,
2318
+ "loss": 0.6892,
2319
+ "step": 330
2320
+ },
2321
+ {
2322
+ "epoch": 2.79,
2323
+ "grad_norm": 1.9408714006937027,
2324
+ "learning_rate": 1.024993058394902e-06,
2325
+ "loss": 0.6985,
2326
+ "step": 331
2327
+ },
2328
+ {
2329
+ "epoch": 2.8,
2330
+ "grad_norm": 2.030993015395332,
2331
+ "learning_rate": 1.0111822026468515e-06,
2332
+ "loss": 0.6925,
2333
+ "step": 332
2334
+ },
2335
+ {
2336
+ "epoch": 2.81,
2337
+ "grad_norm": 1.811976430858568,
2338
+ "learning_rate": 9.974413969792285e-07,
2339
+ "loss": 0.6805,
2340
+ "step": 333
2341
+ },
2342
+ {
2343
+ "epoch": 2.82,
2344
+ "grad_norm": 2.025426310321446,
2345
+ "learning_rate": 9.837712879080464e-07,
2346
+ "loss": 0.6884,
2347
+ "step": 334
2348
+ },
2349
+ {
2350
+ "epoch": 2.82,
2351
+ "grad_norm": 1.8699504401283087,
2352
+ "learning_rate": 9.701725186229801e-07,
2353
+ "loss": 0.6766,
2354
+ "step": 335
2355
+ },
2356
+ {
2357
+ "epoch": 2.83,
2358
+ "grad_norm": 1.9813729971640541,
2359
+ "learning_rate": 9.56645728957117e-07,
2360
+ "loss": 0.6816,
2361
+ "step": 336
2362
+ },
2363
+ {
2364
+ "epoch": 2.84,
2365
+ "grad_norm": 1.857568380571694,
2366
+ "learning_rate": 9.431915553568374e-07,
2367
+ "loss": 0.6941,
2368
+ "step": 337
2369
+ },
2370
+ {
2371
+ "epoch": 2.85,
2372
+ "grad_norm": 1.8075501016131494,
2373
+ "learning_rate": 9.298106308518847e-07,
2374
+ "loss": 0.6915,
2375
+ "step": 338
2376
+ },
2377
+ {
2378
+ "epoch": 2.86,
2379
+ "grad_norm": 1.896748082277053,
2380
+ "learning_rate": 9.165035850255672e-07,
2381
+ "loss": 0.6965,
2382
+ "step": 339
2383
+ },
2384
+ {
2385
+ "epoch": 2.87,
2386
+ "grad_norm": 1.903236161607879,
2387
+ "learning_rate": 9.032710439851444e-07,
2388
+ "loss": 0.6942,
2389
+ "step": 340
2390
+ },
2391
+ {
2392
+ "epoch": 2.88,
2393
+ "grad_norm": 2.0473184895907344,
2394
+ "learning_rate": 8.901136303323654e-07,
2395
+ "loss": 0.6868,
2396
+ "step": 341
2397
+ },
2398
+ {
2399
+ "epoch": 2.88,
2400
+ "grad_norm": 1.9225358657320613,
2401
+ "learning_rate": 8.770319631341745e-07,
2402
+ "loss": 0.6833,
2403
+ "step": 342
2404
+ },
2405
+ {
2406
+ "epoch": 2.89,
2407
+ "grad_norm": 1.8842364675717973,
2408
+ "learning_rate": 8.640266578935841e-07,
2409
+ "loss": 0.7059,
2410
+ "step": 343
2411
+ },
2412
+ {
2413
+ "epoch": 2.9,
2414
+ "grad_norm": 1.9601955634309354,
2415
+ "learning_rate": 8.510983265207152e-07,
2416
+ "loss": 0.6996,
2417
+ "step": 344
2418
+ },
2419
+ {
2420
+ "epoch": 2.91,
2421
+ "grad_norm": 1.8136997160021915,
2422
+ "learning_rate": 8.382475773040055e-07,
2423
+ "loss": 0.6836,
2424
+ "step": 345
2425
+ },
2426
+ {
2427
+ "epoch": 2.92,
2428
+ "grad_norm": 1.9111490776903417,
2429
+ "learning_rate": 8.254750148815893e-07,
2430
+ "loss": 0.6996,
2431
+ "step": 346
2432
+ },
2433
+ {
2434
+ "epoch": 2.93,
2435
+ "grad_norm": 1.8878734449529964,
2436
+ "learning_rate": 8.127812402128521e-07,
2437
+ "loss": 0.6932,
2438
+ "step": 347
2439
+ },
2440
+ {
2441
+ "epoch": 2.94,
2442
+ "grad_norm": 1.7623368894317115,
2443
+ "learning_rate": 8.001668505501464e-07,
2444
+ "loss": 0.696,
2445
+ "step": 348
2446
+ },
2447
+ {
2448
+ "epoch": 2.94,
2449
+ "grad_norm": 1.9509975903694705,
2450
+ "learning_rate": 7.876324394107018e-07,
2451
+ "loss": 0.6886,
2452
+ "step": 349
2453
+ },
2454
+ {
2455
+ "epoch": 2.95,
2456
+ "grad_norm": 1.8836926534531768,
2457
+ "learning_rate": 7.751785965486894e-07,
2458
+ "loss": 0.6898,
2459
+ "step": 350
2460
+ },
2461
+ {
2462
+ "epoch": 2.96,
2463
+ "grad_norm": 1.9384474477733897,
2464
+ "learning_rate": 7.628059079274793e-07,
2465
+ "loss": 0.6829,
2466
+ "step": 351
2467
+ },
2468
+ {
2469
+ "epoch": 2.97,
2470
+ "grad_norm": 1.8215374593231801,
2471
+ "learning_rate": 7.505149556920698e-07,
2472
+ "loss": 0.6908,
2473
+ "step": 352
2474
+ },
2475
+ {
2476
+ "epoch": 2.98,
2477
+ "grad_norm": 1.8093873518769943,
2478
+ "learning_rate": 7.383063181416955e-07,
2479
+ "loss": 0.6983,
2480
+ "step": 353
2481
+ },
2482
+ {
2483
+ "epoch": 2.99,
2484
+ "grad_norm": 1.9915437999230632,
2485
+ "learning_rate": 7.261805697026178e-07,
2486
+ "loss": 0.7005,
2487
+ "step": 354
2488
+ },
2489
+ {
2490
+ "epoch": 3.0,
2491
+ "grad_norm": 1.89611825729105,
2492
+ "learning_rate": 7.141382809010999e-07,
2493
+ "loss": 0.6931,
2494
+ "step": 355
2495
+ },
2496
+ {
2497
+ "epoch": 3.0,
2498
+ "grad_norm": 1.8365953198306064,
2499
+ "learning_rate": 7.021800183365607e-07,
2500
+ "loss": 0.6817,
2501
+ "step": 356
2502
+ },
2503
+ {
2504
+ "epoch": 3.01,
2505
+ "grad_norm": 1.8887825422099398,
2506
+ "learning_rate": 6.903063446549166e-07,
2507
+ "loss": 0.6796,
2508
+ "step": 357
2509
+ },
2510
+ {
2511
+ "epoch": 3.0,
2512
+ "grad_norm": 2.0505162217401396,
2513
+ "learning_rate": 6.785178185221095e-07,
2514
+ "loss": 0.6823,
2515
+ "step": 358
2516
+ },
2517
+ {
2518
+ "epoch": 3.01,
2519
+ "grad_norm": 2.8780046222752,
2520
+ "learning_rate": 6.668149945978203e-07,
2521
+ "loss": 0.6598,
2522
+ "step": 359
2523
+ },
2524
+ {
2525
+ "epoch": 3.02,
2526
+ "grad_norm": 2.320474085762604,
2527
+ "learning_rate": 6.551984235093692e-07,
2528
+ "loss": 0.6646,
2529
+ "step": 360
2530
+ },
2531
+ {
2532
+ "epoch": 3.03,
2533
+ "grad_norm": 3.148494101628221,
2534
+ "learning_rate": 6.436686518258156e-07,
2535
+ "loss": 0.6521,
2536
+ "step": 361
2537
+ },
2538
+ {
2539
+ "epoch": 3.04,
2540
+ "grad_norm": 2.9894322407930707,
2541
+ "learning_rate": 6.322262220322314e-07,
2542
+ "loss": 0.6497,
2543
+ "step": 362
2544
+ },
2545
+ {
2546
+ "epoch": 3.04,
2547
+ "grad_norm": 2.0905514911758116,
2548
+ "learning_rate": 6.208716725041869e-07,
2549
+ "loss": 0.6729,
2550
+ "step": 363
2551
+ },
2552
+ {
2553
+ "epoch": 3.05,
2554
+ "grad_norm": 2.5235937968654882,
2555
+ "learning_rate": 6.096055374824117e-07,
2556
+ "loss": 0.6536,
2557
+ "step": 364
2558
+ },
2559
+ {
2560
+ "epoch": 3.06,
2561
+ "grad_norm": 2.7164252624114953,
2562
+ "learning_rate": 5.984283470476621e-07,
2563
+ "loss": 0.6557,
2564
+ "step": 365
2565
+ },
2566
+ {
2567
+ "epoch": 3.07,
2568
+ "grad_norm": 2.1414297977553134,
2569
+ "learning_rate": 5.873406270957804e-07,
2570
+ "loss": 0.6517,
2571
+ "step": 366
2572
+ },
2573
+ {
2574
+ "epoch": 3.08,
2575
+ "grad_norm": 2.087954653292254,
2576
+ "learning_rate": 5.763428993129499e-07,
2577
+ "loss": 0.6535,
2578
+ "step": 367
2579
+ },
2580
+ {
2581
+ "epoch": 3.09,
2582
+ "grad_norm": 2.5430489969699166,
2583
+ "learning_rate": 5.654356811511494e-07,
2584
+ "loss": 0.6594,
2585
+ "step": 368
2586
+ },
2587
+ {
2588
+ "epoch": 3.1,
2589
+ "grad_norm": 2.4893194798160425,
2590
+ "learning_rate": 5.546194858038073e-07,
2591
+ "loss": 0.6702,
2592
+ "step": 369
2593
+ },
2594
+ {
2595
+ "epoch": 3.1,
2596
+ "grad_norm": 1.9260382585512938,
2597
+ "learning_rate": 5.438948221816559e-07,
2598
+ "loss": 0.6629,
2599
+ "step": 370
2600
+ },
2601
+ {
2602
+ "epoch": 3.11,
2603
+ "grad_norm": 2.057039216215999,
2604
+ "learning_rate": 5.332621948887823e-07,
2605
+ "loss": 0.6583,
2606
+ "step": 371
2607
+ },
2608
+ {
2609
+ "epoch": 3.12,
2610
+ "grad_norm": 2.182074257751017,
2611
+ "learning_rate": 5.227221041988955e-07,
2612
+ "loss": 0.6602,
2613
+ "step": 372
2614
+ },
2615
+ {
2616
+ "epoch": 3.13,
2617
+ "grad_norm": 1.9356067875549532,
2618
+ "learning_rate": 5.122750460317768e-07,
2619
+ "loss": 0.6621,
2620
+ "step": 373
2621
+ },
2622
+ {
2623
+ "epoch": 3.14,
2624
+ "grad_norm": 1.9075744893117703,
2625
+ "learning_rate": 5.019215119299578e-07,
2626
+ "loss": 0.6673,
2627
+ "step": 374
2628
+ },
2629
+ {
2630
+ "epoch": 3.15,
2631
+ "grad_norm": 2.0600626341053028,
2632
+ "learning_rate": 4.916619890355812e-07,
2633
+ "loss": 0.6577,
2634
+ "step": 375
2635
+ },
2636
+ {
2637
+ "epoch": 3.16,
2638
+ "grad_norm": 1.847664227547946,
2639
+ "learning_rate": 4.814969600674926e-07,
2640
+ "loss": 0.6566,
2641
+ "step": 376
2642
+ },
2643
+ {
2644
+ "epoch": 3.16,
2645
+ "grad_norm": 1.9200825550285445,
2646
+ "learning_rate": 4.714269032985161e-07,
2647
+ "loss": 0.6531,
2648
+ "step": 377
2649
+ },
2650
+ {
2651
+ "epoch": 3.17,
2652
+ "grad_norm": 1.945604786921752,
2653
+ "learning_rate": 4.614522925329626e-07,
2654
+ "loss": 0.6577,
2655
+ "step": 378
2656
+ },
2657
+ {
2658
+ "epoch": 3.18,
2659
+ "grad_norm": 1.9471196049311694,
2660
+ "learning_rate": 4.515735970843263e-07,
2661
+ "loss": 0.6659,
2662
+ "step": 379
2663
+ },
2664
+ {
2665
+ "epoch": 3.19,
2666
+ "grad_norm": 1.8278961360694248,
2667
+ "learning_rate": 4.417912817532133e-07,
2668
+ "loss": 0.6554,
2669
+ "step": 380
2670
+ },
2671
+ {
2672
+ "epoch": 3.2,
2673
+ "grad_norm": 1.88830260098924,
2674
+ "learning_rate": 4.321058068054626e-07,
2675
+ "loss": 0.6563,
2676
+ "step": 381
2677
+ },
2678
+ {
2679
+ "epoch": 3.21,
2680
+ "grad_norm": 1.9149749844100774,
2681
+ "learning_rate": 4.225176279504975e-07,
2682
+ "loss": 0.6571,
2683
+ "step": 382
2684
+ },
2685
+ {
2686
+ "epoch": 3.22,
2687
+ "grad_norm": 1.8814221934773716,
2688
+ "learning_rate": 4.130271963198815e-07,
2689
+ "loss": 0.6572,
2690
+ "step": 383
2691
+ },
2692
+ {
2693
+ "epoch": 3.22,
2694
+ "grad_norm": 1.8849419819366298,
2695
+ "learning_rate": 4.0363495844609134e-07,
2696
+ "loss": 0.6604,
2697
+ "step": 384
2698
+ },
2699
+ {
2700
+ "epoch": 3.23,
2701
+ "grad_norm": 1.9046094115295815,
2702
+ "learning_rate": 3.9434135624150854e-07,
2703
+ "loss": 0.6652,
2704
+ "step": 385
2705
+ },
2706
+ {
2707
+ "epoch": 3.24,
2708
+ "grad_norm": 1.944275827853693,
2709
+ "learning_rate": 3.8514682697762706e-07,
2710
+ "loss": 0.6572,
2711
+ "step": 386
2712
+ },
2713
+ {
2714
+ "epoch": 3.25,
2715
+ "grad_norm": 1.8699721288071858,
2716
+ "learning_rate": 3.7605180326447806e-07,
2717
+ "loss": 0.6401,
2718
+ "step": 387
2719
+ },
2720
+ {
2721
+ "epoch": 3.26,
2722
+ "grad_norm": 1.775035768873695,
2723
+ "learning_rate": 3.6705671303027687e-07,
2724
+ "loss": 0.6523,
2725
+ "step": 388
2726
+ },
2727
+ {
2728
+ "epoch": 3.27,
2729
+ "grad_norm": 1.7843895394177849,
2730
+ "learning_rate": 3.581619795012875e-07,
2731
+ "loss": 0.6516,
2732
+ "step": 389
2733
+ },
2734
+ {
2735
+ "epoch": 3.28,
2736
+ "grad_norm": 1.919359950542867,
2737
+ "learning_rate": 3.493680211819103e-07,
2738
+ "loss": 0.6607,
2739
+ "step": 390
2740
+ },
2741
+ {
2742
+ "epoch": 3.28,
2743
+ "grad_norm": 1.8576252034229292,
2744
+ "learning_rate": 3.4067525183499013e-07,
2745
+ "loss": 0.6663,
2746
+ "step": 391
2747
+ },
2748
+ {
2749
+ "epoch": 3.29,
2750
+ "grad_norm": 1.7764574523914607,
2751
+ "learning_rate": 3.3208408046234904e-07,
2752
+ "loss": 0.6576,
2753
+ "step": 392
2754
+ },
2755
+ {
2756
+ "epoch": 3.3,
2757
+ "grad_norm": 1.8446907169053142,
2758
+ "learning_rate": 3.2359491128554214e-07,
2759
+ "loss": 0.6582,
2760
+ "step": 393
2761
+ },
2762
+ {
2763
+ "epoch": 3.31,
2764
+ "grad_norm": 1.7493689583147616,
2765
+ "learning_rate": 3.152081437268398e-07,
2766
+ "loss": 0.6548,
2767
+ "step": 394
2768
+ },
2769
+ {
2770
+ "epoch": 3.32,
2771
+ "grad_norm": 1.816698197495291,
2772
+ "learning_rate": 3.069241723904318e-07,
2773
+ "loss": 0.6636,
2774
+ "step": 395
2775
+ },
2776
+ {
2777
+ "epoch": 3.33,
2778
+ "grad_norm": 1.790271464078186,
2779
+ "learning_rate": 2.987433870438641e-07,
2780
+ "loss": 0.657,
2781
+ "step": 396
2782
+ },
2783
+ {
2784
+ "epoch": 3.34,
2785
+ "grad_norm": 1.743131857961643,
2786
+ "learning_rate": 2.906661725996976e-07,
2787
+ "loss": 0.6652,
2788
+ "step": 397
2789
+ },
2790
+ {
2791
+ "epoch": 3.34,
2792
+ "grad_norm": 1.7977795864445705,
2793
+ "learning_rate": 2.82692909097399e-07,
2794
+ "loss": 0.6455,
2795
+ "step": 398
2796
+ },
2797
+ {
2798
+ "epoch": 3.35,
2799
+ "grad_norm": 1.777376679638967,
2800
+ "learning_rate": 2.7482397168545895e-07,
2801
+ "loss": 0.6592,
2802
+ "step": 399
2803
+ },
2804
+ {
2805
+ "epoch": 3.36,
2806
+ "grad_norm": 1.806389217351911,
2807
+ "learning_rate": 2.670597306037412e-07,
2808
+ "loss": 0.6606,
2809
+ "step": 400
2810
+ },
2811
+ {
2812
+ "epoch": 3.37,
2813
+ "grad_norm": 1.773333434653589,
2814
+ "learning_rate": 2.59400551166063e-07,
2815
+ "loss": 0.6576,
2816
+ "step": 401
2817
+ },
2818
+ {
2819
+ "epoch": 3.38,
2820
+ "grad_norm": 1.7728777287155046,
2821
+ "learning_rate": 2.5184679374300553e-07,
2822
+ "loss": 0.6606,
2823
+ "step": 402
2824
+ },
2825
+ {
2826
+ "epoch": 3.39,
2827
+ "grad_norm": 1.83343142007096,
2828
+ "learning_rate": 2.4439881374496016e-07,
2829
+ "loss": 0.6713,
2830
+ "step": 403
2831
+ },
2832
+ {
2833
+ "epoch": 3.4,
2834
+ "grad_norm": 1.8119712073997163,
2835
+ "learning_rate": 2.3705696160540303e-07,
2836
+ "loss": 0.6596,
2837
+ "step": 404
2838
+ },
2839
+ {
2840
+ "epoch": 3.4,
2841
+ "grad_norm": 1.7575305127120062,
2842
+ "learning_rate": 2.298215827644118e-07,
2843
+ "loss": 0.6582,
2844
+ "step": 405
2845
+ },
2846
+ {
2847
+ "epoch": 3.41,
2848
+ "grad_norm": 1.80965570055429,
2849
+ "learning_rate": 2.2269301765240558e-07,
2850
+ "loss": 0.6508,
2851
+ "step": 406
2852
+ },
2853
+ {
2854
+ "epoch": 3.42,
2855
+ "grad_norm": 1.8329895956407685,
2856
+ "learning_rate": 2.1567160167413503e-07,
2857
+ "loss": 0.6657,
2858
+ "step": 407
2859
+ },
2860
+ {
2861
+ "epoch": 3.43,
2862
+ "grad_norm": 1.8295154972235375,
2863
+ "learning_rate": 2.0875766519289436e-07,
2864
+ "loss": 0.6602,
2865
+ "step": 408
2866
+ },
2867
+ {
2868
+ "epoch": 3.44,
2869
+ "grad_norm": 1.778360539334375,
2870
+ "learning_rate": 2.0195153351498325e-07,
2871
+ "loss": 0.6672,
2872
+ "step": 409
2873
+ },
2874
+ {
2875
+ "epoch": 3.45,
2876
+ "grad_norm": 1.8281360399477038,
2877
+ "learning_rate": 1.9525352687439548e-07,
2878
+ "loss": 0.6713,
2879
+ "step": 410
2880
+ },
2881
+ {
2882
+ "epoch": 3.46,
2883
+ "grad_norm": 1.798281276385492,
2884
+ "learning_rate": 1.886639604177573e-07,
2885
+ "loss": 0.6589,
2886
+ "step": 411
2887
+ },
2888
+ {
2889
+ "epoch": 3.46,
2890
+ "grad_norm": 1.8101646090365584,
2891
+ "learning_rate": 1.821831441894939e-07,
2892
+ "loss": 0.6576,
2893
+ "step": 412
2894
+ },
2895
+ {
2896
+ "epoch": 3.47,
2897
+ "grad_norm": 1.8163930084993238,
2898
+ "learning_rate": 1.7581138311724754e-07,
2899
+ "loss": 0.6509,
2900
+ "step": 413
2901
+ },
2902
+ {
2903
+ "epoch": 3.48,
2904
+ "grad_norm": 1.7889849857989786,
2905
+ "learning_rate": 1.6954897699752394e-07,
2906
+ "loss": 0.6654,
2907
+ "step": 414
2908
+ },
2909
+ {
2910
+ "epoch": 3.49,
2911
+ "grad_norm": 1.7753176697331132,
2912
+ "learning_rate": 1.6339622048159198e-07,
2913
+ "loss": 0.6555,
2914
+ "step": 415
2915
+ },
2916
+ {
2917
+ "epoch": 3.5,
2918
+ "grad_norm": 1.758833276503715,
2919
+ "learning_rate": 1.5735340306161752e-07,
2920
+ "loss": 0.665,
2921
+ "step": 416
2922
+ },
2923
+ {
2924
+ "epoch": 3.51,
2925
+ "grad_norm": 1.7863343585516815,
2926
+ "learning_rate": 1.514208090570432e-07,
2927
+ "loss": 0.6484,
2928
+ "step": 417
2929
+ },
2930
+ {
2931
+ "epoch": 3.51,
2932
+ "grad_norm": 1.7763079205782726,
2933
+ "learning_rate": 1.4559871760121108e-07,
2934
+ "loss": 0.6562,
2935
+ "step": 418
2936
+ },
2937
+ {
2938
+ "epoch": 3.52,
2939
+ "grad_norm": 1.8490593873759873,
2940
+ "learning_rate": 1.3988740262822847e-07,
2941
+ "loss": 0.6497,
2942
+ "step": 419
2943
+ },
2944
+ {
2945
+ "epoch": 3.53,
2946
+ "grad_norm": 1.7753823119901868,
2947
+ "learning_rate": 1.3428713286008005e-07,
2948
+ "loss": 0.6534,
2949
+ "step": 420
2950
+ },
2951
+ {
2952
+ "epoch": 3.54,
2953
+ "grad_norm": 1.7671712087628604,
2954
+ "learning_rate": 1.2879817179398375e-07,
2955
+ "loss": 0.6519,
2956
+ "step": 421
2957
+ },
2958
+ {
2959
+ "epoch": 3.55,
2960
+ "grad_norm": 1.7594428378082356,
2961
+ "learning_rate": 1.2342077768999372e-07,
2962
+ "loss": 0.6519,
2963
+ "step": 422
2964
+ },
2965
+ {
2966
+ "epoch": 3.56,
2967
+ "grad_norm": 1.767897963166057,
2968
+ "learning_rate": 1.1815520355884679e-07,
2969
+ "loss": 0.6528,
2970
+ "step": 423
2971
+ },
2972
+ {
2973
+ "epoch": 3.57,
2974
+ "grad_norm": 1.7463739318936164,
2975
+ "learning_rate": 1.130016971500622e-07,
2976
+ "loss": 0.6582,
2977
+ "step": 424
2978
+ },
2979
+ {
2980
+ "epoch": 3.57,
2981
+ "grad_norm": 1.747840277010472,
2982
+ "learning_rate": 1.0796050094027954e-07,
2983
+ "loss": 0.6661,
2984
+ "step": 425
2985
+ },
2986
+ {
2987
+ "epoch": 3.58,
2988
+ "grad_norm": 1.8160480622182698,
2989
+ "learning_rate": 1.0303185212185485e-07,
2990
+ "loss": 0.646,
2991
+ "step": 426
2992
+ },
2993
+ {
2994
+ "epoch": 3.59,
2995
+ "grad_norm": 1.7568873705777095,
2996
+ "learning_rate": 9.821598259169729e-08,
2997
+ "loss": 0.6554,
2998
+ "step": 427
2999
+ },
3000
+ {
3001
+ "epoch": 3.6,
3002
+ "grad_norm": 1.733832059747267,
3003
+ "learning_rate": 9.351311894036014e-08,
3004
+ "loss": 0.6632,
3005
+ "step": 428
3006
+ },
3007
+ {
3008
+ "epoch": 3.61,
3009
+ "grad_norm": 1.804637277135235,
3010
+ "learning_rate": 8.892348244137788e-08,
3011
+ "loss": 0.66,
3012
+ "step": 429
3013
+ },
3014
+ {
3015
+ "epoch": 3.62,
3016
+ "grad_norm": 1.767868039735343,
3017
+ "learning_rate": 8.444728904085737e-08,
3018
+ "loss": 0.659,
3019
+ "step": 430
3020
+ },
3021
+ {
3022
+ "epoch": 3.63,
3023
+ "grad_norm": 1.770931658466082,
3024
+ "learning_rate": 8.008474934731447e-08,
3025
+ "loss": 0.668,
3026
+ "step": 431
3027
+ },
3028
+ {
3029
+ "epoch": 3.63,
3030
+ "grad_norm": 1.7732670135950312,
3031
+ "learning_rate": 7.583606862176713e-08,
3032
+ "loss": 0.6548,
3033
+ "step": 432
3034
+ },
3035
+ {
3036
+ "epoch": 3.64,
3037
+ "grad_norm": 1.7259860505689657,
3038
+ "learning_rate": 7.170144676807683e-08,
3039
+ "loss": 0.6318,
3040
+ "step": 433
3041
+ },
3042
+ {
3043
+ "epoch": 3.65,
3044
+ "grad_norm": 1.7392331188224266,
3045
+ "learning_rate": 6.768107832354292e-08,
3046
+ "loss": 0.6636,
3047
+ "step": 434
3048
+ },
3049
+ {
3050
+ "epoch": 3.66,
3051
+ "grad_norm": 1.7732212376542704,
3052
+ "learning_rate": 6.377515244974903e-08,
3053
+ "loss": 0.6626,
3054
+ "step": 435
3055
+ },
3056
+ {
3057
+ "epoch": 3.67,
3058
+ "grad_norm": 1.7335582830409095,
3059
+ "learning_rate": 5.99838529236646e-08,
3060
+ "loss": 0.668,
3061
+ "step": 436
3062
+ },
3063
+ {
3064
+ "epoch": 3.68,
3065
+ "grad_norm": 1.7716856700895114,
3066
+ "learning_rate": 5.6307358128994685e-08,
3067
+ "loss": 0.667,
3068
+ "step": 437
3069
+ },
3070
+ {
3071
+ "epoch": 3.69,
3072
+ "grad_norm": 1.7617896255786891,
3073
+ "learning_rate": 5.274584104779157e-08,
3074
+ "loss": 0.6538,
3075
+ "step": 438
3076
+ },
3077
+ {
3078
+ "epoch": 3.69,
3079
+ "grad_norm": 1.7528019015815823,
3080
+ "learning_rate": 4.929946925231077e-08,
3081
+ "loss": 0.6534,
3082
+ "step": 439
3083
+ },
3084
+ {
3085
+ "epoch": 3.7,
3086
+ "grad_norm": 1.8117296265464948,
3087
+ "learning_rate": 4.5968404897130944e-08,
3088
+ "loss": 0.6674,
3089
+ "step": 440
3090
+ },
3091
+ {
3092
+ "epoch": 3.71,
3093
+ "grad_norm": 1.749044793771054,
3094
+ "learning_rate": 4.27528047115211e-08,
3095
+ "loss": 0.6682,
3096
+ "step": 441
3097
+ },
3098
+ {
3099
+ "epoch": 3.72,
3100
+ "grad_norm": 1.7454523412078409,
3101
+ "learning_rate": 3.965281999206899e-08,
3102
+ "loss": 0.6601,
3103
+ "step": 442
3104
+ },
3105
+ {
3106
+ "epoch": 3.73,
3107
+ "grad_norm": 1.7598878691389603,
3108
+ "learning_rate": 3.666859659556016e-08,
3109
+ "loss": 0.6603,
3110
+ "step": 443
3111
+ },
3112
+ {
3113
+ "epoch": 3.74,
3114
+ "grad_norm": 1.7046387508749583,
3115
+ "learning_rate": 3.3800274932117294e-08,
3116
+ "loss": 0.6518,
3117
+ "step": 444
3118
+ },
3119
+ {
3120
+ "epoch": 3.75,
3121
+ "grad_norm": 1.7163795248428233,
3122
+ "learning_rate": 3.1047989958592203e-08,
3123
+ "loss": 0.6651,
3124
+ "step": 445
3125
+ },
3126
+ {
3127
+ "epoch": 3.75,
3128
+ "grad_norm": 1.777257207147479,
3129
+ "learning_rate": 2.841187117221672e-08,
3130
+ "loss": 0.6558,
3131
+ "step": 446
3132
+ },
3133
+ {
3134
+ "epoch": 3.76,
3135
+ "grad_norm": 1.8219202465976836,
3136
+ "learning_rate": 2.5892042604510614e-08,
3137
+ "loss": 0.6508,
3138
+ "step": 447
3139
+ },
3140
+ {
3141
+ "epoch": 3.77,
3142
+ "grad_norm": 1.7767451714812037,
3143
+ "learning_rate": 2.348862281544323e-08,
3144
+ "loss": 0.6509,
3145
+ "step": 448
3146
+ },
3147
+ {
3148
+ "epoch": 3.78,
3149
+ "grad_norm": 1.7465806936718902,
3150
+ "learning_rate": 2.1201724887858488e-08,
3151
+ "loss": 0.6523,
3152
+ "step": 449
3153
+ },
3154
+ {
3155
+ "epoch": 3.79,
3156
+ "grad_norm": 1.7329527459099043,
3157
+ "learning_rate": 1.9031456422151374e-08,
3158
+ "loss": 0.6404,
3159
+ "step": 450
3160
+ },
3161
+ {
3162
+ "epoch": 3.8,
3163
+ "grad_norm": 1.7965434015907633,
3164
+ "learning_rate": 1.6977919531207533e-08,
3165
+ "loss": 0.6603,
3166
+ "step": 451
3167
+ },
3168
+ {
3169
+ "epoch": 3.81,
3170
+ "grad_norm": 1.7618638033364344,
3171
+ "learning_rate": 1.5041210835596288e-08,
3172
+ "loss": 0.6421,
3173
+ "step": 452
3174
+ },
3175
+ {
3176
+ "epoch": 3.81,
3177
+ "grad_norm": 1.7717100092665263,
3178
+ "learning_rate": 1.3221421459027329e-08,
3179
+ "loss": 0.6656,
3180
+ "step": 453
3181
+ },
3182
+ {
3183
+ "epoch": 3.82,
3184
+ "grad_norm": 1.7605568107436471,
3185
+ "learning_rate": 1.1518637024061086e-08,
3186
+ "loss": 0.6668,
3187
+ "step": 454
3188
+ },
3189
+ {
3190
+ "epoch": 3.83,
3191
+ "grad_norm": 1.751907548134551,
3192
+ "learning_rate": 9.932937648081397e-09,
3193
+ "loss": 0.6579,
3194
+ "step": 455
3195
+ },
3196
+ {
3197
+ "epoch": 3.84,
3198
+ "grad_norm": 1.7386986707922565,
3199
+ "learning_rate": 8.464397939524915e-09,
3200
+ "loss": 0.6703,
3201
+ "step": 456
3202
+ },
3203
+ {
3204
+ "epoch": 3.85,
3205
+ "grad_norm": 1.7643987709822369,
3206
+ "learning_rate": 7.113086994372242e-09,
3207
+ "loss": 0.666,
3208
+ "step": 457
3209
+ },
3210
+ {
3211
+ "epoch": 3.86,
3212
+ "grad_norm": 1.7296344516569304,
3213
+ "learning_rate": 5.879068392894427e-09,
3214
+ "loss": 0.6522,
3215
+ "step": 458
3216
+ },
3217
+ {
3218
+ "epoch": 3.87,
3219
+ "grad_norm": 1.7593863922129787,
3220
+ "learning_rate": 4.762400196664518e-09,
3221
+ "loss": 0.6586,
3222
+ "step": 459
3223
+ },
3224
+ {
3225
+ "epoch": 3.87,
3226
+ "grad_norm": 1.7608500271319567,
3227
+ "learning_rate": 3.763134945823088e-09,
3228
+ "loss": 0.6689,
3229
+ "step": 460
3230
+ },
3231
+ {
3232
+ "epoch": 3.88,
3233
+ "grad_norm": 1.7248718265179743,
3234
+ "learning_rate": 2.8813196566079836e-09,
3235
+ "loss": 0.6476,
3236
+ "step": 461
3237
+ },
3238
+ {
3239
+ "epoch": 3.89,
3240
+ "grad_norm": 1.7430786741620756,
3241
+ "learning_rate": 2.116995819140821e-09,
3242
+ "loss": 0.6636,
3243
+ "step": 462
3244
+ },
3245
+ {
3246
+ "epoch": 3.9,
3247
+ "grad_norm": 1.7541283862977322,
3248
+ "learning_rate": 1.4701993954760462e-09,
3249
+ "loss": 0.6639,
3250
+ "step": 463
3251
+ },
3252
+ {
3253
+ "epoch": 3.91,
3254
+ "grad_norm": 1.7203096912350941,
3255
+ "learning_rate": 9.409608179078433e-10,
3256
+ "loss": 0.6475,
3257
+ "step": 464
3258
+ },
3259
+ {
3260
+ "epoch": 3.92,
3261
+ "grad_norm": 1.7295283175572225,
3262
+ "learning_rate": 5.293049875393363e-10,
3263
+ "loss": 0.6589,
3264
+ "step": 465
3265
+ },
3266
+ {
3267
+ "epoch": 3.93,
3268
+ "grad_norm": 1.7037779795629253,
3269
+ "learning_rate": 2.3525127310936035e-10,
3270
+ "loss": 0.6521,
3271
+ "step": 466
3272
+ },
3273
+ {
3274
+ "epoch": 3.93,
3275
+ "grad_norm": 1.7513315633630457,
3276
+ "learning_rate": 5.88135100831888e-11,
3277
+ "loss": 0.6556,
3278
+ "step": 467
3279
+ },
3280
+ {
3281
+ "epoch": 3.94,
3282
+ "grad_norm": 1.7159722803516417,
3283
+ "learning_rate": 0.0,
3284
+ "loss": 0.6477,
3285
+ "step": 468
3286
+ }
3287
+ ],
3288
+ "logging_steps": 1,
3289
+ "max_steps": 468,
3290
+ "num_input_tokens_seen": 0,
3291
+ "num_train_epochs": 4,
3292
+ "save_steps": 117,
3293
+ "total_flos": 783498671554560.0,
3294
+ "train_batch_size": 2,
3295
+ "trial_name": null,
3296
+ "trial_params": null
3297
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d7fa71f886375a49c8138f5964d4f7cd59fe86af7b3bafb59f0884948d34f423
3
+ size 7608