Chriskuei committed
Commit 03d29ec
1 Parent(s): 33b7510
added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "[PAD]": 68419
+ }
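
The single added token extends the base vocabulary to 68,420 entries, with `[PAD]` occupying the last id (68419). A minimal sketch of checking the mapping, assuming a local checkout of this repo at the hypothetical path `./gogpt-7b-v4`:

```python
from transformers import LlamaTokenizer

# "./gogpt-7b-v4" is a placeholder for a local checkout of this repo
tok = LlamaTokenizer.from_pretrained("./gogpt-7b-v4")
print(tok.convert_tokens_to_ids("[PAD]"))  # expect 68419, per added_tokens.json
print(len(tok))                            # expect 68420, matching config.json vocab_size
```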
config.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "_name_or_path": "/data/searchgpt/pretrained_models/gogpt-7b-v4",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "bos_token_id": 0,
+   "eos_token_id": 1,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "max_position_embeddings": 2048,
+   "max_sequence_length": 2048,
+   "model_type": "llama",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "pad_token_id": -1,
+   "rms_norm_eps": 1e-06,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.29.1",
+   "use_cache": false,
+   "vocab_size": 68420
+ }
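
The config describes a standard 7B-class LLaMA stack (32 layers, hidden size 4096, 32 heads) with the enlarged 68,420-token vocabulary and bfloat16 weights; `pad_token_id` is -1 here, and padding appears to be resolved instead from the tokenizer files below. A sketch of loading the model, under the same hypothetical local path:

```python
import torch
from transformers import LlamaForCausalLM

# "./gogpt-7b-v4" is a placeholder for a local checkout of this repo
model = LlamaForCausalLM.from_pretrained(
    "./gogpt-7b-v4",
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in config.json
)
assert model.config.vocab_size == 68420
# the input embedding matrix should have one row per vocabulary entry
assert model.model.embed_tokens.weight.shape[0] == 68420
```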
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 0,
+   "eos_token_id": 1,
+   "pad_token_id": 0,
+   "transformers_version": "4.29.1"
+ }
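
generation_config.json carries the decoding defaults derived from the model config (`_from_model_config`). It can be inspected on its own; a sketch, with the same path assumption:

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("./gogpt-7b-v4")  # placeholder path
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id, gen_cfg.pad_token_id)  # 0 1 0
```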
pytorch_model-00001-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:36f1e6fc7469a9cece4c656da6deb8d0ae9f9387a294985a4e7b90960d125833
+ size 9970881453
pytorch_model-00002-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6d9affcbbe40694fc9b8fc0c70a947323332a20e08b4cbad1c1378237760769
+ size 4102774289
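
Both `.bin` entries are Git LFS pointer files (spec version, object id, byte size), not the weights themselves; `git lfs pull` fetches the real shards. A sketch of verifying a fetched shard against the digest recorded in its pointer:

```python
import hashlib

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    """Stream a large file through SHA-256 without loading it into memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

# Expected digest copied from the LFS pointer for shard 1 above.
expected = "36f1e6fc7469a9cece4c656da6deb8d0ae9f9387a294985a4e7b90960d125833"
assert sha256_of("pytorch_model-00001-of-00002.bin") == expected
```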
pytorch_model.bin.index.json ADDED
@@ -0,0 +1,330 @@
+ {
+   "metadata": {
+     "total_size": 14073540608
+   },
+   "weight_map": {
+     "lm_head.weight": "pytorch_model-00002-of-00002.bin",
+     "model.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.22.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.22.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.22.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.22.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.23.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.23.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.24.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.28.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.28.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.28.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.28.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.28.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.28.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.28.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.28.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.28.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+     "model.layers.28.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.29.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.29.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.29.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.29.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.29.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.29.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.29.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.29.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.29.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+     "model.layers.29.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.30.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.30.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.30.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.30.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.30.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.30.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.30.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.30.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.30.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+     "model.layers.30.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.31.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.31.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.31.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.31.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.31.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.31.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.31.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.31.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.31.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+     "model.layers.31.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.norm.weight": "pytorch_model-00002-of-00002.bin"
+   }
+ }
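
The index maps every parameter name to the shard that holds it; `from_pretrained` consults it automatically, but it is easy to inspect by hand. Note that the shard boundary falls inside layer 23 (its q/k/v projections sit in shard 1 while the rest of the layer is in shard 2), and that `total_size` counts tensor bytes only, which appears to be why it is slightly below the sum of the two shard file sizes. A sketch:

```python
import json
from collections import Counter

with open("pytorch_model.bin.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])        # 14073540608 bytes of tensors
print(Counter(index["weight_map"].values()))  # number of tensors per shard file
print(index["weight_map"]["lm_head.weight"])  # pytorch_model-00002-of-00002.bin
```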
runs/Jul17_18-12-40_715436/1689588800.246925/events.out.tfevents.1689588800.715436.92583.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cd39bf544177964a0e2d6c4481eb71693ac4f379250846c277e1986ae11b4ad6
+ size 6093
runs/Jul17_18-12-40_715436/events.out.tfevents.1689588800.715436.92583.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7dfe2fd3a6ebcb1cb31fc1e3e0954f383dd123cafa3f826818dfcc8ff133a7c7
+ size 49805
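
The `runs/` files are TensorBoard event logs (also stored as LFS pointers here). Once fetched, they can be read with TensorBoard's `EventAccumulator`; a sketch, assuming the `tensorboard` package is installed:

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point at the run directory, not an individual event file
acc = EventAccumulator("runs/Jul17_18-12-40_715436")
acc.Reload()
print(acc.Tags())  # available tag groups, e.g. scalar series logged by the Trainer
```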
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "[PAD]",
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd5ab2c18ed07a14f3aa55518dcf08bbee4fe86c9423e86ba61f60a82ab31fa7
+ size 1077901
tokenizer_config.json ADDED
@@ -0,0 +1,35 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "bos_token": {
+     "__type": "AddedToken",
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "clean_up_tokenization_spaces": false,
+   "eos_token": {
+     "__type": "AddedToken",
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "model_max_length": 512,
+   "pad_token": null,
+   "padding_side": "right",
+   "sp_model_kwargs": {},
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": {
+     "__type": "AddedToken",
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "use_fast": false
+ }
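
tokenizer_config.json prepends `<s>` but appends no `</s>` (`add_bos_token` true, `add_eos_token` false), caps `model_max_length` at 512, and leaves `pad_token` null; the `[PAD]` defined in special_tokens_map.json and added_tokens.json fills that role when all the tokenizer files are loaded together. A sketch, under the same hypothetical local path:

```python
from transformers import LlamaTokenizer

tok = LlamaTokenizer.from_pretrained("./gogpt-7b-v4")  # placeholder path
print(tok.model_max_length)        # 512, per tokenizer_config.json
print(tok.pad_token)               # "[PAD]", merged in from special_tokens_map.json
ids = tok("hello")["input_ids"]
print(ids[0] == tok.bos_token_id)  # True: BOS is prepended, no EOS appended
```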
trainer_state.json ADDED
@@ -0,0 +1,1759 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 0.9998273778698429,
+   "global_step": 2896,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {"epoch": 0.0, "learning_rate": 1.993093922651934e-05, "loss": 1.9454, "step": 10},
+     {"epoch": 0.01, "learning_rate": 1.9861878453038676e-05, "loss": 1.9111, "step": 20},
+     {"epoch": 0.01, "learning_rate": 1.979281767955801e-05, "loss": 1.9728, "step": 30},
+     {"epoch": 0.01, "learning_rate": 1.972375690607735e-05, "loss": 1.8986, "step": 40},
+     {"epoch": 0.02, "learning_rate": 1.9654696132596688e-05, "loss": 1.8296, "step": 50},
+     {"epoch": 0.02, "learning_rate": 1.9585635359116023e-05, "loss": 1.8251, "step": 60},
+     {"epoch": 0.02, "learning_rate": 1.9516574585635362e-05, "loss": 1.8381, "step": 70},
+     {"epoch": 0.03, "learning_rate": 1.9447513812154697e-05, "loss": 1.8186, "step": 80},
+     {"epoch": 0.03, "learning_rate": 1.9378453038674036e-05, "loss": 1.862, "step": 90},
+     {"epoch": 0.03, "learning_rate": 1.930939226519337e-05, "loss": 1.8683, "step": 100},
+     {"epoch": 0.04, "learning_rate": 1.924033149171271e-05, "loss": 1.7295, "step": 110},
+     {"epoch": 0.04, "learning_rate": 1.9171270718232045e-05, "loss": 1.8422, "step": 120},
+     {"epoch": 0.04, "learning_rate": 1.910220994475138e-05, "loss": 1.8391, "step": 130},
+     {"epoch": 0.05, "learning_rate": 1.903314917127072e-05, "loss": 1.7898, "step": 140},
+     {"epoch": 0.05, "learning_rate": 1.8964088397790058e-05, "loss": 1.8654, "step": 150},
+     {"epoch": 0.06, "learning_rate": 1.8895027624309393e-05, "loss": 1.7978, "step": 160},
+     {"epoch": 0.06, "learning_rate": 1.882596685082873e-05, "loss": 1.8074, "step": 170},
+     {"epoch": 0.06, "learning_rate": 1.8756906077348067e-05, "loss": 1.8771, "step": 180},
+     {"epoch": 0.07, "learning_rate": 1.8687845303867406e-05, "loss": 1.8324, "step": 190},
+     {"epoch": 0.07, "learning_rate": 1.861878453038674e-05, "loss": 1.8138, "step": 200},
+     {"epoch": 0.07, "learning_rate": 1.854972375690608e-05, "loss": 1.7218, "step": 210},
+     {"epoch": 0.08, "learning_rate": 1.8480662983425418e-05, "loss": 1.7777, "step": 220},
+     {"epoch": 0.08, "learning_rate": 1.8411602209944753e-05, "loss": 1.9651, "step": 230},
+     {"epoch": 0.08, "learning_rate": 1.834254143646409e-05, "loss": 1.7814, "step": 240},
+     {"epoch": 0.09, "learning_rate": 1.8273480662983427e-05, "loss": 1.8694, "step": 250},
+     {"epoch": 0.09, "learning_rate": 1.8204419889502766e-05, "loss": 1.8094, "step": 260},
+     {"epoch": 0.09, "learning_rate": 1.81353591160221e-05, "loss": 1.8144, "step": 270},
+     {"epoch": 0.1, "learning_rate": 1.8066298342541436e-05, "loss": 1.7834, "step": 280},
+     {"epoch": 0.1, "learning_rate": 1.7997237569060775e-05, "loss": 1.8326, "step": 290},
+     {"epoch": 0.1, "learning_rate": 1.7928176795580114e-05, "loss": 1.8524, "step": 300},
+     {"epoch": 0.11, "learning_rate": 1.785911602209945e-05, "loss": 1.8125, "step": 310},
+     {"epoch": 0.11, "learning_rate": 1.7790055248618784e-05, "loss": 1.7685, "step": 320},
+     {"epoch": 0.11, "learning_rate": 1.7720994475138123e-05, "loss": 1.8259, "step": 330},
+     {"epoch": 0.12, "learning_rate": 1.765193370165746e-05, "loss": 1.8165, "step": 340},
+     {"epoch": 0.12, "learning_rate": 1.7582872928176797e-05, "loss": 1.734, "step": 350},
+     {"epoch": 0.12, "learning_rate": 1.7513812154696135e-05, "loss": 1.8719, "step": 360},
+     {"epoch": 0.13, "learning_rate": 1.744475138121547e-05, "loss": 1.84, "step": 370},
+     {"epoch": 0.13, "learning_rate": 1.7375690607734806e-05, "loss": 1.739, "step": 380},
+     {"epoch": 0.13, "learning_rate": 1.7306629834254145e-05, "loss": 1.7121, "step": 390},
+     {"epoch": 0.14, "learning_rate": 1.7237569060773483e-05, "loss": 1.7994, "step": 400},
+     {"epoch": 0.14, "learning_rate": 1.716850828729282e-05, "loss": 1.8029, "step": 410},
+     {"epoch": 0.15, "learning_rate": 1.7099447513812154e-05, "loss": 1.7876, "step": 420},
+     {"epoch": 0.15, "learning_rate": 1.7030386740331492e-05, "loss": 1.7399, "step": 430},
+     {"epoch": 0.15, "learning_rate": 1.696132596685083e-05, "loss": 1.8004, "step": 440},
+     {"epoch": 0.16, "learning_rate": 1.6892265193370166e-05, "loss": 1.8154, "step": 450},
+     {"epoch": 0.16, "learning_rate": 1.6823204419889505e-05, "loss": 1.7839, "step": 460},
+     {"epoch": 0.16, "learning_rate": 1.675414364640884e-05, "loss": 1.7881, "step": 470},
+     {"epoch": 0.17, "learning_rate": 1.668508287292818e-05, "loss": 1.7409, "step": 480},
+     {"epoch": 0.17, "learning_rate": 1.6616022099447514e-05, "loss": 1.7907, "step": 490},
+     {"epoch": 0.17, "learning_rate": 1.6546961325966853e-05, "loss": 1.8985, "step": 500},
+     {"epoch": 0.18, "learning_rate": 1.647790055248619e-05, "loss": 1.8918, "step": 510},
+     {"epoch": 0.18, "learning_rate": 1.6408839779005527e-05, "loss": 1.8328, "step": 520},
+     {"epoch": 0.18, "learning_rate": 1.6339779005524862e-05, "loss": 1.7157, "step": 530},
+     {"epoch": 0.19, "learning_rate": 1.62707182320442e-05, "loss": 1.6767, "step": 540},
+     {"epoch": 0.19, "learning_rate": 1.620165745856354e-05, "loss": 1.7403, "step": 550},
+     {"epoch": 0.19, "learning_rate": 1.6132596685082875e-05, "loss": 1.8535, "step": 560},
+     {"epoch": 0.2, "learning_rate": 1.606353591160221e-05, "loss": 1.8406, "step": 570},
+     {"epoch": 0.2, "learning_rate": 1.599447513812155e-05, "loss": 1.7262, "step": 580},
+     {"epoch": 0.2, "learning_rate": 1.5925414364640884e-05, "loss": 1.7585, "step": 590},
+     {"epoch": 0.21, "learning_rate": 1.5856353591160222e-05, "loss": 1.7821, "step": 600},
+     {"epoch": 0.21, "learning_rate": 1.578729281767956e-05, "loss": 1.839, "step": 610},
+     {"epoch": 0.21, "learning_rate": 1.5718232044198896e-05, "loss": 1.8768, "step": 620},
+     {"epoch": 0.22, "learning_rate": 1.564917127071823e-05, "loss": 1.772, "step": 630},
+     {"epoch": 0.22, "learning_rate": 1.558011049723757e-05, "loss": 1.7883, "step": 640},
+     {"epoch": 0.22, "learning_rate": 1.551104972375691e-05, "loss": 1.6712, "step": 650},
+     {"epoch": 0.23, "learning_rate": 1.5441988950276244e-05, "loss": 1.7631, "step": 660},
+     {"epoch": 0.23, "learning_rate": 1.537292817679558e-05, "loss": 1.7849, "step": 670},
+     {"epoch": 0.23, "learning_rate": 1.5303867403314918e-05, "loss": 1.6738, "step": 680},
+     {"epoch": 0.24, "learning_rate": 1.5234806629834255e-05, "loss": 1.8382, "step": 690},
+     {"epoch": 0.24, "learning_rate": 1.5165745856353594e-05, "loss": 1.7811, "step": 700},
+     {"epoch": 0.25, "learning_rate": 1.5096685082872929e-05, "loss": 1.7594, "step": 710},
+     {"epoch": 0.25, "learning_rate": 1.5027624309392266e-05, "loss": 1.7665, "step": 720},
+     {"epoch": 0.25, "learning_rate": 1.4958563535911603e-05, "loss": 1.7435, "step": 730},
+     {"epoch": 0.26, "learning_rate": 1.4889502762430942e-05, "loss": 1.7131, "step": 740},
+     {"epoch": 0.26, "learning_rate": 1.4820441988950279e-05, "loss": 1.8457, "step": 750},
+     {"epoch": 0.26, "learning_rate": 1.4751381215469614e-05, "loss": 1.8551, "step": 760},
+     {"epoch": 0.27, "learning_rate": 1.468232044198895e-05, "loss": 1.7981, "step": 770},
+     {"epoch": 0.27, "learning_rate": 1.4613259668508288e-05, "loss": 1.8244, "step": 780},
+     {"epoch": 0.27, "learning_rate": 1.4544198895027626e-05, "loss": 1.8683, "step": 790},
+     {"epoch": 0.28, "learning_rate": 1.4475138121546963e-05, "loss": 1.7665, "step": 800},
+     {"epoch": 0.28, "learning_rate": 1.4406077348066299e-05, "loss": 1.7605, "step": 810},
+     {"epoch": 0.28, "learning_rate": 1.4337016574585636e-05, "loss": 1.8339, "step": 820},
+     {"epoch": 0.29, "learning_rate": 1.4267955801104974e-05, "loss": 1.8634, "step": 830},
+     {"epoch": 0.29, "learning_rate": 1.4198895027624311e-05, "loss": 1.7421, "step": 840},
+     {"epoch": 0.29, "learning_rate": 1.4129834254143648e-05, "loss": 1.7681, "step": 850},
+     {"epoch": 0.3, "learning_rate": 1.4060773480662983e-05, "loss": 1.749, "step": 860},
+     {"epoch": 0.3, "learning_rate": 1.399171270718232e-05, "loss": 1.7926, "step": 870},
+     {"epoch": 0.3, "learning_rate": 1.3922651933701659e-05, "loss": 1.7425, "step": 880},
+     {"epoch": 0.31, "learning_rate": 1.3853591160220996e-05, "loss": 1.7008, "step": 890},
+     {"epoch": 0.31, "learning_rate": 1.3784530386740333e-05, "loss": 1.6872, "step": 900},
+     {"epoch": 0.31, "learning_rate": 1.3715469613259668e-05, "loss": 1.7296, "step": 910},
+     {"epoch": 0.32, "learning_rate": 1.3646408839779007e-05, "loss": 1.7473, "step": 920},
+     {"epoch": 0.32, "learning_rate": 1.3577348066298344e-05, "loss": 1.7689, "step": 930},
+     {"epoch": 0.32, "learning_rate": 1.350828729281768e-05, "loss": 1.7871, "step": 940},
+     {"epoch": 0.33, "learning_rate": 1.343922651933702e-05, "loss": 1.7677, "step": 950},
+     {"epoch": 0.33, "learning_rate": 1.3370165745856355e-05, "loss": 1.7477, "step": 960},
+     {"epoch": 0.33, "learning_rate": 1.3301104972375692e-05, "loss": 1.8677, "step": 970},
+     {"epoch": 0.34, "learning_rate": 1.3232044198895029e-05, "loss": 1.8348, "step": 980},
+     {"epoch": 0.34, "learning_rate": 1.3162983425414365e-05, "loss": 1.7448, "step": 990},
+     {"epoch": 0.35, "learning_rate": 1.3093922651933704e-05, "loss": 1.7743, "step": 1000},
+     {"epoch": 0.35, "learning_rate": 1.302486187845304e-05, "loss": 1.6812, "step": 1010},
+     {"epoch": 0.35, "learning_rate": 1.2955801104972376e-05, "loss": 1.7649, "step": 1020},
+     {"epoch": 0.36, "learning_rate": 1.2886740331491713e-05, "loss": 1.8005, "step": 1030},
+     {"epoch": 0.36, "learning_rate": 1.2817679558011052e-05, "loss": 1.8059, "step": 1040},
+     {"epoch": 0.36, "learning_rate": 1.2748618784530389e-05, "loss": 1.7549, "step": 1050},
+     {"epoch": 0.37, "learning_rate": 1.2679558011049724e-05, "loss": 1.7787, "step": 1060},
+     {"epoch": 0.37, "learning_rate": 1.2610497237569061e-05, "loss": 1.722, "step": 1070},
+     {"epoch": 0.37, "learning_rate": 1.25414364640884e-05, "loss": 1.7192, "step": 1080},
+     {"epoch": 0.38, "learning_rate": 1.2472375690607737e-05, "loss": 1.8681, "step": 1090},
+     {"epoch": 0.38, "learning_rate": 1.2403314917127074e-05, "loss": 1.8199, "step": 1100},
+     {"epoch": 0.38, "learning_rate": 1.2334254143646409e-05, "loss": 1.8003, "step": 1110},
+     {"epoch": 0.39, "learning_rate": 1.2265193370165746e-05, "loss": 1.8412, "step": 1120},
+     {"epoch": 0.39, "learning_rate": 1.2196132596685085e-05, "loss": 1.7463, "step": 1130},
+     {"epoch": 0.39, "learning_rate": 1.2127071823204422e-05, "loss": 1.8106, "step": 1140},
+     {"epoch": 0.4, "learning_rate": 1.2058011049723757e-05, "loss": 1.6872, "step": 1150},
+     {"epoch": 0.4, "learning_rate": 1.1988950276243094e-05, "loss": 1.6963, "step": 1160},
+     {"epoch": 0.4, "learning_rate": 1.1919889502762432e-05, "loss": 1.7752, "step": 1170},
+     {"epoch": 0.41, "learning_rate": 1.185082872928177e-05, "loss": 1.8593, "step": 1180},
+     {"epoch": 0.41, "learning_rate": 1.1781767955801106e-05, "loss": 1.7515, "step": 1190},
+     {"epoch": 0.41, "learning_rate": 1.1712707182320442e-05, "loss": 1.7299, "step": 1200},
+     {"epoch": 0.42, "learning_rate": 1.1643646408839779e-05, "loss": 1.7632, "step": 1210},
+     {"epoch": 0.42, "learning_rate": 1.1574585635359117e-05, "loss": 1.7982, "step": 1220},
+     {"epoch": 0.42, "learning_rate": 1.1505524861878454e-05, "loss": 1.7833, "step": 1230},
+     {"epoch": 0.43, "learning_rate": 1.1436464088397791e-05, "loss": 1.7549, "step": 1240},
+     {"epoch": 0.43, "learning_rate": 1.1367403314917126e-05, "loss": 1.726, "step": 1250},
+     {"epoch": 0.44, "learning_rate": 1.1298342541436465e-05, "loss": 1.7221, "step": 1260},
+     {"epoch": 0.44, "learning_rate": 1.1229281767955802e-05, "loss": 1.8503, "step": 1270},
+     {"epoch": 0.44, "learning_rate": 1.1160220994475139e-05, "loss": 1.7096, "step": 1280},
+     {"epoch": 0.45, "learning_rate": 1.1091160220994478e-05, "loss": 1.7261, "step": 1290},
+     {"epoch": 0.45, "learning_rate": 1.1022099447513813e-05, "loss": 1.7823, "step": 1300},
+     {"epoch": 0.45, "learning_rate": 1.095303867403315e-05, "loss": 1.7031, "step": 1310},
+     {"epoch": 0.46, "learning_rate": 1.0883977900552487e-05, "loss": 1.7445, "step": 1320},
+     {"epoch": 0.46, "learning_rate": 1.0814917127071824e-05, "loss": 1.7109, "step": 1330},
+     {"epoch": 0.46, "learning_rate": 1.0745856353591162e-05, "loss": 1.8005, "step": 1340},
+     {"epoch": 0.47, "learning_rate": 1.0676795580110498e-05, "loss": 1.7529, "step": 1350},
+     {"epoch": 0.47, "learning_rate": 1.0607734806629835e-05, "loss": 1.6576, "step": 1360},
+     {"epoch": 0.47, "learning_rate": 1.0538674033149172e-05, "loss": 1.7873, "step": 1370},
+     {"epoch": 0.48, "learning_rate": 1.046961325966851e-05, "loss": 1.7714, "step": 1380},
+     {"epoch": 0.48, "learning_rate": 1.0400552486187847e-05, "loss": 1.7174, "step": 1390},
+     {"epoch": 0.48, "learning_rate": 1.0331491712707182e-05, "loss": 1.6726, "step": 1400},
+     {"epoch": 0.49, "learning_rate": 1.026243093922652e-05, "loss": 1.718, "step": 1410},
+     {"epoch": 0.49, "learning_rate": 1.0193370165745858e-05, "loss": 1.7388, "step": 1420},
+     {"epoch": 0.49, "learning_rate": 1.0124309392265195e-05, "loss": 1.7361, "step": 1430},
+     {"epoch": 0.5, "learning_rate": 1.0055248618784532e-05, "loss": 1.7668, "step": 1440},
+     {"epoch": 0.5, "learning_rate": 9.986187845303869e-06, "loss": 1.8001, "step": 1450},
+     {"epoch": 0.5, "learning_rate": 9.917127071823204e-06, "loss": 1.7302, "step": 1460},
+     {"epoch": 0.51, "learning_rate": 9.848066298342543e-06, "loss": 1.687, "step": 1470},
+     {"epoch": 0.51, "learning_rate": 9.779005524861878e-06, "loss": 1.6978, "step": 1480},
+     {"epoch": 0.51, "learning_rate": 9.709944751381217e-06, "loss": 1.7913, "step": 1490},
+     {"epoch": 0.52, "learning_rate": 9.640883977900554e-06, "loss": 1.8389, "step": 1500},
+     {"epoch": 0.52, "learning_rate": 9.57182320441989e-06, "loss": 1.8108, "step": 1510},
+     {"epoch": 0.52, "learning_rate": 9.502762430939228e-06, "loss": 1.7494, "step": 1520},
+     {"epoch": 0.53, "learning_rate": 9.433701657458565e-06, "loss": 1.8438, "step": 1530},
+     {"epoch": 0.53, "learning_rate": 9.364640883977902e-06, "loss": 1.8194, "step": 1540},
+     {"epoch": 0.54, "learning_rate": 9.295580110497238e-06, "loss": 1.7495, "step": 1550},
+     {"epoch": 0.54, "learning_rate": 9.226519337016575e-06, "loss": 1.7208, "step": 1560},
+     {"epoch": 0.54, "learning_rate": 9.157458563535912e-06, "loss": 1.706, "step": 1570},
+     {"epoch": 0.55, "learning_rate": 9.08839779005525e-06, "loss": 1.8367, "step": 1580},
+     {"epoch": 0.55, "learning_rate": 9.019337016574586e-06, "loss": 1.6874, "step": 1590},
+     {"epoch": 0.55, "learning_rate": 8.950276243093923e-06, "loss": 1.7478, "step": 1600},
+     {"epoch": 0.56, "learning_rate": 8.88121546961326e-06, "loss": 1.6782, "step": 1610},
+     {"epoch": 0.56, "learning_rate": 8.812154696132597e-06, "loss": 1.8467, "step": 1620},
+     {"epoch": 0.56, "learning_rate": 8.743093922651934e-06, "loss": 1.758, "step": 1630},
+     {"epoch": 0.57, "learning_rate": 8.674033149171271e-06, "loss": 1.7116, "step": 1640},
+     {"epoch": 0.57, "learning_rate": 8.60497237569061e-06, "loss": 1.751, "step": 1650},
+     {"epoch": 0.57, "learning_rate": 8.535911602209945e-06, "loss": 1.7502, "step": 1660},
+     {"epoch": 0.58, "learning_rate": 8.466850828729282e-06, "loss": 1.879, "step": 1670},
+     {"epoch": 0.58, "learning_rate": 8.397790055248619e-06, "loss": 1.8171, "step": 1680},
+     {"epoch": 0.58, "learning_rate": 8.328729281767956e-06, "loss": 1.7586, "step": 1690},
+     {"epoch": 0.59, "learning_rate": 8.259668508287293e-06, "loss": 1.7848, "step": 1700},
+     {"epoch": 0.59, "learning_rate": 8.19060773480663e-06, "loss": 1.7789, "step": 1710},
+     {"epoch": 0.59, "learning_rate": 8.121546961325968e-06, "loss": 1.7104, "step": 1720},
+     {"epoch": 0.6, "learning_rate": 8.052486187845304e-06, "loss": 1.807, "step": 1730},
+     {"epoch": 0.6, "learning_rate": 7.983425414364642e-06, "loss": 1.6772, "step": 1740},
+     {"epoch": 0.6, "learning_rate": 7.914364640883978e-06, "loss": 1.7574, "step": 1750},
+     {"epoch": 0.61, "learning_rate": 7.845303867403316e-06, "loss": 1.7925, "step": 1760},
+     {"epoch": 0.61, "learning_rate": 7.776243093922653e-06, "loss": 1.7109, "step": 1770},
+     {"epoch": 0.61, "learning_rate": 7.70718232044199e-06, "loss": 1.8161, "step": 1780},
+     {"epoch": 0.62, "learning_rate": 7.638121546961327e-06, "loss": 1.716, "step": 1790},
+     {"epoch": 0.62, "learning_rate": 7.569060773480663e-06, "loss": 1.7228, "step": 1800},
+     {"epoch": 0.62, "learning_rate": 7.500000000000001e-06, "loss": 1.865, "step": 1810},
+     {"epoch": 0.63, "learning_rate": 7.430939226519338e-06, "loss": 1.7947, "step": 1820},
+     {"epoch": 0.63, "learning_rate": 7.361878453038674e-06, "loss": 1.8093, "step": 1830},
+     {"epoch": 0.64, "learning_rate": 7.292817679558012e-06, "loss": 1.7586, "step": 1840},
+     {"epoch": 0.64, "learning_rate": 7.223756906077348e-06, "loss": 1.7256, "step": 1850},
+     {"epoch": 0.64, "learning_rate": 7.154696132596686e-06, "loss": 1.7485, "step": 1860},
+     {"epoch": 0.65, "learning_rate": 7.085635359116023e-06, "loss": 1.7597, "step": 1870},
+     {"epoch": 0.65, "learning_rate": 7.01657458563536e-06, "loss": 1.7516, "step": 1880},
+     {"epoch": 0.65, "learning_rate": 6.947513812154697e-06, "loss": 1.7449, "step": 1890},
+     {"epoch": 0.66, "learning_rate": 6.878453038674034e-06, "loss": 1.8367, "step": 1900},
+     {"epoch": 0.66, "learning_rate": 6.809392265193371e-06, "loss": 1.7401, "step": 1910},
+     {"epoch": 0.66, "learning_rate": 6.740331491712708e-06, "loss": 1.8057, "step": 1920},
+     {"epoch": 0.67, "learning_rate": 6.6712707182320445e-06, "loss": 1.7072, "step": 1930},
+     {"epoch": 0.67, "learning_rate": 6.602209944751382e-06, "loss": 1.7182, "step": 1940},
+     {"epoch": 0.67, "learning_rate": 6.5331491712707184e-06, "loss": 1.7331, "step": 1950},
+     {"epoch": 0.68, "learning_rate": 6.464088397790056e-06, "loss": 1.871, "step": 1960},
+     {"epoch": 0.68, "learning_rate": 6.395027624309392e-06, "loss": 1.7339, "step": 1970},
+     {"epoch": 0.68, "learning_rate": 6.32596685082873e-06, "loss": 1.7293, "step": 1980},
+     {"epoch": 0.69, "learning_rate": 6.256906077348067e-06,
1201
+ "loss": 1.8179,
1202
+ "step": 1990
1203
+ },
1204
+ {
1205
+ "epoch": 0.69,
1206
+ "learning_rate": 6.187845303867403e-06,
1207
+ "loss": 1.7189,
1208
+ "step": 2000
1209
+ },
1210
+ {
1211
+ "epoch": 0.69,
1212
+ "learning_rate": 6.118784530386741e-06,
1213
+ "loss": 1.6855,
1214
+ "step": 2010
1215
+ },
1216
+ {
1217
+ "epoch": 0.7,
1218
+ "learning_rate": 6.049723756906077e-06,
1219
+ "loss": 1.8562,
1220
+ "step": 2020
1221
+ },
1222
+ {
1223
+ "epoch": 0.7,
1224
+ "learning_rate": 5.980662983425415e-06,
1225
+ "loss": 1.7805,
1226
+ "step": 2030
1227
+ },
1228
+ {
1229
+ "epoch": 0.7,
1230
+ "learning_rate": 5.911602209944752e-06,
1231
+ "loss": 1.7376,
1232
+ "step": 2040
1233
+ },
1234
+ {
1235
+ "epoch": 0.71,
1236
+ "learning_rate": 5.842541436464089e-06,
1237
+ "loss": 1.7232,
1238
+ "step": 2050
1239
+ },
1240
+ {
1241
+ "epoch": 0.71,
1242
+ "learning_rate": 5.773480662983426e-06,
1243
+ "loss": 1.7372,
1244
+ "step": 2060
1245
+ },
1246
+ {
1247
+ "epoch": 0.71,
1248
+ "learning_rate": 5.704419889502763e-06,
1249
+ "loss": 1.6427,
1250
+ "step": 2070
1251
+ },
1252
+ {
1253
+ "epoch": 0.72,
1254
+ "learning_rate": 5.6353591160221e-06,
1255
+ "loss": 1.6946,
1256
+ "step": 2080
1257
+ },
1258
+ {
1259
+ "epoch": 0.72,
1260
+ "learning_rate": 5.5662983425414375e-06,
1261
+ "loss": 1.7831,
1262
+ "step": 2090
1263
+ },
1264
+ {
1265
+ "epoch": 0.73,
1266
+ "learning_rate": 5.497237569060774e-06,
1267
+ "loss": 1.8121,
1268
+ "step": 2100
1269
+ },
1270
+ {
1271
+ "epoch": 0.73,
1272
+ "learning_rate": 5.4281767955801114e-06,
1273
+ "loss": 1.7363,
1274
+ "step": 2110
1275
+ },
1276
+ {
1277
+ "epoch": 0.73,
1278
+ "learning_rate": 5.3591160220994476e-06,
1279
+ "loss": 1.7434,
1280
+ "step": 2120
1281
+ },
1282
+ {
1283
+ "epoch": 0.74,
1284
+ "learning_rate": 5.290055248618785e-06,
1285
+ "loss": 1.8479,
1286
+ "step": 2130
1287
+ },
1288
+ {
1289
+ "epoch": 0.74,
1290
+ "learning_rate": 5.220994475138122e-06,
1291
+ "loss": 1.7347,
1292
+ "step": 2140
1293
+ },
1294
+ {
1295
+ "epoch": 0.74,
1296
+ "learning_rate": 5.151933701657459e-06,
1297
+ "loss": 1.785,
1298
+ "step": 2150
1299
+ },
1300
+ {
1301
+ "epoch": 0.75,
1302
+ "learning_rate": 5.082872928176796e-06,
1303
+ "loss": 1.8255,
1304
+ "step": 2160
1305
+ },
1306
+ {
1307
+ "epoch": 0.75,
1308
+ "learning_rate": 5.013812154696132e-06,
1309
+ "loss": 1.7684,
1310
+ "step": 2170
1311
+ },
1312
+ {
1313
+ "epoch": 0.75,
1314
+ "learning_rate": 4.94475138121547e-06,
1315
+ "loss": 1.7494,
1316
+ "step": 2180
1317
+ },
1318
+ {
1319
+ "epoch": 0.76,
1320
+ "learning_rate": 4.875690607734807e-06,
1321
+ "loss": 1.6692,
1322
+ "step": 2190
1323
+ },
1324
+ {
1325
+ "epoch": 0.76,
1326
+ "learning_rate": 4.806629834254144e-06,
1327
+ "loss": 1.7653,
1328
+ "step": 2200
1329
+ },
1330
+ {
1331
+ "epoch": 0.76,
1332
+ "learning_rate": 4.737569060773481e-06,
1333
+ "loss": 1.6953,
1334
+ "step": 2210
1335
+ },
1336
+ {
1337
+ "epoch": 0.77,
1338
+ "learning_rate": 4.668508287292818e-06,
1339
+ "loss": 1.8247,
1340
+ "step": 2220
1341
+ },
1342
+ {
1343
+ "epoch": 0.77,
1344
+ "learning_rate": 4.599447513812155e-06,
1345
+ "loss": 1.8188,
1346
+ "step": 2230
1347
+ },
1348
+ {
1349
+ "epoch": 0.77,
1350
+ "learning_rate": 4.530386740331492e-06,
1351
+ "loss": 1.7473,
1352
+ "step": 2240
1353
+ },
1354
+ {
1355
+ "epoch": 0.78,
1356
+ "learning_rate": 4.461325966850829e-06,
1357
+ "loss": 1.6097,
1358
+ "step": 2250
1359
+ },
1360
+ {
1361
+ "epoch": 0.78,
1362
+ "learning_rate": 4.392265193370166e-06,
1363
+ "loss": 1.7937,
1364
+ "step": 2260
1365
+ },
1366
+ {
1367
+ "epoch": 0.78,
1368
+ "learning_rate": 4.323204419889503e-06,
1369
+ "loss": 1.7121,
1370
+ "step": 2270
1371
+ },
1372
+ {
1373
+ "epoch": 0.79,
1374
+ "learning_rate": 4.2541436464088406e-06,
1375
+ "loss": 1.8483,
1376
+ "step": 2280
1377
+ },
1378
+ {
1379
+ "epoch": 0.79,
1380
+ "learning_rate": 4.1850828729281775e-06,
1381
+ "loss": 1.7838,
1382
+ "step": 2290
1383
+ },
1384
+ {
1385
+ "epoch": 0.79,
1386
+ "learning_rate": 4.1160220994475145e-06,
1387
+ "loss": 1.6945,
1388
+ "step": 2300
1389
+ },
1390
+ {
1391
+ "epoch": 0.8,
1392
+ "learning_rate": 4.0469613259668514e-06,
1393
+ "loss": 1.7295,
1394
+ "step": 2310
1395
+ },
1396
+ {
1397
+ "epoch": 0.8,
1398
+ "learning_rate": 3.977900552486188e-06,
1399
+ "loss": 1.7116,
1400
+ "step": 2320
1401
+ },
1402
+ {
1403
+ "epoch": 0.8,
1404
+ "learning_rate": 3.9088397790055245e-06,
1405
+ "loss": 1.8388,
1406
+ "step": 2330
1407
+ },
1408
+ {
1409
+ "epoch": 0.81,
1410
+ "learning_rate": 3.839779005524862e-06,
1411
+ "loss": 1.8654,
1412
+ "step": 2340
1413
+ },
1414
+ {
1415
+ "epoch": 0.81,
1416
+ "learning_rate": 3.7707182320441993e-06,
1417
+ "loss": 1.7835,
1418
+ "step": 2350
1419
+ },
1420
+ {
1421
+ "epoch": 0.81,
1422
+ "learning_rate": 3.7016574585635362e-06,
1423
+ "loss": 1.6937,
1424
+ "step": 2360
1425
+ },
1426
+ {
1427
+ "epoch": 0.82,
1428
+ "learning_rate": 3.632596685082873e-06,
1429
+ "loss": 1.6986,
1430
+ "step": 2370
1431
+ },
1432
+ {
1433
+ "epoch": 0.82,
1434
+ "learning_rate": 3.56353591160221e-06,
1435
+ "loss": 1.7051,
1436
+ "step": 2380
1437
+ },
1438
+ {
1439
+ "epoch": 0.83,
1440
+ "learning_rate": 3.4944751381215475e-06,
1441
+ "loss": 1.7332,
1442
+ "step": 2390
1443
+ },
1444
+ {
1445
+ "epoch": 0.83,
1446
+ "learning_rate": 3.4254143646408845e-06,
1447
+ "loss": 1.6828,
1448
+ "step": 2400
1449
+ },
1450
+ {
1451
+ "epoch": 0.83,
1452
+ "learning_rate": 3.3563535911602214e-06,
1453
+ "loss": 1.818,
1454
+ "step": 2410
1455
+ },
1456
+ {
1457
+ "epoch": 0.84,
1458
+ "learning_rate": 3.2872928176795584e-06,
1459
+ "loss": 1.7628,
1460
+ "step": 2420
1461
+ },
1462
+ {
1463
+ "epoch": 0.84,
1464
+ "learning_rate": 3.218232044198895e-06,
1465
+ "loss": 1.7445,
1466
+ "step": 2430
1467
+ },
1468
+ {
1469
+ "epoch": 0.84,
1470
+ "learning_rate": 3.149171270718232e-06,
1471
+ "loss": 1.8389,
1472
+ "step": 2440
1473
+ },
1474
+ {
1475
+ "epoch": 0.85,
1476
+ "learning_rate": 3.0801104972375697e-06,
1477
+ "loss": 1.7749,
1478
+ "step": 2450
1479
+ },
1480
+ {
1481
+ "epoch": 0.85,
1482
+ "learning_rate": 3.0110497237569062e-06,
1483
+ "loss": 1.8421,
1484
+ "step": 2460
1485
+ },
1486
+ {
1487
+ "epoch": 0.85,
1488
+ "learning_rate": 2.941988950276243e-06,
1489
+ "loss": 1.7727,
1490
+ "step": 2470
1491
+ },
1492
+ {
1493
+ "epoch": 0.86,
1494
+ "learning_rate": 2.87292817679558e-06,
1495
+ "loss": 1.754,
1496
+ "step": 2480
1497
+ },
1498
+ {
1499
+ "epoch": 0.86,
1500
+ "learning_rate": 2.803867403314917e-06,
1501
+ "loss": 1.6552,
1502
+ "step": 2490
1503
+ },
1504
+ {
1505
+ "epoch": 0.86,
1506
+ "learning_rate": 2.7348066298342545e-06,
1507
+ "loss": 1.7354,
1508
+ "step": 2500
1509
+ },
1510
+ {
1511
+ "epoch": 0.87,
1512
+ "learning_rate": 2.6657458563535914e-06,
1513
+ "loss": 1.7694,
1514
+ "step": 2510
1515
+ },
1516
+ {
1517
+ "epoch": 0.87,
1518
+ "learning_rate": 2.5966850828729284e-06,
1519
+ "loss": 1.7042,
1520
+ "step": 2520
1521
+ },
1522
+ {
1523
+ "epoch": 0.87,
1524
+ "learning_rate": 2.5276243093922653e-06,
1525
+ "loss": 1.6969,
1526
+ "step": 2530
1527
+ },
1528
+ {
1529
+ "epoch": 0.88,
1530
+ "learning_rate": 2.4585635359116027e-06,
1531
+ "loss": 1.7404,
1532
+ "step": 2540
1533
+ },
1534
+ {
1535
+ "epoch": 0.88,
1536
+ "learning_rate": 2.3895027624309393e-06,
1537
+ "loss": 1.7236,
1538
+ "step": 2550
1539
+ },
1540
+ {
1541
+ "epoch": 0.88,
1542
+ "learning_rate": 2.320441988950276e-06,
1543
+ "loss": 1.8104,
1544
+ "step": 2560
1545
+ },
1546
+ {
1547
+ "epoch": 0.89,
1548
+ "learning_rate": 2.2513812154696136e-06,
1549
+ "loss": 1.7518,
1550
+ "step": 2570
1551
+ },
1552
+ {
1553
+ "epoch": 0.89,
1554
+ "learning_rate": 2.1823204419889505e-06,
1555
+ "loss": 1.7097,
1556
+ "step": 2580
1557
+ },
1558
+ {
1559
+ "epoch": 0.89,
1560
+ "learning_rate": 2.1132596685082875e-06,
1561
+ "loss": 1.7118,
1562
+ "step": 2590
1563
+ },
1564
+ {
1565
+ "epoch": 0.9,
1566
+ "learning_rate": 2.0441988950276245e-06,
1567
+ "loss": 1.7996,
1568
+ "step": 2600
1569
+ },
1570
+ {
1571
+ "epoch": 0.9,
1572
+ "learning_rate": 1.9751381215469614e-06,
1573
+ "loss": 1.6647,
1574
+ "step": 2610
1575
+ },
1576
+ {
1577
+ "epoch": 0.9,
1578
+ "learning_rate": 1.9060773480662986e-06,
1579
+ "loss": 1.6974,
1580
+ "step": 2620
1581
+ },
1582
+ {
1583
+ "epoch": 0.91,
1584
+ "learning_rate": 1.8370165745856355e-06,
1585
+ "loss": 1.6929,
1586
+ "step": 2630
1587
+ },
1588
+ {
1589
+ "epoch": 0.91,
1590
+ "learning_rate": 1.7679558011049725e-06,
1591
+ "loss": 1.7994,
1592
+ "step": 2640
1593
+ },
1594
+ {
1595
+ "epoch": 0.91,
1596
+ "learning_rate": 1.6988950276243097e-06,
1597
+ "loss": 1.7755,
1598
+ "step": 2650
1599
+ },
1600
+ {
1601
+ "epoch": 0.92,
1602
+ "learning_rate": 1.6298342541436466e-06,
1603
+ "loss": 1.8172,
1604
+ "step": 2660
1605
+ },
1606
+ {
1607
+ "epoch": 0.92,
1608
+ "learning_rate": 1.5607734806629834e-06,
1609
+ "loss": 1.8241,
1610
+ "step": 2670
1611
+ },
1612
+ {
1613
+ "epoch": 0.93,
1614
+ "learning_rate": 1.4917127071823205e-06,
1615
+ "loss": 1.7558,
1616
+ "step": 2680
1617
+ },
1618
+ {
1619
+ "epoch": 0.93,
1620
+ "learning_rate": 1.4226519337016575e-06,
1621
+ "loss": 1.7474,
1622
+ "step": 2690
1623
+ },
1624
+ {
1625
+ "epoch": 0.93,
1626
+ "learning_rate": 1.3535911602209945e-06,
1627
+ "loss": 1.8138,
1628
+ "step": 2700
1629
+ },
1630
+ {
1631
+ "epoch": 0.94,
1632
+ "learning_rate": 1.2845303867403316e-06,
1633
+ "loss": 1.7158,
1634
+ "step": 2710
1635
+ },
1636
+ {
1637
+ "epoch": 0.94,
1638
+ "learning_rate": 1.2154696132596686e-06,
1639
+ "loss": 1.8223,
1640
+ "step": 2720
1641
+ },
1642
+ {
1643
+ "epoch": 0.94,
1644
+ "learning_rate": 1.1464088397790055e-06,
1645
+ "loss": 1.8411,
1646
+ "step": 2730
1647
+ },
1648
+ {
1649
+ "epoch": 0.95,
1650
+ "learning_rate": 1.0773480662983427e-06,
1651
+ "loss": 1.8054,
1652
+ "step": 2740
1653
+ },
1654
+ {
1655
+ "epoch": 0.95,
1656
+ "learning_rate": 1.0082872928176797e-06,
1657
+ "loss": 1.7401,
1658
+ "step": 2750
1659
+ },
1660
+ {
1661
+ "epoch": 0.95,
1662
+ "learning_rate": 9.392265193370166e-07,
1663
+ "loss": 1.6664,
1664
+ "step": 2760
1665
+ },
1666
+ {
1667
+ "epoch": 0.96,
1668
+ "learning_rate": 8.701657458563537e-07,
1669
+ "loss": 1.7788,
1670
+ "step": 2770
1671
+ },
1672
+ {
1673
+ "epoch": 0.96,
1674
+ "learning_rate": 8.011049723756907e-07,
1675
+ "loss": 1.7135,
1676
+ "step": 2780
1677
+ },
1678
+ {
1679
+ "epoch": 0.96,
1680
+ "learning_rate": 7.320441988950276e-07,
1681
+ "loss": 1.8247,
1682
+ "step": 2790
1683
+ },
1684
+ {
1685
+ "epoch": 0.97,
1686
+ "learning_rate": 6.629834254143647e-07,
1687
+ "loss": 1.7436,
1688
+ "step": 2800
1689
+ },
1690
+ {
1691
+ "epoch": 0.97,
1692
+ "learning_rate": 5.939226519337017e-07,
1693
+ "loss": 1.792,
1694
+ "step": 2810
1695
+ },
1696
+ {
1697
+ "epoch": 0.97,
1698
+ "learning_rate": 5.248618784530387e-07,
1699
+ "loss": 1.7798,
1700
+ "step": 2820
1701
+ },
1702
+ {
1703
+ "epoch": 0.98,
1704
+ "learning_rate": 4.5580110497237574e-07,
1705
+ "loss": 1.7821,
1706
+ "step": 2830
1707
+ },
1708
+ {
1709
+ "epoch": 0.98,
1710
+ "learning_rate": 3.867403314917127e-07,
1711
+ "loss": 1.7994,
1712
+ "step": 2840
1713
+ },
1714
+ {
1715
+ "epoch": 0.98,
1716
+ "learning_rate": 3.1767955801104976e-07,
1717
+ "loss": 1.8054,
1718
+ "step": 2850
1719
+ },
1720
+ {
1721
+ "epoch": 0.99,
1722
+ "learning_rate": 2.486187845303868e-07,
1723
+ "loss": 1.7472,
1724
+ "step": 2860
1725
+ },
1726
+ {
1727
+ "epoch": 0.99,
1728
+ "learning_rate": 1.7955801104972376e-07,
1729
+ "loss": 1.7099,
1730
+ "step": 2870
1731
+ },
1732
+ {
1733
+ "epoch": 0.99,
1734
+ "learning_rate": 1.1049723756906078e-07,
1735
+ "loss": 1.7215,
1736
+ "step": 2880
1737
+ },
1738
+ {
1739
+ "epoch": 1.0,
1740
+ "learning_rate": 4.143646408839779e-08,
1741
+ "loss": 1.7854,
1742
+ "step": 2890
1743
+ },
1744
+ {
1745
+ "epoch": 1.0,
1746
+ "step": 2896,
1747
+ "total_flos": 1.202498608338174e+18,
1748
+ "train_loss": 1.7728749915381163,
1749
+ "train_runtime": 8665.6227,
1750
+ "train_samples_per_second": 10.696,
1751
+ "train_steps_per_second": 0.334
1752
+ }
1753
+ ],
1754
+ "max_steps": 2896,
1755
+ "num_train_epochs": 1,
1756
+ "total_flos": 1.202498608338174e+18,
1757
+ "trial_name": null,
1758
+ "trial_params": null
1759
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:97e00138104d4e085655a90d5111ad5f351141ea9ed78cbec25b5a9ec9760fec
+ size 4027
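
The trainer-state log in this commit records a learning rate decaying linearly to about 4.1e-08 at step 2890, a training loss logged every 10 steps, and a final aggregate entry (train_loss 1.7729 over 2896 steps in one epoch). A minimal Python sketch for loading such a file and plotting the loss curve is shown below; the local file path and the use of matplotlib are assumptions for illustration, not part of this commit. Note that the periodic entries carry a "loss" key while the final aggregate entry does not, which is what the filter relies on.

import json

import matplotlib.pyplot as plt  # assumption: matplotlib is installed

# Assumption: the trainer state JSON from this commit, saved locally.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the periodic logging entries; the final aggregate entry
# reports train_loss/train_runtime instead of a per-interval "loss".
logs = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("loss_curve.png")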