jondurbin committed
Commit 1da1f4a
1 Parent(s): 62d5c4e

Upload folder using huggingface_hub

config.json ADDED
@@ -0,0 +1,25 @@
+ {
+   "_name_or_path": "/workspace/llama-7b-hf",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "bos_token_id": 0,
+   "eos_token_id": 1,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "max_position_embeddings": 2048,
+   "max_seq_len": 4096,
+   "max_sequence_length": 2048,
+   "model_type": "llama",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "pad_token_id": -1,
+   "rms_norm_eps": 1e-06,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.29.2",
+   "use_cache": false,
+   "vocab_size": 32000
+ }
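
This config describes a standard 7B LLaMA layout (32 layers, 32 heads, hidden size 4096, vocab 32000). For reference, a checkpoint shaped like this loads with the stock transformers API; a minimal sketch, assuming a hypothetical local clone of this repo at `./checkpoint` (the path is not part of the commit):

```python
# Minimal loading sketch; "./checkpoint" is a placeholder path.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

path = "./checkpoint"
tokenizer = AutoTokenizer.from_pretrained(path)
model = AutoModelForCausalLM.from_pretrained(
    path,
    torch_dtype=torch.float16,  # weights are stored as float32; halve to fit one GPU
)

inputs = tokenizer("The quick brown fox", return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```

Note that this config pins `bos_token_id` to 0 and `eos_token_id` to 1, matching the generation_config.json below rather than the 1/2 convention used by later LLaMA conversions.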
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 0,
+   "eos_token_id": 1,
+   "pad_token_id": 0,
+   "transformers_version": "4.29.2"
+ }
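
These defaults travel with the checkpoint and are picked up automatically by `model.generate()`; they can also be inspected on their own. A small sketch (local path assumed, as above):

```python
# Inspect the generation defaults shipped with the checkpoint.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("./checkpoint")
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id, gen_cfg.pad_token_id)  # 0 1 0
```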
pytorch_model-00001-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fa4f3a56efca67a31e83a3306b0047391994df3a1802fd12a14cca9f17b5fae5
+ size 9877989586
pytorch_model-00002-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:62ef6cdd2c2b29cec07abfede67890ac1b35a15ab7a8e6d4a18ddc0050e749c6
+ size 9894801014
pytorch_model-00003-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e91a490d897baf14e60123f390d1af71d91e4d512fdaa223cc4d85bba5e5386c
+ size 7180990649
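
The three `.bin` entries above are Git LFS pointer files; the actual shards live in LFS storage. A downloaded shard can be checked against the sha256 recorded in its pointer, e.g. (local filename assumed to match the committed one):

```python
# Verify a downloaded shard against the oid from its LFS pointer.
import hashlib

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):  # stream in 1 MiB chunks to bound memory
            h.update(block)
    return h.hexdigest()

expected = "e91a490d897baf14e60123f390d1af71d91e4d512fdaa223cc4d85bba5e5386c"
assert sha256_of("pytorch_model-00003-of-00003.bin") == expected
```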
pytorch_model.bin.index.json ADDED
@@ -0,0 +1,330 @@
+ {
+   "metadata": {
+     "total_size": 26953670656
+   },
+   "weight_map": {
+     "lm_head.weight": "pytorch_model-00003-of-00003.bin",
+     "model.embed_tokens.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.11.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.11.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.11.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.11.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.12.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.20.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.23.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.24.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.30.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.norm.weight": "pytorch_model-00003-of-00003.bin"
+   }
+ }
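
The index records roughly 26.95 GB of float32 weights (`total_size` is in bytes) and tells loaders which of the three shards holds each tensor, so a single weight can be pulled without touching the other shards. A small sketch, assuming the files sit in the current directory:

```python
# Look up which shard holds a tensor, then load only that shard.
import json
import torch

with open("pytorch_model.bin.index.json") as f:
    index = json.load(f)

name = "lm_head.weight"
shard = index["weight_map"][name]       # -> "pytorch_model-00003-of-00003.bin"
state = torch.load(shard, map_location="cpu")  # loads one shard, not all three
print(name, tuple(state[name].shape))   # expect (32000, 4096) given this config
```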
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<unk>",
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+ size 499723
tokenizer_config.json ADDED
@@ -0,0 +1,34 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "bos_token": {
+     "__type": "AddedToken",
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "clean_up_tokenization_spaces": false,
+   "eos_token": {
+     "__type": "AddedToken",
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "model_max_length": 4096,
+   "pad_token": null,
+   "padding_side": "right",
+   "sp_model_kwargs": {},
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": {
+     "__type": "AddedToken",
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
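
Note that `pad_token` is `null` here while special_tokens_map.json maps it to `<unk>`. When padding is needed (e.g. for batched inference), the usual workaround is to assign one explicitly; a short sketch, again assuming a hypothetical local clone at `./checkpoint`:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # placeholder path
if tok.pad_token is None:
    tok.pad_token = tok.unk_token  # mirrors special_tokens_map.json's "pad_token": "<unk>"

batch = tok(["short", "a longer example"], padding=True, return_tensors="pt")
print(batch["input_ids"].shape)  # right-padded per "padding_side": "right"
```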
trainer_state.json ADDED
@@ -0,0 +1,1807 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 3.0,
+   "global_step": 297,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {"epoch": 0.01, "learning_rate": 1.6666666666666667e-06, "loss": 0.6895, "step": 1},
+     {"epoch": 0.02, "learning_rate": 3.3333333333333333e-06, "loss": 0.7129, "step": 2},
+     {"epoch": 0.03, "learning_rate": 5e-06, "loss": 0.7467, "step": 3},
+     {"epoch": 0.04, "learning_rate": 6.666666666666667e-06, "loss": 0.6553, "step": 4},
+     {"epoch": 0.05, "learning_rate": 8.333333333333334e-06, "loss": 0.6908, "step": 5},
+     {"epoch": 0.06, "learning_rate": 1e-05, "loss": 0.6292, "step": 6},
+     {"epoch": 0.07, "learning_rate": 1.1666666666666668e-05, "loss": 0.648, "step": 7},
+     {"epoch": 0.08, "learning_rate": 1.3333333333333333e-05, "loss": 0.6291, "step": 8},
+     {"epoch": 0.09, "learning_rate": 1.5000000000000002e-05, "loss": 0.6205, "step": 9},
+     {"epoch": 0.1, "learning_rate": 1.6666666666666667e-05, "loss": 0.5143, "step": 10},
+     {"epoch": 0.11, "learning_rate": 1.8333333333333333e-05, "loss": 0.5343, "step": 11},
+     {"epoch": 0.12, "learning_rate": 2e-05, "loss": 0.6127, "step": 12},
+     {"epoch": 0.13, "learning_rate": 1.9999392458943432e-05, "loss": 0.5313, "step": 13},
+     {"epoch": 0.14, "learning_rate": 1.9997569909594948e-05, "loss": 0.486, "step": 14},
+     {"epoch": 0.15, "learning_rate": 1.999453257340926e-05, "loss": 0.5457, "step": 15},
+     {"epoch": 0.16, "learning_rate": 1.9990280819447662e-05, "loss": 0.5688, "step": 16},
+     {"epoch": 0.17, "learning_rate": 1.9984815164333163e-05, "loss": 0.5523, "step": 17},
+     {"epoch": 0.18, "learning_rate": 1.9978136272187745e-05, "loss": 0.5291, "step": 18},
+     {"epoch": 0.19, "learning_rate": 1.9970244954551648e-05, "loss": 0.4995, "step": 19},
+     {"epoch": 0.2, "learning_rate": 1.9961142170284762e-05, "loss": 0.4819, "step": 20},
+     {"epoch": 0.21, "learning_rate": 1.9950829025450116e-05, "loss": 0.485, "step": 21},
+     {"epoch": 0.22, "learning_rate": 1.9939306773179498e-05, "loss": 0.5592, "step": 22},
+     {"epoch": 0.23, "learning_rate": 1.9926576813521167e-05, "loss": 0.5018, "step": 23},
+     {"epoch": 0.24, "learning_rate": 1.9912640693269754e-05, "loss": 0.5148, "step": 24},
+     {"epoch": 0.25, "learning_rate": 1.98975001057783e-05, "loss": 0.4703, "step": 25},
+     {"epoch": 0.26, "learning_rate": 1.9881156890752517e-05, "loss": 0.46, "step": 26},
+     {"epoch": 0.27, "learning_rate": 1.9863613034027224e-05, "loss": 0.4772, "step": 27},
+     {"epoch": 0.28, "learning_rate": 1.9844870667325073e-05, "loss": 0.4644, "step": 28},
+     {"epoch": 0.29, "learning_rate": 1.9824932067997516e-05, "loss": 0.43, "step": 29},
+     {"epoch": 0.3, "learning_rate": 1.9803799658748096e-05, "loss": 0.4792, "step": 30},
+     {"epoch": 0.31, "learning_rate": 1.9781476007338058e-05, "loss": 0.4879, "step": 31},
+     {"epoch": 0.32, "learning_rate": 1.9757963826274357e-05, "loss": 0.466, "step": 32},
+     {"epoch": 0.33, "learning_rate": 1.973326597248006e-05, "loss": 0.4972, "step": 33},
+     {"epoch": 0.34, "learning_rate": 1.97073854469472e-05, "loss": 0.4756, "step": 34},
+     {"epoch": 0.35, "learning_rate": 1.968032539437215e-05, "loss": 0.5015, "step": 35},
+     {"epoch": 0.36, "learning_rate": 1.9652089102773487e-05, "loss": 0.4449, "step": 36},
+     {"epoch": 0.37, "learning_rate": 1.9622680003092503e-05, "loss": 0.411, "step": 37},
+     {"epoch": 0.38, "learning_rate": 1.95921016687763e-05, "loss": 0.4184, "step": 38},
+     {"epoch": 0.39, "learning_rate": 1.9560357815343577e-05, "loss": 0.4779, "step": 39},
+     {"epoch": 0.4, "learning_rate": 1.9527452299933192e-05, "loss": 0.4778, "step": 40},
+     {"epoch": 0.41, "learning_rate": 1.9493389120835462e-05, "loss": 0.3891, "step": 41},
+     {"epoch": 0.42, "learning_rate": 1.9458172417006347e-05, "loss": 0.4235, "step": 42},
+     {"epoch": 0.43, "learning_rate": 1.9421806467564546e-05, "loss": 0.4597, "step": 43},
+     {"epoch": 0.44, "learning_rate": 1.9384295691271523e-05, "loss": 0.4506, "step": 44},
+     {"epoch": 0.45, "learning_rate": 1.934564464599461e-05, "loss": 0.4283, "step": 45},
+     {"epoch": 0.46, "learning_rate": 1.9305858028153186e-05, "loss": 0.414, "step": 46},
+     {"epoch": 0.47, "learning_rate": 1.9264940672148018e-05, "loss": 0.3809, "step": 47},
+     {"epoch": 0.48, "learning_rate": 1.922289754977385e-05, "loss": 0.4041, "step": 48},
+     {"epoch": 0.49, "learning_rate": 1.9179733769615273e-05, "loss": 0.4192, "step": 49},
+     {"epoch": 0.51, "learning_rate": 1.913545457642601e-05, "loss": 0.4638, "step": 50},
+     {"epoch": 0.52, "learning_rate": 1.909006535049163e-05, "loss": 0.3532, "step": 51},
+     {"epoch": 0.53, "learning_rate": 1.9043571606975776e-05, "loss": 0.3824, "step": 52},
+     {"epoch": 0.54, "learning_rate": 1.899597899525007e-05, "loss": 0.4066, "step": 53},
+     {"epoch": 0.55, "learning_rate": 1.8947293298207637e-05, "loss": 0.4117, "step": 54},
+     {"epoch": 0.56, "learning_rate": 1.8897520431560435e-05, "loss": 0.4037, "step": 55},
+     {"epoch": 0.57, "learning_rate": 1.884666644312046e-05, "loss": 0.4009, "step": 56},
+     {"epoch": 0.58, "learning_rate": 1.879473751206489e-05, "loss": 0.3139, "step": 57},
+     {"epoch": 0.59, "learning_rate": 1.8741739948185256e-05, "loss": 0.4352, "step": 58},
+     {"epoch": 0.6, "learning_rate": 1.8687680191120746e-05, "loss": 0.4189, "step": 59},
+     {"epoch": 0.61, "learning_rate": 1.863256480957574e-05, "loss": 0.4013, "step": 60},
+     {"epoch": 0.62, "learning_rate": 1.8576400500521673e-05, "loss": 0.416, "step": 61},
+     {"epoch": 0.63, "learning_rate": 1.851919408838327e-05, "loss": 0.4034, "step": 62},
+     {"epoch": 0.64, "learning_rate": 1.8460952524209355e-05, "loss": 0.3081, "step": 63},
+     {"epoch": 0.65, "learning_rate": 1.8401682884828212e-05, "loss": 0.4454, "step": 64},
+     {"epoch": 0.66, "learning_rate": 1.83413923719877e-05, "loss": 0.4245, "step": 65},
+     {"epoch": 0.67, "learning_rate": 1.8280088311480203e-05, "loss": 0.4159, "step": 66},
+     {"epoch": 0.68, "learning_rate": 1.821777815225245e-05, "loss": 0.3682, "step": 67},
+     {"epoch": 0.69, "learning_rate": 1.8154469465500447e-05, "loss": 0.3361, "step": 68},
+     {"epoch": 0.7, "learning_rate": 1.8090169943749477e-05, "loss": 0.4249, "step": 69},
+     {"epoch": 0.71, "learning_rate": 1.802488739991941e-05, "loss": 0.3483, "step": 70},
+     {"epoch": 0.72, "learning_rate": 1.7958629766375387e-05, "loss": 0.3563, "step": 71},
+     {"epoch": 0.73, "learning_rate": 1.789140509396394e-05, "loss": 0.3829, "step": 72},
+     {"epoch": 0.74, "learning_rate": 1.7823221551034766e-05, "loss": 0.333, "step": 73},
+     {"epoch": 0.75, "learning_rate": 1.7754087422448217e-05, "loss": 0.3394, "step": 74},
+     {"epoch": 0.76, "learning_rate": 1.7684011108568593e-05, "loss": 0.2989, "step": 75},
+     {"epoch": 0.77, "learning_rate": 1.7613001124243448e-05, "loss": 0.3513, "step": 76},
+     {"epoch": 0.78, "learning_rate": 1.7541066097768965e-05, "loss": 0.4423, "step": 77},
+     {"epoch": 0.79, "learning_rate": 1.7468214769841542e-05, "loss": 0.3345, "step": 78},
+     {"epoch": 0.8, "learning_rate": 1.7394455992495722e-05, "loss": 0.3538, "step": 79},
+     {"epoch": 0.81, "learning_rate": 1.7319798728028617e-05, "loss": 0.311, "step": 80},
+     {"epoch": 0.82, "learning_rate": 1.7244252047910893e-05, "loss": 0.3137, "step": 81},
+     {"epoch": 0.83, "learning_rate": 1.7167825131684516e-05, "loss": 0.2986, "step": 82},
+     {"epoch": 0.84, "learning_rate": 1.7090527265847375e-05, "loss": 0.3376, "step": 83},
+     {"epoch": 0.85, "learning_rate": 1.7012367842724887e-05, "loss": 0.2555, "step": 84},
+     {"epoch": 0.86, "learning_rate": 1.6933356359328756e-05, "loss": 0.2784, "step": 85},
+     {"epoch": 0.87, "learning_rate": 1.6853502416203e-05, "loss": 0.4175, "step": 86},
+     {"epoch": 0.88, "learning_rate": 1.6772815716257414e-05, "loss": 0.402, "step": 87},
+     {"epoch": 0.89, "learning_rate": 1.6691306063588583e-05, "loss": 0.3356, "step": 88},
+     {"epoch": 0.9, "learning_rate": 1.6608983362288612e-05, "loss": 0.3317, "step": 89},
+     {"epoch": 0.91, "learning_rate": 1.6525857615241686e-05, "loss": 0.3149, "step": 90},
+     {"epoch": 0.92, "learning_rate": 1.6441938922908644e-05, "loss": 0.291, "step": 91},
+     {"epoch": 0.93, "learning_rate": 1.6357237482099682e-05, "loss": 0.3862, "step": 92},
+     {"epoch": 0.94, "learning_rate": 1.6271763584735373e-05, "loss": 0.3611, "step": 93},
+     {"epoch": 0.95, "learning_rate": 1.6185527616596096e-05, "loss": 0.3326, "step": 94},
+     {"epoch": 0.96, "learning_rate": 1.609854005606009e-05, "loss": 0.4092, "step": 95},
+     {"epoch": 0.97, "learning_rate": 1.6010811472830253e-05, "loss": 0.313, "step": 96},
+     {"epoch": 0.98, "learning_rate": 1.5922352526649803e-05, "loss": 0.3875, "step": 97},
+     {"epoch": 0.99, "learning_rate": 1.583317396600707e-05, "loss": 0.3525, "step": 98},
+     {"epoch": 1.0, "learning_rate": 1.5743286626829437e-05, "loss": 0.3531, "step": 99},
+     {"epoch": 1.01, "learning_rate": 1.565270143116672e-05, "loss": 0.3013, "step": 100},
+     {"epoch": 1.02, "learning_rate": 1.5561429385864005e-05, "loss": 0.3702, "step": 101},
+     {"epoch": 1.03, "learning_rate": 1.5469481581224274e-05, "loss": 0.3134, "step": 102},
+     {"epoch": 1.04, "learning_rate": 1.5376869189660784e-05, "loss": 0.334, "step": 103},
+     {"epoch": 1.05, "learning_rate": 1.528360346433959e-05, "loss": 0.3265, "step": 104},
+     {"epoch": 1.06, "learning_rate": 1.5189695737812153e-05, "loss": 0.2694, "step": 105},
+     {"epoch": 1.07, "learning_rate": 1.5095157420638349e-05, "loss": 0.3163, "step": 106},
+     {"epoch": 1.08, "learning_rate": 1.5000000000000002e-05, "loss": 0.273, "step": 107},
+     {"epoch": 1.09, "learning_rate": 1.4904235038305084e-05, "loss": 0.2774, "step": 108},
+     {"epoch": 1.1, "learning_rate": 1.4807874171782795e-05, "loss": 0.3119, "step": 109},
+     {"epoch": 1.11, "learning_rate": 1.4710929109069674e-05, "loss": 0.3072, "step": 110},
+     {"epoch": 1.12, "learning_rate": 1.461341162978688e-05, "loss": 0.2993, "step": 111},
+     {"epoch": 1.13, "learning_rate": 1.4515333583108896e-05, "loss": 0.2969, "step": 112},
+     {"epoch": 1.14, "learning_rate": 1.4416706886323741e-05, "loss": 0.2728, "step": 113},
+     {"epoch": 1.15, "learning_rate": 1.4317543523384928e-05, "loss": 0.282, "step": 114},
+     {"epoch": 1.16, "learning_rate": 1.4217855543455323e-05, "loss": 0.2412, "step": 115},
+     {"epoch": 1.17, "learning_rate": 1.4117655059443052e-05, "loss": 0.2805, "step": 116},
+     {"epoch": 1.18, "learning_rate": 1.4016954246529697e-05, "loss": 0.2515, "step": 117},
+     {"epoch": 1.19, "learning_rate": 1.3915765340690916e-05, "loss": 0.246, "step": 118},
+     {"epoch": 1.2, "learning_rate": 1.3814100637209663e-05, "loss": 0.272, "step": 119},
+     {"epoch": 1.21, "learning_rate": 1.3711972489182208e-05, "loss": 0.2769, "step": 120},
+     {"epoch": 1.22, "learning_rate": 1.3609393306017149e-05, "loss": 0.2189, "step": 121},
+     {"epoch": 1.23, "learning_rate": 1.3506375551927546e-05, "loss": 0.2174, "step": 122},
+     {"epoch": 1.24, "learning_rate": 1.3402931744416432e-05, "loss": 0.2252, "step": 123},
+     {"epoch": 1.25, "learning_rate": 1.3299074452755829e-05, "loss": 0.2361, "step": 124},
+     {"epoch": 1.26, "learning_rate": 1.3194816296459483e-05, "loss": 0.2545, "step": 125},
+     {"epoch": 1.27, "learning_rate": 1.3090169943749475e-05, "loss": 0.2656, "step": 126},
+     {"epoch": 1.28, "learning_rate": 1.2985148110016947e-05, "loss": 0.2973, "step": 127},
+     {"epoch": 1.29, "learning_rate": 1.2879763556277062e-05, "loss": 0.2965, "step": 128},
+     {"epoch": 1.3, "learning_rate": 1.2774029087618448e-05, "loss": 0.2444, "step": 129},
+     {"epoch": 1.31, "learning_rate": 1.2667957551647263e-05, "loss": 0.2817, "step": 130},
+     {"epoch": 1.32, "learning_rate": 1.2561561836926115e-05, "loss": 0.2908, "step": 131},
+     {"epoch": 1.33, "learning_rate": 1.2454854871407993e-05, "loss": 0.2854, "step": 132},
+     {"epoch": 1.34, "learning_rate": 1.234784962086541e-05, "loss": 0.2955, "step": 133},
+     {"epoch": 1.35, "learning_rate": 1.224055908731496e-05, "loss": 0.3027, "step": 134},
+     {"epoch": 1.36, "learning_rate": 1.213299630743747e-05, "loss": 0.2459, "step": 135},
+     {"epoch": 1.37, "learning_rate": 1.2025174350993923e-05, "loss": 0.2663, "step": 136},
+     {"epoch": 1.38, "learning_rate": 1.1917106319237386e-05, "loss": 0.22, "step": 137},
+     {"epoch": 1.39, "learning_rate": 1.1808805343321102e-05, "loss": 0.2549, "step": 138},
+     {"epoch": 1.4, "learning_rate": 1.1700284582702933e-05, "loss": 0.2712, "step": 139},
+     {"epoch": 1.41, "learning_rate": 1.1591557223546394e-05, "loss": 0.2347, "step": 140},
+     {"epoch": 1.42, "learning_rate": 1.148263647711842e-05, "loss": 0.2478, "step": 141},
+     {"epoch": 1.43, "learning_rate": 1.1373535578184083e-05, "loss": 0.2049, "step": 142},
+     {"epoch": 1.44, "learning_rate": 1.1264267783398463e-05, "loss": 0.2375, "step": 143},
+     {"epoch": 1.45, "learning_rate": 1.1154846369695864e-05, "loss": 0.2893, "step": 144},
+     {"epoch": 1.46, "learning_rate": 1.1045284632676535e-05, "loss": 0.2361, "step": 145},
+     {"epoch": 1.47, "learning_rate": 1.093559588499118e-05, "loss": 0.1738, "step": 146},
+     {"epoch": 1.48, "learning_rate": 1.0825793454723325e-05, "loss": 0.2037, "step": 147},
+     {"epoch": 1.49, "learning_rate": 1.0715890683769872e-05, "loss": 0.2048, "step": 148},
+     {"epoch": 1.51, "learning_rate": 1.060590092621994e-05, "loss": 0.1777, "step": 149},
+     {"epoch": 1.52, "learning_rate": 1.0495837546732224e-05, "loss": 0.2889, "step": 150},
+     {"epoch": 1.53, "learning_rate": 1.0385713918911104e-05, "loss": 0.2363, "step": 151},
+     {"epoch": 1.54, "learning_rate": 1.0275543423681622e-05, "loss": 0.2456, "step": 152},
+     {"epoch": 1.55, "learning_rate": 1.0165339447663586e-05, "loss": 0.236, "step": 153},
+     {"epoch": 1.56, "learning_rate": 1.0055115381545006e-05, "loss": 0.2409, "step": 154},
+     {"epoch": 1.57, "learning_rate": 9.944884618454996e-06, "loss": 0.2758, "step": 155},
+     {"epoch": 1.58, "learning_rate": 9.834660552336415e-06, "loss": 0.1752, "step": 156},
+     {"epoch": 1.59, "learning_rate": 9.724456576318383e-06, "loss": 0.2372, "step": 157},
+     {"epoch": 1.6, "learning_rate": 9.614286081088895e-06, "loss": 0.2148, "step": 158},
+     {"epoch": 1.61, "learning_rate": 9.504162453267776e-06, "loss": 0.1684, "step": 159},
+     {"epoch": 1.62, "learning_rate": 9.394099073780066e-06, "loss": 0.2243, "step": 160},
+     {"epoch": 1.63, "learning_rate": 9.284109316230133e-06, "loss": 0.2365, "step": 161},
+     {"epoch": 1.64, "learning_rate": 9.174206545276678e-06, "loss": 0.2333, "step": 162},
+     {"epoch": 1.65, "learning_rate": 9.064404115008824e-06, "loss": 0.2108, "step": 163},
+     {"epoch": 1.66, "learning_rate": 8.954715367323468e-06, "loss": 0.283, "step": 164},
+     {"epoch": 1.67, "learning_rate": 8.84515363030414e-06, "loss": 0.2383, "step": 165},
+     {"epoch": 1.68, "learning_rate": 8.735732216601538e-06, "loss": 0.1921, "step": 166},
+     {"epoch": 1.69, "learning_rate": 8.626464421815919e-06, "loss": 0.2585, "step": 167},
+     {"epoch": 1.7, "learning_rate": 8.51736352288158e-06, "loss": 0.2201, "step": 168},
+     {"epoch": 1.71, "learning_rate": 8.408442776453606e-06, "loss": 0.2088, "step": 169},
+     {"epoch": 1.72, "learning_rate": 8.299715417297072e-06, "loss": 0.1736, "step": 170},
+     {"epoch": 1.73, "learning_rate": 8.191194656678905e-06, "loss": 0.2028, "step": 171},
+     {"epoch": 1.74, "learning_rate": 8.082893680762619e-06, "loss": 0.2331, "step": 172},
+     {"epoch": 1.75, "learning_rate": 7.974825649006082e-06, "loss": 0.2313, "step": 173},
+     {"epoch": 1.76, "learning_rate": 7.867003692562533e-06, "loss": 0.3048, "step": 174},
+     {"epoch": 1.77, "learning_rate": 7.759440912685043e-06, "loss": 0.2014, "step": 175},
+     {"epoch": 1.78, "learning_rate": 7.652150379134593e-06, "loss": 0.2098, "step": 176},
+     {"epoch": 1.79, "learning_rate": 7.545145128592009e-06, "loss": 0.2076, "step": 177},
+     {"epoch": 1.8, "learning_rate": 7.438438163073884e-06, "loss": 0.1652, "step": 178},
+     {"epoch": 1.81, "learning_rate": 7.3320424483527385e-06, "loss": 0.1559, "step": 179},
+     {"epoch": 1.82, "learning_rate": 7.225970912381557e-06, "loss": 0.2062, "step": 180},
+     {"epoch": 1.83, "learning_rate": 7.120236443722941e-06, "loss": 0.2017, "step": 181},
+     {"epoch": 1.84, "learning_rate": 7.014851889983058e-06, "loss": 0.1794, "step": 182},
+     {"epoch": 1.85, "learning_rate": 6.909830056250527e-06, "loss": 0.1932, "step": 183},
+     {"epoch": 1.86, "learning_rate": 6.80518370354052e-06, "loss": 0.2138, "step": 184},
+     {"epoch": 1.87, "learning_rate": 6.700925547244173e-06, "loss": 0.1776, "step": 185},
+     {"epoch": 1.88, "learning_rate": 6.59706825558357e-06, "loss": 0.1961, "step": 186},
+     {"epoch": 1.89, "learning_rate": 6.4936244480724575e-06, "loss": 0.2006, "step": 187},
+     {"epoch": 1.9, "learning_rate": 6.3906066939828546e-06, "loss": 0.263, "step": 188},
+     {"epoch": 1.91, "learning_rate": 6.2880275108177915e-06, "loss": 0.201, "step": 189},
+     {"epoch": 1.92, "learning_rate": 6.18589936279034e-06, "loss": 0.1588, "step": 190},
+     {"epoch": 1.93, "learning_rate": 6.084234659309088e-06, "loss": 0.216, "step": 191},
+     {"epoch": 1.94, "learning_rate": 5.983045753470308e-06, "loss": 0.1918, "step": 192},
+     {"epoch": 1.95, "learning_rate": 5.8823449405569525e-06, "loss": 0.184, "step": 193},
+     {"epoch": 1.96, "learning_rate": 5.782144456544681e-06, "loss": 0.1564, "step": 194},
+     {"epoch": 1.97, "learning_rate": 5.6824564766150724e-06, "loss": 0.1936, "step": 195},
+     {"epoch": 1.98, "learning_rate": 5.58329311367626e-06, "loss": 0.1495, "step": 196},
+     {"epoch": 1.99, "learning_rate": 5.484666416891109e-06, "loss": 0.2793, "step": 197},
+     {"epoch": 2.0, "learning_rate": 5.386588370213124e-06, "loss": 0.1719, "step": 198},
+     {"epoch": 2.01, "learning_rate": 5.289070890930328e-06, "loss": 0.194, "step": 199},
+     {"epoch": 2.02, "learning_rate": 5.192125828217203e-06, "loss": 0.1358, "step": 200},
+     {"epoch": 2.03, "learning_rate": 5.095764961694923e-06, "loss": 0.16, "step": 201},
+     {"epoch": 2.04, "learning_rate": 5.000000000000003e-06, "loss": 0.1797, "step": 202},
+     {"epoch": 2.05,
1224
+ "learning_rate": 4.904842579361653e-06,
1225
+ "loss": 0.1968,
1226
+ "step": 203
1227
+ },
1228
+ {
1229
+ "epoch": 2.06,
1230
+ "learning_rate": 4.8103042621878515e-06,
1231
+ "loss": 0.1639,
1232
+ "step": 204
1233
+ },
1234
+ {
1235
+ "epoch": 2.07,
1236
+ "learning_rate": 4.716396535660412e-06,
1237
+ "loss": 0.1747,
1238
+ "step": 205
1239
+ },
1240
+ {
1241
+ "epoch": 2.08,
1242
+ "learning_rate": 4.623130810339219e-06,
1243
+ "loss": 0.2478,
1244
+ "step": 206
1245
+ },
1246
+ {
1247
+ "epoch": 2.09,
1248
+ "learning_rate": 4.530518418775734e-06,
1249
+ "loss": 0.1603,
1250
+ "step": 207
1251
+ },
1252
+ {
1253
+ "epoch": 2.1,
1254
+ "learning_rate": 4.438570614135994e-06,
1255
+ "loss": 0.1551,
1256
+ "step": 208
1257
+ },
1258
+ {
1259
+ "epoch": 2.11,
1260
+ "learning_rate": 4.347298568833281e-06,
1261
+ "loss": 0.1762,
1262
+ "step": 209
1263
+ },
1264
+ {
1265
+ "epoch": 2.12,
1266
+ "learning_rate": 4.256713373170565e-06,
1267
+ "loss": 0.1881,
1268
+ "step": 210
1269
+ },
1270
+ {
1271
+ "epoch": 2.13,
1272
+ "learning_rate": 4.166826033992939e-06,
1273
+ "loss": 0.1405,
1274
+ "step": 211
1275
+ },
1276
+ {
1277
+ "epoch": 2.14,
1278
+ "learning_rate": 4.077647473350201e-06,
1279
+ "loss": 0.1996,
1280
+ "step": 212
1281
+ },
1282
+ {
1283
+ "epoch": 2.15,
1284
+ "learning_rate": 3.989188527169749e-06,
1285
+ "loss": 0.1252,
1286
+ "step": 213
1287
+ },
1288
+ {
1289
+ "epoch": 2.16,
1290
+ "learning_rate": 3.90145994393991e-06,
1291
+ "loss": 0.1156,
1292
+ "step": 214
1293
+ },
1294
+ {
1295
+ "epoch": 2.17,
1296
+ "learning_rate": 3.8144723834039076e-06,
1297
+ "loss": 0.1462,
1298
+ "step": 215
1299
+ },
1300
+ {
1301
+ "epoch": 2.18,
1302
+ "learning_rate": 3.72823641526463e-06,
1303
+ "loss": 0.2174,
1304
+ "step": 216
1305
+ },
1306
+ {
1307
+ "epoch": 2.19,
1308
+ "learning_rate": 3.6427625179003223e-06,
1309
+ "loss": 0.2678,
1310
+ "step": 217
1311
+ },
1312
+ {
1313
+ "epoch": 2.2,
1314
+ "learning_rate": 3.5580610770913593e-06,
1315
+ "loss": 0.1681,
1316
+ "step": 218
1317
+ },
1318
+ {
1319
+ "epoch": 2.21,
1320
+ "learning_rate": 3.4741423847583134e-06,
1321
+ "loss": 0.2125,
1322
+ "step": 219
1323
+ },
1324
+ {
1325
+ "epoch": 2.22,
1326
+ "learning_rate": 3.3910166377113894e-06,
1327
+ "loss": 0.1816,
1328
+ "step": 220
1329
+ },
1330
+ {
1331
+ "epoch": 2.23,
1332
+ "learning_rate": 3.308693936411421e-06,
1333
+ "loss": 0.1421,
1334
+ "step": 221
1335
+ },
1336
+ {
1337
+ "epoch": 2.24,
1338
+ "learning_rate": 3.2271842837425917e-06,
1339
+ "loss": 0.1898,
1340
+ "step": 222
1341
+ },
1342
+ {
1343
+ "epoch": 2.25,
1344
+ "learning_rate": 3.1464975837970035e-06,
1345
+ "loss": 0.1321,
1346
+ "step": 223
1347
+ },
1348
+ {
1349
+ "epoch": 2.26,
1350
+ "learning_rate": 3.0666436406712485e-06,
1351
+ "loss": 0.1529,
1352
+ "step": 224
1353
+ },
1354
+ {
1355
+ "epoch": 2.27,
1356
+ "learning_rate": 2.9876321572751143e-06,
1357
+ "loss": 0.1399,
1358
+ "step": 225
1359
+ },
1360
+ {
1361
+ "epoch": 2.28,
1362
+ "learning_rate": 2.9094727341526275e-06,
1363
+ "loss": 0.1596,
1364
+ "step": 226
1365
+ },
1366
+ {
1367
+ "epoch": 2.29,
1368
+ "learning_rate": 2.8321748683154893e-06,
1369
+ "loss": 0.1354,
1370
+ "step": 227
1371
+ },
1372
+ {
1373
+ "epoch": 2.3,
1374
+ "learning_rate": 2.7557479520891104e-06,
1375
+ "loss": 0.1474,
1376
+ "step": 228
1377
+ },
1378
+ {
1379
+ "epoch": 2.31,
1380
+ "learning_rate": 2.680201271971383e-06,
1381
+ "loss": 0.1558,
1382
+ "step": 229
1383
+ },
1384
+ {
1385
+ "epoch": 2.32,
1386
+ "learning_rate": 2.6055440075042793e-06,
1387
+ "loss": 0.1595,
1388
+ "step": 230
1389
+ },
1390
+ {
1391
+ "epoch": 2.33,
1392
+ "learning_rate": 2.5317852301584642e-06,
1393
+ "loss": 0.1791,
1394
+ "step": 231
1395
+ },
1396
+ {
1397
+ "epoch": 2.34,
1398
+ "learning_rate": 2.4589339022310386e-06,
1399
+ "loss": 0.1387,
1400
+ "step": 232
1401
+ },
1402
+ {
1403
+ "epoch": 2.35,
1404
+ "learning_rate": 2.386998875756554e-06,
1405
+ "loss": 0.1032,
1406
+ "step": 233
1407
+ },
1408
+ {
1409
+ "epoch": 2.36,
1410
+ "learning_rate": 2.315988891431412e-06,
1411
+ "loss": 0.1711,
1412
+ "step": 234
1413
+ },
1414
+ {
1415
+ "epoch": 2.37,
1416
+ "learning_rate": 2.2459125775517854e-06,
1417
+ "loss": 0.1274,
1418
+ "step": 235
1419
+ },
1420
+ {
1421
+ "epoch": 2.38,
1422
+ "learning_rate": 2.1767784489652345e-06,
1423
+ "loss": 0.1584,
1424
+ "step": 236
1425
+ },
1426
+ {
1427
+ "epoch": 2.39,
1428
+ "learning_rate": 2.1085949060360654e-06,
1429
+ "loss": 0.1374,
1430
+ "step": 237
1431
+ },
1432
+ {
1433
+ "epoch": 2.4,
1434
+ "learning_rate": 2.0413702336246156e-06,
1435
+ "loss": 0.1403,
1436
+ "step": 238
1437
+ },
1438
+ {
1439
+ "epoch": 2.41,
1440
+ "learning_rate": 1.97511260008059e-06,
1441
+ "loss": 0.1533,
1442
+ "step": 239
1443
+ },
1444
+ {
1445
+ "epoch": 2.42,
1446
+ "learning_rate": 1.9098300562505266e-06,
1447
+ "loss": 0.1528,
1448
+ "step": 240
1449
+ },
1450
+ {
1451
+ "epoch": 2.43,
1452
+ "learning_rate": 1.8455305344995523e-06,
1453
+ "loss": 0.1275,
1454
+ "step": 241
1455
+ },
1456
+ {
1457
+ "epoch": 2.44,
1458
+ "learning_rate": 1.7822218477475496e-06,
1459
+ "loss": 0.209,
1460
+ "step": 242
1461
+ },
1462
+ {
1463
+ "epoch": 2.45,
1464
+ "learning_rate": 1.7199116885197996e-06,
1465
+ "loss": 0.1208,
1466
+ "step": 243
1467
+ },
1468
+ {
1469
+ "epoch": 2.46,
1470
+ "learning_rate": 1.6586076280123032e-06,
1471
+ "loss": 0.0937,
1472
+ "step": 244
1473
+ },
1474
+ {
1475
+ "epoch": 2.47,
1476
+ "learning_rate": 1.5983171151717924e-06,
1477
+ "loss": 0.1244,
1478
+ "step": 245
1479
+ },
1480
+ {
1481
+ "epoch": 2.48,
1482
+ "learning_rate": 1.5390474757906449e-06,
1483
+ "loss": 0.1131,
1484
+ "step": 246
1485
+ },
1486
+ {
1487
+ "epoch": 2.49,
1488
+ "learning_rate": 1.4808059116167306e-06,
1489
+ "loss": 0.1315,
1490
+ "step": 247
1491
+ },
1492
+ {
1493
+ "epoch": 2.51,
1494
+ "learning_rate": 1.4235994994783297e-06,
1495
+ "loss": 0.1891,
1496
+ "step": 248
1497
+ },
1498
+ {
1499
+ "epoch": 2.52,
1500
+ "learning_rate": 1.367435190424261e-06,
1501
+ "loss": 0.1316,
1502
+ "step": 249
1503
+ },
1504
+ {
1505
+ "epoch": 2.53,
1506
+ "learning_rate": 1.3123198088792577e-06,
1507
+ "loss": 0.155,
1508
+ "step": 250
1509
+ },
1510
+ {
1511
+ "epoch": 2.54,
1512
+ "learning_rate": 1.2582600518147448e-06,
1513
+ "loss": 0.1472,
1514
+ "step": 251
1515
+ },
1516
+ {
1517
+ "epoch": 2.55,
1518
+ "learning_rate": 1.2052624879351105e-06,
1519
+ "loss": 0.1732,
1520
+ "step": 252
1521
+ },
1522
+ {
1523
+ "epoch": 2.56,
1524
+ "learning_rate": 1.1533335568795412e-06,
1525
+ "loss": 0.1536,
1526
+ "step": 253
1527
+ },
1528
+ {
1529
+ "epoch": 2.57,
1530
+ "learning_rate": 1.1024795684395695e-06,
1531
+ "loss": 0.1176,
1532
+ "step": 254
1533
+ },
1534
+ {
1535
+ "epoch": 2.58,
1536
+ "learning_rate": 1.0527067017923654e-06,
1537
+ "loss": 0.1502,
1538
+ "step": 255
1539
+ },
1540
+ {
1541
+ "epoch": 2.59,
1542
+ "learning_rate": 1.0040210047499289e-06,
1543
+ "loss": 0.1859,
1544
+ "step": 256
1545
+ },
1546
+ {
1547
+ "epoch": 2.6,
1548
+ "learning_rate": 9.564283930242258e-07,
1549
+ "loss": 0.1167,
1550
+ "step": 257
1551
+ },
1552
+ {
1553
+ "epoch": 2.61,
1554
+ "learning_rate": 9.09934649508375e-07,
1555
+ "loss": 0.1391,
1556
+ "step": 258
1557
+ },
1558
+ {
1559
+ "epoch": 2.62,
1560
+ "learning_rate": 8.645454235739903e-07,
1561
+ "loss": 0.1438,
1562
+ "step": 259
1563
+ },
1564
+ {
1565
+ "epoch": 2.63,
1566
+ "learning_rate": 8.202662303847298e-07,
1567
+ "loss": 0.1527,
1568
+ "step": 260
1569
+ },
1570
+ {
1571
+ "epoch": 2.64,
1572
+ "learning_rate": 7.771024502261526e-07,
1573
+ "loss": 0.1238,
1574
+ "step": 261
1575
+ },
1576
+ {
1577
+ "epoch": 2.65,
1578
+ "learning_rate": 7.350593278519824e-07,
1579
+ "loss": 0.1709,
1580
+ "step": 262
1581
+ },
1582
+ {
1583
+ "epoch": 2.66,
1584
+ "learning_rate": 6.941419718468168e-07,
1585
+ "loss": 0.1836,
1586
+ "step": 263
1587
+ },
1588
+ {
1589
+ "epoch": 2.67,
1590
+ "learning_rate": 6.543553540053926e-07,
1591
+ "loss": 0.1866,
1592
+ "step": 264
1593
+ },
1594
+ {
1595
+ "epoch": 2.68,
1596
+ "learning_rate": 6.157043087284797e-07,
1597
+ "loss": 0.1365,
1598
+ "step": 265
1599
+ },
1600
+ {
1601
+ "epoch": 2.69,
1602
+ "learning_rate": 5.781935324354571e-07,
1603
+ "loss": 0.1835,
1604
+ "step": 266
1605
+ },
1606
+ {
1607
+ "epoch": 2.7,
1608
+ "learning_rate": 5.418275829936537e-07,
1609
+ "loss": 0.1197,
1610
+ "step": 267
1611
+ },
1612
+ {
1613
+ "epoch": 2.71,
1614
+ "learning_rate": 5.066108791645407e-07,
1615
+ "loss": 0.1345,
1616
+ "step": 268
1617
+ },
1618
+ {
1619
+ "epoch": 2.72,
1620
+ "learning_rate": 4.7254770006681105e-07,
1621
+ "loss": 0.1644,
1622
+ "step": 269
1623
+ },
1624
+ {
1625
+ "epoch": 2.73,
1626
+ "learning_rate": 4.396421846564236e-07,
1627
+ "loss": 0.1511,
1628
+ "step": 270
1629
+ },
1630
+ {
1631
+ "epoch": 2.74,
1632
+ "learning_rate": 4.078983312237017e-07,
1633
+ "loss": 0.179,
1634
+ "step": 271
1635
+ },
1636
+ {
1637
+ "epoch": 2.75,
1638
+ "learning_rate": 3.773199969074959e-07,
1639
+ "loss": 0.1191,
1640
+ "step": 272
1641
+ },
1642
+ {
1643
+ "epoch": 2.76,
1644
+ "learning_rate": 3.4791089722651437e-07,
1645
+ "loss": 0.127,
1646
+ "step": 273
1647
+ },
1648
+ {
1649
+ "epoch": 2.77,
1650
+ "learning_rate": 3.1967460562785325e-07,
1651
+ "loss": 0.1226,
1652
+ "step": 274
1653
+ },
1654
+ {
1655
+ "epoch": 2.78,
1656
+ "learning_rate": 2.926145530528002e-07,
1657
+ "loss": 0.1787,
1658
+ "step": 275
1659
+ },
1660
+ {
1661
+ "epoch": 2.79,
1662
+ "learning_rate": 2.667340275199426e-07,
1663
+ "loss": 0.1438,
1664
+ "step": 276
1665
+ },
1666
+ {
1667
+ "epoch": 2.8,
1668
+ "learning_rate": 2.420361737256438e-07,
1669
+ "loss": 0.1226,
1670
+ "step": 277
1671
+ },
1672
+ {
1673
+ "epoch": 2.81,
1674
+ "learning_rate": 2.1852399266194312e-07,
1675
+ "loss": 0.1422,
1676
+ "step": 278
1677
+ },
1678
+ {
1679
+ "epoch": 2.82,
1680
+ "learning_rate": 1.9620034125190645e-07,
1681
+ "loss": 0.1451,
1682
+ "step": 279
1683
+ },
1684
+ {
1685
+ "epoch": 2.83,
1686
+ "learning_rate": 1.7506793200248507e-07,
1687
+ "loss": 0.1726,
1688
+ "step": 280
1689
+ },
1690
+ {
1691
+ "epoch": 2.84,
1692
+ "learning_rate": 1.5512933267492813e-07,
1693
+ "loss": 0.1418,
1694
+ "step": 281
1695
+ },
1696
+ {
1697
+ "epoch": 2.85,
1698
+ "learning_rate": 1.3638696597277678e-07,
1699
+ "loss": 0.1239,
1700
+ "step": 282
1701
+ },
1702
+ {
1703
+ "epoch": 2.86,
1704
+ "learning_rate": 1.18843109247484e-07,
1705
+ "loss": 0.1554,
1706
+ "step": 283
1707
+ },
1708
+ {
1709
+ "epoch": 2.87,
1710
+ "learning_rate": 1.0249989422169926e-07,
1711
+ "loss": 0.1818,
1712
+ "step": 284
1713
+ },
1714
+ {
1715
+ "epoch": 2.88,
1716
+ "learning_rate": 8.735930673024806e-08,
1717
+ "loss": 0.1633,
1718
+ "step": 285
1719
+ },
1720
+ {
1721
+ "epoch": 2.89,
1722
+ "learning_rate": 7.342318647883595e-08,
1723
+ "loss": 0.1912,
1724
+ "step": 286
1725
+ },
1726
+ {
1727
+ "epoch": 2.9,
1728
+ "learning_rate": 6.069322682050516e-08,
1729
+ "loss": 0.1301,
1730
+ "step": 287
1731
+ },
1732
+ {
1733
+ "epoch": 2.91,
1734
+ "learning_rate": 4.9170974549885844e-08,
1735
+ "loss": 0.1728,
1736
+ "step": 288
1737
+ },
1738
+ {
1739
+ "epoch": 2.92,
1740
+ "learning_rate": 3.885782971524088e-08,
1741
+ "loss": 0.1156,
1742
+ "step": 289
1743
+ },
1744
+ {
1745
+ "epoch": 2.93,
1746
+ "learning_rate": 2.9755045448351948e-08,
1747
+ "loss": 0.1304,
1748
+ "step": 290
1749
+ },
1750
+ {
1751
+ "epoch": 2.94,
1752
+ "learning_rate": 2.1863727812254653e-08,
1753
+ "loss": 0.1408,
1754
+ "step": 291
1755
+ },
1756
+ {
1757
+ "epoch": 2.95,
1758
+ "learning_rate": 1.518483566683826e-08,
1759
+ "loss": 0.2036,
1760
+ "step": 292
1761
+ },
1762
+ {
1763
+ "epoch": 2.96,
1764
+ "learning_rate": 9.719180552341113e-09,
1765
+ "loss": 0.1597,
1766
+ "step": 293
1767
+ },
1768
+ {
1769
+ "epoch": 2.97,
1770
+ "learning_rate": 5.467426590739511e-09,
1771
+ "loss": 0.1618,
1772
+ "step": 294
1773
+ },
1774
+ {
1775
+ "epoch": 2.98,
1776
+ "learning_rate": 2.430090405054486e-09,
1777
+ "loss": 0.1651,
1778
+ "step": 295
1779
+ },
1780
+ {
1781
+ "epoch": 2.99,
1782
+ "learning_rate": 6.075410565697937e-10,
1783
+ "loss": 0.1322,
1784
+ "step": 296
1785
+ },
1786
+ {
1787
+ "epoch": 3.0,
1788
+ "learning_rate": 0.0,
1789
+ "loss": 0.1284,
1790
+ "step": 297
1791
+ },
1792
+ {
1793
+ "epoch": 3.0,
1794
+ "step": 297,
1795
+ "total_flos": 7.834692227086418e+17,
1796
+ "train_loss": 0.27782333958329575,
1797
+ "train_runtime": 6854.2409,
1798
+ "train_samples_per_second": 5.521,
1799
+ "train_steps_per_second": 0.043
1800
+ }
1801
+ ],
1802
+ "max_steps": 297,
1803
+ "num_train_epochs": 3,
1804
+ "total_flos": 7.834692227086418e+17,
1805
+ "trial_name": null,
1806
+ "trial_params": null
1807
+ }
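
The log above — presumably this repository's trainer_state.json, the state file that transformers.Trainer writes alongside checkpoints — records a per-step loss and a learning rate that decays smoothly to 0.0 at the final step 297, consistent with a cosine schedule over 3 epochs. The trailer's throughput figures are self-consistent: 297 steps / 6854.2409 s ≈ 0.043 steps/s, and 5.521 samples/s ÷ 0.043 steps/s ≈ 128 samples per optimizer step. A minimal sketch for inspecting such a file; the filename and the log_history layout are assumptions inferred from the entries shown here, not confirmed by this diff:

import json

# Load the trainer state (assumed transformers.Trainer layout:
# {"log_history": [{"epoch", "learning_rate", "loss", "step"}, ...], ...}).
with open("trainer_state.json") as fh:
    state = json.load(fh)

# Per-step entries carry both a loss and a learning rate.
logged = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]
print("final step:", logged[-1]["step"])
print("final loss:", logged[-1]["loss"])
print("final lr:  ", logged[-1]["learning_rate"])

# The last log_history entry is the training summary; samples/s divided by
# steps/s recovers the effective batch size (~128 for the numbers above).
summary = state["log_history"][-1]
if "train_samples_per_second" in summary:
    print("effective batch size ~",
          round(summary["train_samples_per_second"] / summary["train_steps_per_second"]))
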
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e88e487e3ec731ca0983617b8b7922fedadc7e30ccb71f07203cdc9e1a046b07
+ size 4091
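
training_args.bin lands in the repo as a Git LFS pointer; the ~4 KB payload behind it is the pickled transformers TrainingArguments object that Trainer saves. A hedged sketch for inspecting it after fetching the real file with `git lfs pull` — it assumes a compatible transformers version is installed (4.29.x, per this commit's config.json), and passes weights_only=False because recent PyTorch requires it for full-pickle files like this one:

import torch

# Unpickling requires transformers to be importable, since the file
# stores a transformers.TrainingArguments instance.
args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs)   # should be 3, matching the trainer state above
print(args.learning_rate)      # the peak lr the schedule decayed from
print(args.lr_scheduler_type)  # expected "cosine", judging by the decay curve
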