Doohae committed on
Commit
e26886c
1 Parent(s): 866dce6
all_results.json ADDED
@@ -0,0 +1,17 @@
+ {
+ "epoch": 10.0,
+ "eval_samples": 474,
+ "exact_match": 66.66666666666667,
+ "f1": 73.91331557875677,
+ "init_mem_cpu_alloc_delta": 955506688,
+ "init_mem_cpu_peaked_delta": 1337384960,
+ "init_mem_gpu_alloc_delta": 1343489024,
+ "init_mem_gpu_peaked_delta": 0,
+ "train_mem_cpu_alloc_delta": 391798784,
+ "train_mem_cpu_peaked_delta": 448819200,
+ "train_mem_gpu_alloc_delta": 4055639040,
+ "train_mem_gpu_peaked_delta": 10974608384,
+ "train_runtime": 17438.5747,
+ "train_samples": 17427,
+ "train_samples_per_second": 1.25
+ }
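
For quick inspection, the aggregated metrics above can be read back with the standard library. A minimal sketch (the file path is an assumption, relative to the repository root):

```python
import json

# Load the aggregated train + eval metrics written by the Trainer.
with open("all_results.json") as f:
    results = json.load(f)

print(f"EM {results['exact_match']:.2f} / F1 {results['f1']:.2f} "
      f"on {results['eval_samples']} eval samples")
print(f"train runtime: {results['train_runtime'] / 3600:.2f} h")
```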
checkpoint-21500/config.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "_name_or_path": "klue/roberta-large",
+ "architectures": [
+ "RobertaForQuestionAnswering"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "bos_token_id": 0,
+ "eos_token_id": 2,
+ "gradient_checkpointing": false,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 514,
+ "model_type": "roberta",
+ "num_attention_heads": 16,
+ "num_hidden_layers": 24,
+ "pad_token_id": 1,
+ "position_embedding_type": "absolute",
+ "tokenizer_class": "BertTokenizer",
+ "transformers_version": "4.5.0",
+ "type_vocab_size": 1,
+ "use_cache": true,
+ "vocab_size": 32000
+ }
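
The checkpoint directory ships this config alongside the weights, so it can be reloaded directly with transformers (a sketch; the local path is an assumption, and a version near the pinned "transformers_version": "4.5.0" is presumed):

```python
from transformers import AutoModelForQuestionAnswering

# Restores RobertaForQuestionAnswering with the config shown above.
model = AutoModelForQuestionAnswering.from_pretrained("checkpoint-21500")

cfg = model.config
assert cfg.hidden_size == 1024 and cfg.num_hidden_layers == 24  # roberta-large
```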
checkpoint-21500/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f113791049ceb6df9f53b8e29c6a61343e205c3daf0342127ee9e14f5c51ef3f
+ size 2685116205
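
optimizer.pt (like the other large binaries in this commit) is stored as a Git LFS pointer: the repo tracks only the spec version, the SHA-256 oid, and the byte size, while the ~2.7 GB blob lives in LFS storage. A minimal sketch for verifying a downloaded blob against its pointer fields (paths are assumptions):

```python
import hashlib

def verify_lfs_blob(path: str, oid: str, size: int) -> bool:
    """Check a downloaded file against its Git LFS pointer fields."""
    h, n = hashlib.sha256(), 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            n += len(chunk)
    return h.hexdigest() == oid and n == size

# Values copied from the pointer above.
print(verify_lfs_blob(
    "checkpoint-21500/optimizer.pt",
    "f113791049ceb6df9f53b8e29c6a61343e205c3daf0342127ee9e14f5c51ef3f",
    2685116205,
))
```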
checkpoint-21500/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e4f52f971e31d560b13a83619c857ecc8b4cd53b872bca1d9dbae87ba50ea616
+ size 1342607991
checkpoint-21500/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d35ff702e37b610ef1e1be52abaca6a21b83094d412c529ef03ce1f0b14acb2e
+ size 623
checkpoint-21500/special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "[CLS]", "eos_token": "[SEP]", "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
checkpoint-21500/tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "do_basic_tokenize": true, "never_split": null, "bos_token": "[CLS]", "eos_token": "[SEP]", "model_max_length": 512, "tokenizer_class": "BertTokenizer", "special_tokens_map_file": "/opt/ml/.cache/huggingface/transformers/1a24ab4628028ed80dea35ce3334a636dc656fd9a17a09bad377f88f0cbecdac.70c17d6e4d492c8f24f5bb97ab56c7f272e947112c6faf9dd846da42ba13eb23", "name_or_path": "klue/roberta-large"}
checkpoint-21500/trainer_state.json ADDED
@@ -0,0 +1,328 @@
+ {
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 9.866911427260211,
+ "global_step": 21500,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.23,
+ "learning_rate": 4.885268471776045e-06,
+ "loss": 1.4166,
+ "step": 500
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 4.770536943552089e-06,
+ "loss": 0.6545,
+ "step": 1000
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.655805415328132e-06,
+ "loss": 0.5315,
+ "step": 1500
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 4.541073887104177e-06,
+ "loss": 0.4715,
+ "step": 2000
+ },
+ {
+ "epoch": 1.0,
+ "exact_match": 67.5,
+ "f1": 75.87103174603179,
+ "step": 2179
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 4.4263423588802205e-06,
+ "loss": 0.3414,
+ "step": 2500
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 4.311610830656265e-06,
+ "loss": 0.2945,
+ "step": 3000
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 4.196879302432309e-06,
+ "loss": 0.3019,
+ "step": 3500
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 4.0821477742083525e-06,
+ "loss": 0.3033,
+ "step": 4000
+ },
+ {
+ "epoch": 2.0,
+ "exact_match": 68.33333333333333,
+ "f1": 76.34986772486776,
+ "step": 4358
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 3.967416245984397e-06,
+ "loss": 0.2622,
+ "step": 4500
+ },
+ {
+ "epoch": 2.29,
+ "learning_rate": 3.852684717760441e-06,
+ "loss": 0.1801,
+ "step": 5000
+ },
+ {
+ "epoch": 2.52,
+ "learning_rate": 3.7379531895364852e-06,
+ "loss": 0.1697,
+ "step": 5500
+ },
+ {
+ "epoch": 2.75,
+ "learning_rate": 3.623221661312529e-06,
+ "loss": 0.2118,
+ "step": 6000
+ },
+ {
+ "epoch": 2.98,
+ "learning_rate": 3.508490133088573e-06,
+ "loss": 0.1559,
+ "step": 6500
+ },
+ {
+ "epoch": 3.0,
+ "exact_match": 67.91666666666667,
+ "f1": 76.26147001147002,
+ "step": 6537
+ },
+ {
+ "epoch": 3.21,
+ "learning_rate": 3.393758604864617e-06,
+ "loss": 0.1068,
+ "step": 7000
+ },
+ {
+ "epoch": 3.44,
+ "learning_rate": 3.2790270766406612e-06,
+ "loss": 0.0951,
+ "step": 7500
+ },
+ {
+ "epoch": 3.67,
+ "learning_rate": 3.1642955484167054e-06,
+ "loss": 0.0986,
+ "step": 8000
+ },
+ {
+ "epoch": 3.9,
+ "learning_rate": 3.049564020192749e-06,
+ "loss": 0.1122,
+ "step": 8500
+ },
+ {
+ "epoch": 4.0,
+ "exact_match": 67.5,
+ "f1": 74.74419793169795,
+ "step": 8716
+ },
+ {
+ "epoch": 4.13,
+ "learning_rate": 2.934832491968793e-06,
+ "loss": 0.0811,
+ "step": 9000
+ },
+ {
+ "epoch": 4.36,
+ "learning_rate": 2.8201009637448373e-06,
+ "loss": 0.0546,
+ "step": 9500
+ },
+ {
+ "epoch": 4.59,
+ "learning_rate": 2.7053694355208814e-06,
+ "loss": 0.056,
+ "step": 10000
+ },
+ {
+ "epoch": 4.82,
+ "learning_rate": 2.5906379072969255e-06,
+ "loss": 0.0731,
+ "step": 10500
+ },
+ {
+ "epoch": 5.0,
+ "exact_match": 70.41666666666667,
+ "f1": 77.59175084175087,
+ "step": 10895
+ },
+ {
+ "epoch": 5.05,
+ "learning_rate": 2.4759063790729696e-06,
+ "loss": 0.0687,
+ "step": 11000
+ },
+ {
+ "epoch": 5.28,
+ "learning_rate": 2.3611748508490133e-06,
+ "loss": 0.0471,
+ "step": 11500
+ },
+ {
+ "epoch": 5.51,
+ "learning_rate": 2.2464433226250574e-06,
+ "loss": 0.0426,
+ "step": 12000
+ },
+ {
+ "epoch": 5.74,
+ "learning_rate": 2.1317117944011015e-06,
+ "loss": 0.0437,
+ "step": 12500
+ },
+ {
+ "epoch": 5.97,
+ "learning_rate": 2.0169802661771456e-06,
+ "loss": 0.0299,
+ "step": 13000
+ },
+ {
+ "epoch": 6.0,
+ "exact_match": 66.66666666666667,
+ "f1": 74.826330804272,
+ "step": 13074
+ },
+ {
+ "epoch": 6.2,
+ "learning_rate": 1.9022487379531897e-06,
+ "loss": 0.0232,
+ "step": 13500
+ },
+ {
+ "epoch": 6.42,
+ "learning_rate": 1.7875172097292336e-06,
+ "loss": 0.0221,
+ "step": 14000
+ },
+ {
+ "epoch": 6.65,
+ "learning_rate": 1.672785681505278e-06,
+ "loss": 0.0242,
+ "step": 14500
+ },
+ {
+ "epoch": 6.88,
+ "learning_rate": 1.5580541532813219e-06,
+ "loss": 0.0327,
+ "step": 15000
+ },
+ {
+ "epoch": 7.0,
+ "exact_match": 67.08333333333333,
+ "f1": 75.72919876963996,
+ "step": 15253
+ },
+ {
+ "epoch": 7.11,
+ "learning_rate": 1.443322625057366e-06,
+ "loss": 0.0207,
+ "step": 15500
+ },
+ {
+ "epoch": 7.34,
+ "learning_rate": 1.3285910968334099e-06,
+ "loss": 0.0243,
+ "step": 16000
+ },
+ {
+ "epoch": 7.57,
+ "learning_rate": 1.213859568609454e-06,
+ "loss": 0.0127,
+ "step": 16500
+ },
+ {
+ "epoch": 7.8,
+ "learning_rate": 1.099128040385498e-06,
+ "loss": 0.0189,
+ "step": 17000
+ },
+ {
+ "epoch": 8.0,
+ "exact_match": 66.25,
+ "f1": 73.67448496492618,
+ "step": 17432
+ },
+ {
+ "epoch": 8.03,
+ "learning_rate": 9.84396512161542e-07,
+ "loss": 0.0234,
+ "step": 17500
+ },
+ {
+ "epoch": 8.26,
+ "learning_rate": 8.696649839375861e-07,
+ "loss": 0.0041,
+ "step": 18000
+ },
+ {
+ "epoch": 8.49,
+ "learning_rate": 7.549334557136302e-07,
+ "loss": 0.0115,
+ "step": 18500
+ },
+ {
+ "epoch": 8.72,
+ "learning_rate": 6.402019274896742e-07,
+ "loss": 0.0159,
+ "step": 19000
+ },
+ {
+ "epoch": 8.95,
+ "learning_rate": 5.254703992657182e-07,
+ "loss": 0.0116,
+ "step": 19500
+ },
+ {
+ "epoch": 9.0,
+ "exact_match": 69.16666666666667,
+ "f1": 77.33799506593625,
+ "step": 19611
+ },
+ {
+ "epoch": 9.18,
+ "learning_rate": 4.107388710417623e-07,
+ "loss": 0.0096,
+ "step": 20000
+ },
+ {
+ "epoch": 9.41,
+ "learning_rate": 2.9600734281780635e-07,
+ "loss": 0.0057,
+ "step": 20500
+ },
+ {
+ "epoch": 9.64,
+ "learning_rate": 1.8127581459385043e-07,
+ "loss": 0.005,
+ "step": 21000
+ },
+ {
+ "epoch": 9.87,
+ "learning_rate": 6.654428636989445e-08,
+ "loss": 0.0042,
+ "step": 21500
+ }
+ ],
+ "max_steps": 21790,
+ "num_train_epochs": 10,
+ "total_flos": 1.3296295768428288e+17,
+ "trial_name": null,
+ "trial_params": null
+ }
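
log_history interleaves two record types: step-level training logs (keyed by "loss"/"learning_rate") and per-epoch eval logs (keyed by "exact_match"/"f1"). A small stdlib sketch to split them and print the eval trajectory (path assumed local):

```python
import json

with open("checkpoint-21500/trainer_state.json") as f:
    state = json.load(f)

# Training logs carry "loss"; per-epoch eval logs carry "exact_match"/"f1".
train_logs = [r for r in state["log_history"] if "loss" in r]
eval_logs = [r for r in state["log_history"] if "exact_match" in r]

print(f"{len(train_logs)} training log entries; final loss {train_logs[-1]['loss']}")
for r in eval_logs:
    print(f"epoch {r['epoch']:>5}  step {r['step']:>5}  "
          f"EM {r['exact_match']:6.2f}  F1 {r['f1']:6.2f}")
```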
checkpoint-21500/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0dfd0a4787636edc929303bed05a4a1ab521eff925f1ec32039ce39afc930add
+ size 2351
checkpoint-21500/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
config.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "_name_or_path": "klue/roberta-large",
+ "architectures": [
+ "RobertaForQuestionAnswering"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "bos_token_id": 0,
+ "eos_token_id": 2,
+ "gradient_checkpointing": false,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 514,
+ "model_type": "roberta",
+ "num_attention_heads": 16,
+ "num_hidden_layers": 24,
+ "pad_token_id": 1,
+ "position_embedding_type": "absolute",
+ "tokenizer_class": "BertTokenizer",
+ "transformers_version": "4.5.0",
+ "type_vocab_size": 1,
+ "use_cache": true,
+ "vocab_size": 32000
+ }
eval_results.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "epoch": 10.0,
+ "eval_samples": 474,
+ "exact_match": 66.66666666666667,
+ "f1": 73.91331557875677
+ }
nbest_predictions.json ADDED
The diff for this file is too large to render. See raw diff
predictions.json ADDED
@@ -0,0 +1,242 @@
+ {
+ "mrc-0-003264": "한보철강",
+ "mrc-0-004762": "1871년",
+ "mrc-1-001810": "나뭇잎",
+ "mrc-1-000219": "금대야",
+ "mrc-1-000285": "수평적 관계",
+ "mrc-0-005106": "쇼와 천황의 옥음방송",
+ "mrc-0-002076": "코칭 스티치",
+ "mrc-1-000414": "복잡한 감염병",
+ "mrc-0-002875": "스페인",
+ "mrc-0-003828": "20세기 초",
+ "mrc-0-002778": "\"5월의 왕\"",
+ "mrc-0-003931": "'일급 비밀 프로젝트 2501'",
+ "mrc-0-002485": "테헤란",
+ "mrc-0-004483": "역사교육과정개발추진위원회",
+ "mrc-0-003032": "1967년 8월 16일",
+ "mrc-1-000724": "1965년",
+ "mrc-0-002138": "아리크 부케",
+ "mrc-0-003727": "〈중앙일보〉",
+ "mrc-0-003115": "이이노야 성",
+ "mrc-0-003088": "전체 4순위",
+ "mrc-0-005296": "뇌물",
+ "mrc-1-001495": "보통 유형 준융합성 천연두",
+ "mrc-0-005289": "데스탱 장군",
+ "mrc-0-002240": "박트리아",
+ "mrc-0-000248": "기와조각",
+ "mrc-0-001846": "《국가》",
+ "mrc-1-001653": "2011년 3월 19일",
+ "mrc-0-000785": "브라질 포르투갈어",
+ "mrc-0-001519": "릿지당",
+ "mrc-0-002457": "강도역왕",
+ "mrc-1-001479": "버지니아 리치먼드",
+ "mrc-0-001707": "만주국 관리",
+ "mrc-0-002280": "『협동조합에 관하여』",
+ "mrc-1-001023": "도고쿠",
+ "mrc-0-001022": "루피의 할아버지",
+ "mrc-0-001150": "원생생물, 갑각류, 극피류 등등의 착생하는 유기체이다.",
+ "mrc-0-001807": "광주교도소",
+ "mrc-0-001161": "베게티우스",
+ "mrc-0-004654": "가오슝 시",
+ "mrc-0-002468": "망치",
+ "mrc-0-001870": "마리즈 교수",
+ "mrc-1-000753": "\"트로이아 노바\"",
+ "mrc-0-001073": "이틀",
+ "mrc-1-001116": "의상대사",
+ "mrc-1-000995": "시모키타 반도",
+ "mrc-0-002175": "‘우유의 바다’",
+ "mrc-0-001894": "닭고기",
+ "mrc-0-000337": "소련",
+ "mrc-0-002013": "전치",
+ "mrc-1-001766": "흑색육",
+ "mrc-0-002333": "욱",
+ "mrc-1-001814": "독일 인민당",
+ "mrc-0-001554": "하기노 역",
+ "mrc-0-002247": "회칠",
+ "mrc-0-000547": "황강다리",
+ "mrc-1-001177": "직업 교육",
+ "mrc-0-000939": "공산당",
+ "mrc-0-000223": "매년 음력 정월",
+ "mrc-0-005386": "조계종",
+ "mrc-0-000118": "베이징",
+ "mrc-0-003529": "독일군",
+ "mrc-1-000459": "1932년",
+ "mrc-0-003801": "제서지전(齊西之戰)",
+ "mrc-0-004342": "잦은 이슬비(가루아)가 종일 지속되며, 짙은 안개가 섬을 가린다.",
+ "mrc-0-001311": "출생 천궁도",
+ "mrc-0-002931": "나말여초 불상",
+ "mrc-0-002767": "1938년",
+ "mrc-1-000961": "창 절제술",
+ "mrc-1-000796": "도버 밀",
+ "mrc-1-000449": "여정현",
+ "mrc-0-004133": "광배",
+ "mrc-0-003576": "울산",
+ "mrc-0-002692": "땅(은색)과 하늘(금색)을 맺고 푸는 권한",
+ "mrc-0-004899": "저수지",
+ "mrc-0-003677": "퀘이커 교도",
+ "mrc-1-000127": "피에르 오주로",
+ "mrc-0-005412": "지분의 일부",
+ "mrc-0-000561": "스탈린그라드 전투에서 독일군의 대참패",
+ "mrc-0-004268": "제 3자",
+ "mrc-1-000835": "페이팔에서 아디옌으로 점진적으로 바꿀 것이라고 발표하였다.",
+ "mrc-0-001704": "가리타 히사노리에",
+ "mrc-1-001132": "점수",
+ "mrc-0-002512": "코스모케라톱스",
+ "mrc-0-003017": "'사채회사'",
+ "mrc-0-000215": "스위스",
+ "mrc-0-002873": "\"공유지의 비극\"",
+ "mrc-0-003118": "계급 모순",
+ "mrc-1-000384": "해리 트루먼 대통령",
+ "mrc-0-005270": "무위태수",
+ "mrc-0-002981": "크라운라이터 라이온스",
+ "mrc-1-000158": "1990년",
+ "mrc-0-002189": "\"인간과 요괴의 완전한 평등\"",
+ "mrc-0-000905": "경애왕과 경순왕",
+ "mrc-0-001198": "도널드 트럼프 미국 대통령",
+ "mrc-0-003947": "바르톨로메오 브뤼기에르 신부",
+ "mrc-1-001328": "불교",
+ "mrc-0-000166": "경위대식 망원경",
+ "mrc-0-004090": "남큐슈 자동차 전용 도로",
+ "mrc-0-003522": "세실리아 페인",
+ "mrc-1-001398": "웹 2.0",
+ "mrc-0-000355": "수녀",
+ "mrc-0-002906": "맨해튼",
+ "mrc-0-001590": "상대성이론",
+ "mrc-1-001522": "대제 손권",
+ "mrc-0-004307": "마그넨티우스",
+ "mrc-0-002471": "제임스 뷰캐넌",
+ "mrc-1-001313": "트렁크",
+ "mrc-0-004083": "데코행진",
+ "mrc-0-005186": "약 600년간",
+ "mrc-0-004197": "깃털 셔틀콕의 타구감을 선호하고, 또한 플라스틱보다 깃털 셔틀콕이 정교한 컨트롤을 하기에 보다 더 적합하기 때문",
+ "mrc-0-003208": "1998년",
+ "mrc-1-000297": "드라",
+ "mrc-1-000358": "가르미슈파르텐키르헨",
+ "mrc-1-001785": "정태적인 자본주의",
+ "mrc-0-005042": "데미안",
+ "mrc-1-000839": "딱딱한 판으로 몸��� 둘러싸인 동물로 묘사하였다. 또, 코뿔소의 등",
+ "mrc-0-004677": "34자",
+ "mrc-0-003564": "혼묘지(本妙寺)",
+ "mrc-0-004202": "바르바로사 작전",
+ "mrc-0-001486": "동계건조(wintertrocken)",
+ "mrc-1-000291": "이질",
+ "mrc-0-003753": "중간 생산물",
+ "mrc-0-003033": "특정한 귀인 평향과 행동의 상관 관계",
+ "mrc-0-005478": "중성유체",
+ "mrc-0-005155": "기독교",
+ "mrc-1-000516": "1945년",
+ "mrc-0-002679": "브리튼인",
+ "mrc-0-004527": "미국",
+ "mrc-0-001980": "남강변에 형성된 포인트 바(Point bar)에 자리잡고 있다.",
+ "mrc-0-004495": "전쟁",
+ "mrc-1-000037": "뇌졸중",
+ "mrc-0-004092": "김수환 추기경",
+ "mrc-0-004015": "4년 연상의 김옥성(金玉聲)과 결혼했다. 이후 그는 60년간",
+ "mrc-0-000707": "펜실베이니아기",
+ "mrc-1-000658": "서남쪽",
+ "mrc-1-001446": "화이트 베이스",
+ "mrc-0-000412": "교수형",
+ "mrc-0-001288": "곽상",
+ "mrc-1-000197": "신도 케이",
+ "mrc-0-002253": "남편",
+ "mrc-1-000367": "비밀 투표",
+ "mrc-0-003906": "61화",
+ "mrc-0-001254": "카누",
+ "mrc-0-003289": "알프스 전기 주식회사",
+ "mrc-0-004435": "크리스타",
+ "mrc-0-003844": "배상금",
+ "mrc-0-005355": "“어떤 경우에도 행할 수” 있도록 하였다.",
+ "mrc-0-000521": "속도별 배치",
+ "mrc-1-000066": "1975년",
+ "mrc-0-001952": "제비초리",
+ "mrc-0-002462": "석달",
+ "mrc-1-000899": "황간",
+ "mrc-0-002886": "태화관",
+ "mrc-0-004454": "알라바스타 왕국에서 태어났다.",
+ "mrc-0-001646": "하노이",
+ "mrc-1-001534": "토끼를 보고도 그냥 뛰어갔기에",
+ "mrc-1-000918": "좌우대칭",
+ "mrc-0-005046": "1913년",
+ "mrc-1-001611": "라자그리하",
+ "mrc-0-005222": "대령",
+ "mrc-0-004879": "걸프 카르텔(카르텔 델 골포: Cartel del golfo)",
+ "mrc-1-001481": "피터 케스카트 왓슨",
+ "mrc-0-003637": "충격파",
+ "mrc-0-003882": "대한민국 3보병사단",
+ "mrc-1-001285": "토머스 바클레이",
+ "mrc-0-005109": "감정 전염",
+ "mrc-1-001455": "스위치 이더넷",
+ "mrc-0-001274": "소비감소",
+ "mrc-1-000855": "마츠키 촌",
+ "mrc-0-003587": "불안감을 느낀다는 것이다.",
+ "mrc-0-005031": "미토콘드리아",
+ "mrc-0-005154": "아케디아이다.",
+ "mrc-0-000477": "에마뉘엘의 과시하는 듯한 신앙심",
+ "mrc-0-002513": "소비자들의 반응",
+ "mrc-0-003022": "골룸바노",
+ "mrc-1-000946": "다케다",
+ "mrc-0-001240": "악당",
+ "mrc-0-001110": "유이엔",
+ "mrc-0-003149": "트레뻬제",
+ "mrc-0-004132": "포스카르네트",
+ "mrc-0-005300": "면세증",
+ "mrc-0-004203": "숙의 정씨",
+ "mrc-0-003566": "남성연대 홈페이지",
+ "mrc-0-002926": "1895년",
+ "mrc-0-000536": "《천체의 회전에 관하여》",
+ "mrc-0-003848": "히로시마",
+ "mrc-0-003760": "5·16 광장(여의도 광장)",
+ "mrc-0-001989": "6m",
+ "mrc-0-004863": "이등공송덕비건의소",
+ "mrc-1-000714": "헬리오시스",
+ "mrc-0-003211": "할머니",
+ "mrc-1-001173": "탄광 환경 문제",
+ "mrc-0-005360": "1796년",
+ "mrc-1-000096": "피자헛",
+ "mrc-0-003467": "가산",
+ "mrc-1-000593": "마거릿 대처 전 영국수상",
+ "mrc-0-001001": "디지털 컴퓨터가 등장",
+ "mrc-1-001567": "35년간",
+ "mrc-1-000132": "문치미르가 승계받았다.",
+ "mrc-0-000787": "1947년",
+ "mrc-0-001494": "아이치 전기 전용선",
+ "mrc-0-003146": "'달빛 정원'(Moonlight Garden)",
+ "mrc-0-004411": "란다우 튜브",
+ "mrc-1-001622": "우천",
+ "mrc-0-002887": "<젊은 근위대>",
+ "mrc-0-003437": "10만필",
+ "mrc-1-001121": "가능한 다양한 미래 시나리오의 도전을 해결할 수있는 군대",
+ "mrc-0-005286": "차고나라 전투",
+ "mrc-1-000295": "봅슬레이",
+ "mrc-0-000159": "일등병 토머스",
+ "mrc-0-004725": "1956년",
+ "mrc-0-004837": "클레몽트와 사랑에 빠졌다. 클레몽트가 결혼 허락을 구하나, 조르댕은 귀족이 아니란 이유로 거절한다.",
+ "mrc-1-000507": "17세기",
+ "mrc-0-001719": "'회', 과의 기능을 하는 옆에 달린 칼날은 '원'라고 불린다.",
+ "mrc-0-001253": "수입 금지 조치",
+ "mrc-1-001270": "\"광휘에의 각성\"",
+ "mrc-0-005324": "카바레나 클럽",
+ "mrc-0-005105": "보수",
+ "mrc-1-000983": "리퍼",
+ "mrc-0-002718": "국무회의",
+ "mrc-0-001552": "비단",
+ "mrc-0-003752": "사흘간",
+ "mrc-0-004530": "대과 급제자",
+ "mrc-0-003057": "한신 난바 선의 역 통로를 지나갈 수 있다.",
+ "mrc-0-003850": "태조 왕건",
+ "mrc-0-003262": "빈자의 십자군",
+ "mrc-0-001650": "멕시코",
+ "mrc-0-004662": "지질 과산화 생성물",
+ "mrc-0-003759": "독감",
+ "mrc-1-001279": "양당(楊黨)",
+ "mrc-0-001960": "슈바이츠 암 존탁",
+ "mrc-0-001162": "앨리스 페어살 스미스",
+ "mrc-0-004565": "다수결",
+ "mrc-0-000754": "코프먼과 저스티슨",
+ "mrc-1-000024": "물적 성과",
+ "mrc-0-000484": "제 양왕",
+ "mrc-0-002095": "'일곱 개의 신전 광장'",
+ "mrc-0-003083": "미나미 지로 총독",
+ "mrc-0-002978": "200,000명",
+ "mrc-1-000622": "블랑키주의"
+ }
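
predictions.json maps KLUE-MRC question IDs to predicted answer strings, so evaluation is a join on ID against gold answers. A minimal SQuAD-style scoring sketch (the `gold` dict is a placeholder; the exact answer normalization behind the reported EM/F1 is not part of this commit):

```python
import json
from collections import Counter

def token_f1(pred: str, gold: str) -> float:
    # Whitespace-token overlap F1, as in the SQuAD reference scorer.
    p, g = pred.split(), gold.split()
    overlap = sum((Counter(p) & Counter(g)).values())
    if overlap == 0:
        return 0.0
    prec, rec = overlap / len(p), overlap / len(g)
    return 2 * prec * rec / (prec + rec)

with open("predictions.json") as f:
    preds = json.load(f)

gold = {"mrc-0-003264": ["한보철강"]}  # placeholder gold answers
em = sum(preds[q] in ans for q, ans in gold.items()) / len(gold)
f1 = sum(max(token_f1(preds[q], a) for a in ans) for q, ans in gold.items()) / len(gold)
print(f"EM {100 * em:.2f} / F1 {100 * f1:.2f}")
```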
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4586e5fe1fd041b9e0356944c7fbacb30bb807b08b0347fee0959e764f434996
+ size 1342607991
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "[CLS]", "eos_token": "[SEP]", "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "do_basic_tokenize": true, "never_split": null, "bos_token": "[CLS]", "eos_token": "[SEP]", "model_max_length": 512, "tokenizer_class": "BertTokenizer", "special_tokens_map_file": "/opt/ml/.cache/huggingface/transformers/1a24ab4628028ed80dea35ce3334a636dc656fd9a17a09bad377f88f0cbecdac.70c17d6e4d492c8f24f5bb97ab56c7f272e947112c6faf9dd846da42ba13eb23", "name_or_path": "klue/roberta-large"}
train_results.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "epoch": 10.0,
+ "init_mem_cpu_alloc_delta": 955506688,
+ "init_mem_cpu_peaked_delta": 1337384960,
+ "init_mem_gpu_alloc_delta": 1343489024,
+ "init_mem_gpu_peaked_delta": 0,
+ "train_mem_cpu_alloc_delta": 391798784,
+ "train_mem_cpu_peaked_delta": 448819200,
+ "train_mem_gpu_alloc_delta": 4055639040,
+ "train_mem_gpu_peaked_delta": 10974608384,
+ "train_runtime": 17438.5747,
+ "train_samples": 17427,
+ "train_samples_per_second": 1.25
+ }
train_results.txt ADDED
@@ -0,0 +1,12 @@
+ epoch = 10.0
+ init_mem_cpu_alloc_delta = 955506688
+ init_mem_cpu_peaked_delta = 1337384960
+ init_mem_gpu_alloc_delta = 1343489024
+ init_mem_gpu_peaked_delta = 0
+ train_mem_cpu_alloc_delta = 391798784
+ train_mem_cpu_peaked_delta = 448819200
+ train_mem_gpu_alloc_delta = 4055639040
+ train_mem_gpu_peaked_delta = 10974608384
+ train_runtime = 17438.5747
+ train_samples = 17427
+ train_samples_per_second = 1.25
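
A note on train_samples_per_second: 1.25 matches 21790 optimizer steps / 17438.5747 s ≈ 1.25, not 17427 samples × 10 epochs / runtime (≈ 10). In transformers 4.5 the Trainer computed this figure from the step count, so it is effectively steps per second; later releases report samples per second and steps per second as separate fields.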
trainer_state.json ADDED
@@ -0,0 +1,341 @@
+ {
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 10.0,
+ "global_step": 21790,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.23,
+ "learning_rate": 4.885268471776045e-06,
+ "loss": 1.4166,
+ "step": 500
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 4.770536943552089e-06,
+ "loss": 0.6545,
+ "step": 1000
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.655805415328132e-06,
+ "loss": 0.5315,
+ "step": 1500
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 4.541073887104177e-06,
+ "loss": 0.4715,
+ "step": 2000
+ },
+ {
+ "epoch": 1.0,
+ "exact_match": 67.5,
+ "f1": 75.87103174603179,
+ "step": 2179
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 4.4263423588802205e-06,
+ "loss": 0.3414,
+ "step": 2500
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 4.311610830656265e-06,
+ "loss": 0.2945,
+ "step": 3000
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 4.196879302432309e-06,
+ "loss": 0.3019,
+ "step": 3500
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 4.0821477742083525e-06,
+ "loss": 0.3033,
+ "step": 4000
+ },
+ {
+ "epoch": 2.0,
+ "exact_match": 68.33333333333333,
+ "f1": 76.34986772486776,
+ "step": 4358
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 3.967416245984397e-06,
+ "loss": 0.2622,
+ "step": 4500
+ },
+ {
+ "epoch": 2.29,
+ "learning_rate": 3.852684717760441e-06,
+ "loss": 0.1801,
+ "step": 5000
+ },
+ {
+ "epoch": 2.52,
+ "learning_rate": 3.7379531895364852e-06,
+ "loss": 0.1697,
+ "step": 5500
+ },
+ {
+ "epoch": 2.75,
+ "learning_rate": 3.623221661312529e-06,
+ "loss": 0.2118,
+ "step": 6000
+ },
+ {
+ "epoch": 2.98,
+ "learning_rate": 3.508490133088573e-06,
+ "loss": 0.1559,
+ "step": 6500
+ },
+ {
+ "epoch": 3.0,
+ "exact_match": 67.91666666666667,
+ "f1": 76.26147001147002,
+ "step": 6537
+ },
+ {
+ "epoch": 3.21,
+ "learning_rate": 3.393758604864617e-06,
+ "loss": 0.1068,
+ "step": 7000
+ },
+ {
+ "epoch": 3.44,
+ "learning_rate": 3.2790270766406612e-06,
+ "loss": 0.0951,
+ "step": 7500
+ },
+ {
+ "epoch": 3.67,
+ "learning_rate": 3.1642955484167054e-06,
+ "loss": 0.0986,
+ "step": 8000
+ },
+ {
+ "epoch": 3.9,
+ "learning_rate": 3.049564020192749e-06,
+ "loss": 0.1122,
+ "step": 8500
+ },
+ {
+ "epoch": 4.0,
+ "exact_match": 67.5,
+ "f1": 74.74419793169795,
+ "step": 8716
+ },
+ {
+ "epoch": 4.13,
+ "learning_rate": 2.934832491968793e-06,
+ "loss": 0.0811,
+ "step": 9000
+ },
+ {
+ "epoch": 4.36,
+ "learning_rate": 2.8201009637448373e-06,
+ "loss": 0.0546,
+ "step": 9500
+ },
+ {
+ "epoch": 4.59,
+ "learning_rate": 2.7053694355208814e-06,
+ "loss": 0.056,
+ "step": 10000
+ },
+ {
+ "epoch": 4.82,
+ "learning_rate": 2.5906379072969255e-06,
+ "loss": 0.0731,
+ "step": 10500
+ },
+ {
+ "epoch": 5.0,
+ "exact_match": 70.41666666666667,
+ "f1": 77.59175084175087,
+ "step": 10895
+ },
+ {
+ "epoch": 5.05,
+ "learning_rate": 2.4759063790729696e-06,
+ "loss": 0.0687,
+ "step": 11000
+ },
+ {
+ "epoch": 5.28,
+ "learning_rate": 2.3611748508490133e-06,
+ "loss": 0.0471,
+ "step": 11500
+ },
+ {
+ "epoch": 5.51,
+ "learning_rate": 2.2464433226250574e-06,
+ "loss": 0.0426,
+ "step": 12000
+ },
+ {
+ "epoch": 5.74,
+ "learning_rate": 2.1317117944011015e-06,
+ "loss": 0.0437,
+ "step": 12500
+ },
+ {
+ "epoch": 5.97,
+ "learning_rate": 2.0169802661771456e-06,
+ "loss": 0.0299,
+ "step": 13000
+ },
+ {
+ "epoch": 6.0,
+ "exact_match": 66.66666666666667,
+ "f1": 74.826330804272,
+ "step": 13074
+ },
+ {
+ "epoch": 6.2,
+ "learning_rate": 1.9022487379531897e-06,
+ "loss": 0.0232,
+ "step": 13500
+ },
+ {
+ "epoch": 6.42,
+ "learning_rate": 1.7875172097292336e-06,
+ "loss": 0.0221,
+ "step": 14000
+ },
+ {
+ "epoch": 6.65,
+ "learning_rate": 1.672785681505278e-06,
+ "loss": 0.0242,
+ "step": 14500
+ },
+ {
+ "epoch": 6.88,
+ "learning_rate": 1.5580541532813219e-06,
+ "loss": 0.0327,
+ "step": 15000
+ },
+ {
+ "epoch": 7.0,
+ "exact_match": 67.08333333333333,
+ "f1": 75.72919876963996,
+ "step": 15253
+ },
+ {
+ "epoch": 7.11,
+ "learning_rate": 1.443322625057366e-06,
+ "loss": 0.0207,
+ "step": 15500
+ },
+ {
+ "epoch": 7.34,
+ "learning_rate": 1.3285910968334099e-06,
+ "loss": 0.0243,
+ "step": 16000
+ },
+ {
+ "epoch": 7.57,
+ "learning_rate": 1.213859568609454e-06,
+ "loss": 0.0127,
+ "step": 16500
+ },
+ {
+ "epoch": 7.8,
+ "learning_rate": 1.099128040385498e-06,
+ "loss": 0.0189,
+ "step": 17000
+ },
+ {
+ "epoch": 8.0,
+ "exact_match": 66.25,
+ "f1": 73.67448496492618,
+ "step": 17432
+ },
+ {
+ "epoch": 8.03,
+ "learning_rate": 9.84396512161542e-07,
+ "loss": 0.0234,
+ "step": 17500
+ },
+ {
+ "epoch": 8.26,
+ "learning_rate": 8.696649839375861e-07,
+ "loss": 0.0041,
+ "step": 18000
+ },
+ {
+ "epoch": 8.49,
+ "learning_rate": 7.549334557136302e-07,
+ "loss": 0.0115,
+ "step": 18500
+ },
+ {
+ "epoch": 8.72,
+ "learning_rate": 6.402019274896742e-07,
+ "loss": 0.0159,
+ "step": 19000
+ },
+ {
+ "epoch": 8.95,
+ "learning_rate": 5.254703992657182e-07,
+ "loss": 0.0116,
+ "step": 19500
+ },
+ {
+ "epoch": 9.0,
+ "exact_match": 69.16666666666667,
+ "f1": 77.33799506593625,
+ "step": 19611
+ },
+ {
+ "epoch": 9.18,
+ "learning_rate": 4.107388710417623e-07,
+ "loss": 0.0096,
+ "step": 20000
+ },
+ {
+ "epoch": 9.41,
+ "learning_rate": 2.9600734281780635e-07,
+ "loss": 0.0057,
+ "step": 20500
+ },
+ {
+ "epoch": 9.64,
+ "learning_rate": 1.8127581459385043e-07,
+ "loss": 0.005,
+ "step": 21000
+ },
+ {
+ "epoch": 9.87,
+ "learning_rate": 6.654428636989445e-08,
+ "loss": 0.0042,
+ "step": 21500
+ },
+ {
+ "epoch": 10.0,
+ "exact_match": 66.66666666666667,
+ "f1": 73.91331557875677,
+ "step": 21790
+ },
+ {
+ "epoch": 10.0,
+ "step": 21790,
+ "total_flos": 1.3475301465871872e+17,
+ "train_runtime": 17438.5747,
+ "train_samples_per_second": 1.25
+ }
+ ],
+ "max_steps": 21790,
+ "num_train_epochs": 10,
+ "total_flos": 1.3475301465871872e+17,
+ "trial_name": null,
+ "trial_params": null
+ }
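
Reading the eval trajectory above: EM peaks at epoch 5 (70.42 EM / 77.59 F1, step 10895) and ends at 66.67 / 73.91 at epoch 10, which is what all_results.json and eval_results.json report. Since best_metric and best_model_checkpoint are null, best-model selection (the Trainer's load_best_model_at_end) was apparently not enabled, so the exported pytorch_model.bin would hold the final-epoch weights rather than the epoch-5 peak.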
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0dfd0a4787636edc929303bed05a4a1ab521eff925f1ec32039ce39afc930add
+ size 2351
vocab.txt ADDED
The diff for this file is too large to render. See raw diff