choco_9966 committed on
Commit 47adc1c
Parent: 3753c40

remove nia result

EleutherAI/polyglot-ko-12.8b/result_2023-09-26 09:55:07.json DELETED
@@ -1,444 +0,0 @@
- {
-     "results": {
-         "harness|ko_arc_challenge|25": {
-             "acc": 0.2858361774744027,
-             "acc_stderr": 0.013203196088537365,
-             "acc_norm": 0.33447098976109213,
-             "acc_norm_stderr": 0.013787460322441374
-         },
-         "harness|ko_hellaswag|10": {
-             "acc": 0.38548097988448515,
-             "acc_stderr": 0.004857140410776749,
-             "acc_norm": 0.5030870344552878,
-             "acc_norm_stderr": 0.0049896863074845536
-         },
-         "harness|ko_mmlu_world_religions|5": {
-             "acc": 0.30994152046783624,
-             "acc_stderr": 0.03546976959393161,
-             "acc_norm": 0.30994152046783624,
-             "acc_norm_stderr": 0.03546976959393161
-         },
-         "harness|ko_mmlu_management|5": {
-             "acc": 0.17475728155339806,
-             "acc_stderr": 0.03760178006026621,
-             "acc_norm": 0.17475728155339806,
-             "acc_norm_stderr": 0.03760178006026621
-         },
-         "harness|ko_mmlu_miscellaneous|5": {
-             "acc": 0.25925925925925924,
-             "acc_stderr": 0.015671006009339572,
-             "acc_norm": 0.25925925925925924,
-             "acc_norm_stderr": 0.015671006009339572
-         },
-         "harness|ko_mmlu_anatomy|5": {
-             "acc": 0.22962962962962963,
-             "acc_stderr": 0.036333844140734636,
-             "acc_norm": 0.22962962962962963,
-             "acc_norm_stderr": 0.036333844140734636
-         },
-         "harness|ko_mmlu_abstract_algebra|5": {
-             "acc": 0.27,
-             "acc_stderr": 0.0446196043338474,
-             "acc_norm": 0.27,
-             "acc_norm_stderr": 0.0446196043338474
-         },
-         "harness|ko_mmlu_conceptual_physics|5": {
-             "acc": 0.2127659574468085,
-             "acc_stderr": 0.026754391348039787,
-             "acc_norm": 0.2127659574468085,
-             "acc_norm_stderr": 0.026754391348039787
-         },
-         "harness|ko_mmlu_virology|5": {
-             "acc": 0.22289156626506024,
-             "acc_stderr": 0.03240004825594687,
-             "acc_norm": 0.22289156626506024,
-             "acc_norm_stderr": 0.03240004825594687
-         },
-         "harness|ko_mmlu_philosophy|5": {
-             "acc": 0.31189710610932475,
-             "acc_stderr": 0.02631185807185416,
-             "acc_norm": 0.31189710610932475,
-             "acc_norm_stderr": 0.02631185807185416
-         },
-         "harness|ko_mmlu_human_aging|5": {
-             "acc": 0.20179372197309417,
-             "acc_stderr": 0.02693611191280227,
-             "acc_norm": 0.20179372197309417,
-             "acc_norm_stderr": 0.02693611191280227
-         },
-         "harness|ko_mmlu_human_sexuality|5": {
-             "acc": 0.2366412213740458,
-             "acc_stderr": 0.03727673575596917,
-             "acc_norm": 0.2366412213740458,
-             "acc_norm_stderr": 0.03727673575596917
-         },
-         "harness|ko_mmlu_medical_genetics|5": {
-             "acc": 0.23,
-             "acc_stderr": 0.04229525846816506,
-             "acc_norm": 0.23,
-             "acc_norm_stderr": 0.04229525846816506
-         },
-         "harness|ko_mmlu_high_school_geography|5": {
-             "acc": 0.2474747474747475,
-             "acc_stderr": 0.030746300742124498,
-             "acc_norm": 0.2474747474747475,
-             "acc_norm_stderr": 0.030746300742124498
-         },
-         "harness|ko_mmlu_electrical_engineering|5": {
-             "acc": 0.296551724137931,
-             "acc_stderr": 0.038061426873099935,
-             "acc_norm": 0.296551724137931,
-             "acc_norm_stderr": 0.038061426873099935
-         },
-         "harness|ko_mmlu_college_physics|5": {
-             "acc": 0.30392156862745096,
-             "acc_stderr": 0.045766654032077615,
-             "acc_norm": 0.30392156862745096,
-             "acc_norm_stderr": 0.045766654032077615
-         },
-         "harness|ko_mmlu_high_school_microeconomics|5": {
-             "acc": 0.23529411764705882,
-             "acc_stderr": 0.027553614467863797,
-             "acc_norm": 0.23529411764705882,
-             "acc_norm_stderr": 0.027553614467863797
-         },
-         "harness|ko_mmlu_high_school_macroeconomics|5": {
-             "acc": 0.21025641025641026,
-             "acc_stderr": 0.020660597485026928,
-             "acc_norm": 0.21025641025641026,
-             "acc_norm_stderr": 0.020660597485026928
-         },
-         "harness|ko_mmlu_computer_security|5": {
-             "acc": 0.26,
-             "acc_stderr": 0.0440844002276808,
-             "acc_norm": 0.26,
-             "acc_norm_stderr": 0.0440844002276808
-         },
-         "harness|ko_mmlu_global_facts|5": {
-             "acc": 0.21,
-             "acc_stderr": 0.040936018074033256,
-             "acc_norm": 0.21,
-             "acc_norm_stderr": 0.040936018074033256
-         },
-         "harness|ko_mmlu_jurisprudence|5": {
-             "acc": 0.25,
-             "acc_stderr": 0.04186091791394607,
-             "acc_norm": 0.25,
-             "acc_norm_stderr": 0.04186091791394607
-         },
-         "harness|ko_mmlu_high_school_chemistry|5": {
-             "acc": 0.26108374384236455,
-             "acc_stderr": 0.0309037969521145,
-             "acc_norm": 0.26108374384236455,
-             "acc_norm_stderr": 0.0309037969521145
-         },
-         "harness|ko_mmlu_high_school_biology|5": {
-             "acc": 0.25161290322580643,
-             "acc_stderr": 0.024685979286239963,
-             "acc_norm": 0.25161290322580643,
-             "acc_norm_stderr": 0.024685979286239963
-         },
-         "harness|ko_mmlu_marketing|5": {
-             "acc": 0.23076923076923078,
-             "acc_stderr": 0.027601921381417604,
-             "acc_norm": 0.23076923076923078,
-             "acc_norm_stderr": 0.027601921381417604
-         },
-         "harness|ko_mmlu_clinical_knowledge|5": {
-             "acc": 0.23773584905660378,
-             "acc_stderr": 0.026199808807561932,
-             "acc_norm": 0.23773584905660378,
-             "acc_norm_stderr": 0.026199808807561932
-         },
-         "harness|ko_mmlu_public_relations|5": {
-             "acc": 0.21818181818181817,
-             "acc_stderr": 0.03955932861795833,
-             "acc_norm": 0.21818181818181817,
-             "acc_norm_stderr": 0.03955932861795833
-         },
-         "harness|ko_mmlu_high_school_mathematics|5": {
-             "acc": 0.26296296296296295,
-             "acc_stderr": 0.02684205787383371,
-             "acc_norm": 0.26296296296296295,
-             "acc_norm_stderr": 0.02684205787383371
-         },
-         "harness|ko_mmlu_high_school_physics|5": {
-             "acc": 0.2847682119205298,
-             "acc_stderr": 0.03684881521389024,
-             "acc_norm": 0.2847682119205298,
-             "acc_norm_stderr": 0.03684881521389024
-         },
-         "harness|ko_mmlu_sociology|5": {
-             "acc": 0.263681592039801,
-             "acc_stderr": 0.03115715086935554,
-             "acc_norm": 0.263681592039801,
-             "acc_norm_stderr": 0.03115715086935554
-         },
-         "harness|ko_mmlu_college_medicine|5": {
-             "acc": 0.24855491329479767,
-             "acc_stderr": 0.03295304696818317,
-             "acc_norm": 0.24855491329479767,
-             "acc_norm_stderr": 0.03295304696818317
-         },
-         "harness|ko_mmlu_elementary_mathematics|5": {
-             "acc": 0.2698412698412698,
-             "acc_stderr": 0.02286083830923207,
-             "acc_norm": 0.2698412698412698,
-             "acc_norm_stderr": 0.02286083830923207
-         },
-         "harness|ko_mmlu_college_biology|5": {
-             "acc": 0.2569444444444444,
-             "acc_stderr": 0.036539469694421,
-             "acc_norm": 0.2569444444444444,
-             "acc_norm_stderr": 0.036539469694421
-         },
-         "harness|ko_mmlu_college_chemistry|5": {
-             "acc": 0.2,
-             "acc_stderr": 0.04020151261036846,
-             "acc_norm": 0.2,
-             "acc_norm_stderr": 0.04020151261036846
-         },
-         "harness|ko_mmlu_us_foreign_policy|5": {
-             "acc": 0.26,
-             "acc_stderr": 0.04408440022768079,
-             "acc_norm": 0.26,
-             "acc_norm_stderr": 0.04408440022768079
-         },
-         "harness|ko_mmlu_moral_disputes|5": {
-             "acc": 0.23121387283236994,
-             "acc_stderr": 0.022698657167855716,
-             "acc_norm": 0.23121387283236994,
-             "acc_norm_stderr": 0.022698657167855716
-         },
-         "harness|ko_mmlu_logical_fallacies|5": {
-             "acc": 0.2883435582822086,
-             "acc_stderr": 0.035590395316173425,
-             "acc_norm": 0.2883435582822086,
-             "acc_norm_stderr": 0.035590395316173425
-         },
-         "harness|ko_mmlu_prehistory|5": {
-             "acc": 0.2808641975308642,
-             "acc_stderr": 0.025006469755799208,
-             "acc_norm": 0.2808641975308642,
-             "acc_norm_stderr": 0.025006469755799208
-         },
-         "harness|ko_mmlu_college_mathematics|5": {
-             "acc": 0.33,
-             "acc_stderr": 0.047258156262526045,
-             "acc_norm": 0.33,
-             "acc_norm_stderr": 0.047258156262526045
-         },
-         "harness|ko_mmlu_high_school_government_and_politics|5": {
-             "acc": 0.2694300518134715,
-             "acc_stderr": 0.03201867122877794,
-             "acc_norm": 0.2694300518134715,
-             "acc_norm_stderr": 0.03201867122877794
-         },
-         "harness|ko_mmlu_econometrics|5": {
-             "acc": 0.23684210526315788,
-             "acc_stderr": 0.03999423879281336,
-             "acc_norm": 0.23684210526315788,
-             "acc_norm_stderr": 0.03999423879281336
-         },
-         "harness|ko_mmlu_high_school_psychology|5": {
-             "acc": 0.26605504587155965,
-             "acc_stderr": 0.018946022322225593,
-             "acc_norm": 0.26605504587155965,
-             "acc_norm_stderr": 0.018946022322225593
-         },
-         "harness|ko_mmlu_formal_logic|5": {
-             "acc": 0.19047619047619047,
-             "acc_stderr": 0.035122074123020534,
-             "acc_norm": 0.19047619047619047,
-             "acc_norm_stderr": 0.035122074123020534
-         },
-         "harness|ko_mmlu_nutrition|5": {
-             "acc": 0.2581699346405229,
-             "acc_stderr": 0.02505850331695815,
-             "acc_norm": 0.2581699346405229,
-             "acc_norm_stderr": 0.02505850331695815
-         },
-         "harness|ko_mmlu_business_ethics|5": {
-             "acc": 0.22,
-             "acc_stderr": 0.041633319989322674,
-             "acc_norm": 0.22,
-             "acc_norm_stderr": 0.041633319989322674
-         },
-         "harness|ko_mmlu_international_law|5": {
-             "acc": 0.36363636363636365,
-             "acc_stderr": 0.043913262867240704,
-             "acc_norm": 0.36363636363636365,
-             "acc_norm_stderr": 0.043913262867240704
-         },
-         "harness|ko_mmlu_astronomy|5": {
-             "acc": 0.2894736842105263,
-             "acc_stderr": 0.036906779861372814,
-             "acc_norm": 0.2894736842105263,
-             "acc_norm_stderr": 0.036906779861372814
-         },
-         "harness|ko_mmlu_professional_psychology|5": {
-             "acc": 0.20261437908496732,
-             "acc_stderr": 0.01626105528374613,
-             "acc_norm": 0.20261437908496732,
-             "acc_norm_stderr": 0.01626105528374613
-         },
-         "harness|ko_mmlu_professional_accounting|5": {
-             "acc": 0.22695035460992907,
-             "acc_stderr": 0.02498710636564298,
-             "acc_norm": 0.22695035460992907,
-             "acc_norm_stderr": 0.02498710636564298
-         },
-         "harness|ko_mmlu_machine_learning|5": {
-             "acc": 0.25,
-             "acc_stderr": 0.04109974682633932,
-             "acc_norm": 0.25,
-             "acc_norm_stderr": 0.04109974682633932
-         },
-         "harness|ko_mmlu_high_school_statistics|5": {
-             "acc": 0.3611111111111111,
-             "acc_stderr": 0.03275773486100999,
-             "acc_norm": 0.3611111111111111,
-             "acc_norm_stderr": 0.03275773486100999
-         },
-         "harness|ko_mmlu_moral_scenarios|5": {
-             "acc": 0.24692737430167597,
-             "acc_stderr": 0.014422292204808852,
-             "acc_norm": 0.24692737430167597,
-             "acc_norm_stderr": 0.014422292204808852
-         },
-         "harness|ko_mmlu_college_computer_science|5": {
-             "acc": 0.27,
-             "acc_stderr": 0.04461960433384741,
-             "acc_norm": 0.27,
-             "acc_norm_stderr": 0.04461960433384741
-         },
-         "harness|ko_mmlu_high_school_computer_science|5": {
-             "acc": 0.3,
-             "acc_stderr": 0.046056618647183814,
-             "acc_norm": 0.3,
-             "acc_norm_stderr": 0.046056618647183814
-         },
-         "harness|ko_mmlu_professional_medicine|5": {
-             "acc": 0.44485294117647056,
-             "acc_stderr": 0.030187532060329383,
-             "acc_norm": 0.44485294117647056,
-             "acc_norm_stderr": 0.030187532060329383
-         },
-         "harness|ko_mmlu_security_studies|5": {
-             "acc": 0.22857142857142856,
-             "acc_stderr": 0.026882144922307748,
-             "acc_norm": 0.22857142857142856,
-             "acc_norm_stderr": 0.026882144922307748
-         },
-         "harness|ko_mmlu_high_school_world_history|5": {
-             "acc": 0.32489451476793246,
-             "acc_stderr": 0.030486039389105303,
-             "acc_norm": 0.32489451476793246,
-             "acc_norm_stderr": 0.030486039389105303
-         },
-         "harness|ko_mmlu_professional_law|5": {
-             "acc": 0.2588005215123859,
-             "acc_stderr": 0.011186109046564616,
-             "acc_norm": 0.2588005215123859,
-             "acc_norm_stderr": 0.011186109046564616
-         },
-         "harness|ko_mmlu_high_school_us_history|5": {
-             "acc": 0.27941176470588236,
-             "acc_stderr": 0.031493281045079556,
-             "acc_norm": 0.27941176470588236,
-             "acc_norm_stderr": 0.031493281045079556
-         },
-         "harness|ko_mmlu_high_school_european_history|5": {
-             "acc": 0.23030303030303031,
-             "acc_stderr": 0.032876667586034886,
-             "acc_norm": 0.23030303030303031,
-             "acc_norm_stderr": 0.032876667586034886
-         },
-         "harness|ko_truthfulqa_mc|0": {
-             "mc1": 0.2350061199510404,
-             "mc1_stderr": 0.014843061507731613,
-             "mc2": 0.3906729756392404,
-             "mc2_stderr": 0.014736032677919548
-         },
-         "harness|ko_commongen_v2|2": {
-             "acc": 0.3105076741440378,
-             "acc_stderr": 0.015908004528762017,
-             "acc_norm": 0.39787485242030696,
-             "acc_norm_stderr": 0.016827959054733388
-         }
-     },
-     "versions": {
-         "all": 0,
-         "harness|ko_arc_challenge|25": 0,
-         "harness|ko_hellaswag|10": 0,
-         "harness|ko_mmlu_world_religions|5": 1,
-         "harness|ko_mmlu_management|5": 1,
-         "harness|ko_mmlu_miscellaneous|5": 1,
-         "harness|ko_mmlu_anatomy|5": 1,
-         "harness|ko_mmlu_abstract_algebra|5": 1,
-         "harness|ko_mmlu_conceptual_physics|5": 1,
-         "harness|ko_mmlu_virology|5": 1,
-         "harness|ko_mmlu_philosophy|5": 1,
-         "harness|ko_mmlu_human_aging|5": 1,
-         "harness|ko_mmlu_human_sexuality|5": 1,
-         "harness|ko_mmlu_medical_genetics|5": 1,
-         "harness|ko_mmlu_high_school_geography|5": 1,
-         "harness|ko_mmlu_electrical_engineering|5": 1,
-         "harness|ko_mmlu_college_physics|5": 1,
-         "harness|ko_mmlu_high_school_microeconomics|5": 1,
-         "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-         "harness|ko_mmlu_computer_security|5": 1,
-         "harness|ko_mmlu_global_facts|5": 1,
-         "harness|ko_mmlu_jurisprudence|5": 1,
-         "harness|ko_mmlu_high_school_chemistry|5": 1,
-         "harness|ko_mmlu_high_school_biology|5": 1,
-         "harness|ko_mmlu_marketing|5": 1,
-         "harness|ko_mmlu_clinical_knowledge|5": 1,
-         "harness|ko_mmlu_public_relations|5": 1,
-         "harness|ko_mmlu_high_school_mathematics|5": 1,
-         "harness|ko_mmlu_high_school_physics|5": 1,
-         "harness|ko_mmlu_sociology|5": 1,
-         "harness|ko_mmlu_college_medicine|5": 1,
-         "harness|ko_mmlu_elementary_mathematics|5": 1,
-         "harness|ko_mmlu_college_biology|5": 1,
-         "harness|ko_mmlu_college_chemistry|5": 1,
-         "harness|ko_mmlu_us_foreign_policy|5": 1,
-         "harness|ko_mmlu_moral_disputes|5": 1,
-         "harness|ko_mmlu_logical_fallacies|5": 1,
-         "harness|ko_mmlu_prehistory|5": 1,
-         "harness|ko_mmlu_college_mathematics|5": 1,
-         "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-         "harness|ko_mmlu_econometrics|5": 1,
-         "harness|ko_mmlu_high_school_psychology|5": 1,
-         "harness|ko_mmlu_formal_logic|5": 1,
-         "harness|ko_mmlu_nutrition|5": 1,
-         "harness|ko_mmlu_business_ethics|5": 1,
-         "harness|ko_mmlu_international_law|5": 1,
-         "harness|ko_mmlu_astronomy|5": 1,
-         "harness|ko_mmlu_professional_psychology|5": 1,
-         "harness|ko_mmlu_professional_accounting|5": 1,
-         "harness|ko_mmlu_machine_learning|5": 1,
-         "harness|ko_mmlu_high_school_statistics|5": 1,
-         "harness|ko_mmlu_moral_scenarios|5": 1,
-         "harness|ko_mmlu_college_computer_science|5": 1,
-         "harness|ko_mmlu_high_school_computer_science|5": 1,
-         "harness|ko_mmlu_professional_medicine|5": 1,
-         "harness|ko_mmlu_security_studies|5": 1,
-         "harness|ko_mmlu_high_school_world_history|5": 1,
-         "harness|ko_mmlu_professional_law|5": 1,
-         "harness|ko_mmlu_high_school_us_history|5": 1,
-         "harness|ko_mmlu_high_school_european_history|5": 1,
-         "harness|ko_truthfulqa_mc|0": 0,
-         "harness|ko_commongen_v2|2": 1
-     },
-     "config_general": {
-         "model_name": "EleutherAI/polyglot-ko-12.8b",
-         "model_sha": "09dfc839067bf44e7f52976eca8adbc17f04e1b0",
-         "model_dtype": "torch.float16",
-         "lighteval_sha": "",
-         "num_few_shot_default": 0,
-         "num_fewshot_seeds": 1,
-         "override_batch_size": 1,
-         "max_samples": null
-     }
- }
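
The file above and the remaining files removed in this commit share the same layout: per-task metric blocks under "results", a "versions" map, and a "config_general" block. A minimal sketch of loading one such file and averaging its plain accuracies (Python standard library only; the path and the unweighted mean are illustrative assumptions, not this leaderboard's own scoring code):

    import json

    # Load one removed result file (path as it appeared in this repo before deletion).
    path = "EleutherAI/polyglot-ko-12.8b/result_2023-09-26 09:55:07.json"
    with open(path, encoding="utf-8") as f:
        data = json.load(f)

    # Average plain accuracies; ko_truthfulqa_mc reports mc1/mc2 instead and is skipped.
    accs = [m["acc"] for m in data["results"].values() if "acc" in m]
    model = data["config_general"]["model_name"]
    print(f"{model}: mean acc over {len(accs)} tasks = {sum(accs) / len(accs):.4f}")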
EleutherAI/polyglot-ko-5.8b/result_2023-09-24 15:21:38.json DELETED
@@ -1,444 +0,0 @@
- {
-     "results": {
-         "harness|ko_arc_challenge|25": {
-             "acc": 0.2696245733788396,
-             "acc_stderr": 0.012968040686869164,
-             "acc_norm": 0.32764505119453924,
-             "acc_norm_stderr": 0.013715847940719339
-         },
-         "harness|ko_hellaswag|10": {
-             "acc": 0.36875124477195775,
-             "acc_stderr": 0.004814803098436807,
-             "acc_norm": 0.48157737502489545,
-             "acc_norm_stderr": 0.004986393266269156
-         },
-         "harness|ko_mmlu_world_religions|5": {
-             "acc": 0.17543859649122806,
-             "acc_stderr": 0.029170885500727665,
-             "acc_norm": 0.17543859649122806,
-             "acc_norm_stderr": 0.029170885500727665
-         },
-         "harness|ko_mmlu_management|5": {
-             "acc": 0.3592233009708738,
-             "acc_stderr": 0.04750458399041693,
-             "acc_norm": 0.3592233009708738,
-             "acc_norm_stderr": 0.04750458399041693
-         },
-         "harness|ko_mmlu_miscellaneous|5": {
-             "acc": 0.20178799489144317,
-             "acc_stderr": 0.014351702181636861,
-             "acc_norm": 0.20178799489144317,
-             "acc_norm_stderr": 0.014351702181636861
-         },
-         "harness|ko_mmlu_anatomy|5": {
-             "acc": 0.22962962962962963,
-             "acc_stderr": 0.036333844140734636,
-             "acc_norm": 0.22962962962962963,
-             "acc_norm_stderr": 0.036333844140734636
-         },
-         "harness|ko_mmlu_abstract_algebra|5": {
-             "acc": 0.22,
-             "acc_stderr": 0.0416333199893227,
-             "acc_norm": 0.22,
-             "acc_norm_stderr": 0.0416333199893227
-         },
-         "harness|ko_mmlu_conceptual_physics|5": {
-             "acc": 0.17446808510638298,
-             "acc_stderr": 0.02480944233550398,
-             "acc_norm": 0.17446808510638298,
-             "acc_norm_stderr": 0.02480944233550398
-         },
-         "harness|ko_mmlu_virology|5": {
-             "acc": 0.1927710843373494,
-             "acc_stderr": 0.030709824050565264,
-             "acc_norm": 0.1927710843373494,
-             "acc_norm_stderr": 0.030709824050565264
-         },
-         "harness|ko_mmlu_philosophy|5": {
-             "acc": 0.24115755627009647,
-             "acc_stderr": 0.024296594034763426,
-             "acc_norm": 0.24115755627009647,
-             "acc_norm_stderr": 0.024296594034763426
-         },
-         "harness|ko_mmlu_human_aging|5": {
-             "acc": 0.11210762331838565,
-             "acc_stderr": 0.021174894206346103,
-             "acc_norm": 0.11210762331838565,
-             "acc_norm_stderr": 0.021174894206346103
-         },
-         "harness|ko_mmlu_human_sexuality|5": {
-             "acc": 0.2748091603053435,
-             "acc_stderr": 0.03915345408847835,
-             "acc_norm": 0.2748091603053435,
-             "acc_norm_stderr": 0.03915345408847835
-         },
-         "harness|ko_mmlu_medical_genetics|5": {
-             "acc": 0.24,
-             "acc_stderr": 0.04292346959909284,
-             "acc_norm": 0.24,
-             "acc_norm_stderr": 0.04292346959909284
-         },
-         "harness|ko_mmlu_high_school_geography|5": {
-             "acc": 0.35353535353535354,
-             "acc_stderr": 0.03406086723547153,
-             "acc_norm": 0.35353535353535354,
-             "acc_norm_stderr": 0.03406086723547153
-         },
-         "harness|ko_mmlu_electrical_engineering|5": {
-             "acc": 0.2413793103448276,
-             "acc_stderr": 0.03565998174135302,
-             "acc_norm": 0.2413793103448276,
-             "acc_norm_stderr": 0.03565998174135302
-         },
-         "harness|ko_mmlu_college_physics|5": {
-             "acc": 0.37254901960784315,
-             "acc_stderr": 0.048108401480826346,
-             "acc_norm": 0.37254901960784315,
-             "acc_norm_stderr": 0.048108401480826346
-         },
-         "harness|ko_mmlu_high_school_microeconomics|5": {
-             "acc": 0.3487394957983193,
-             "acc_stderr": 0.030956636328566548,
-             "acc_norm": 0.3487394957983193,
-             "acc_norm_stderr": 0.030956636328566548
-         },
-         "harness|ko_mmlu_high_school_macroeconomics|5": {
-             "acc": 0.3641025641025641,
-             "acc_stderr": 0.024396672985094778,
-             "acc_norm": 0.3641025641025641,
-             "acc_norm_stderr": 0.024396672985094778
-         },
-         "harness|ko_mmlu_computer_security|5": {
-             "acc": 0.18,
-             "acc_stderr": 0.03861229196653694,
-             "acc_norm": 0.18,
-             "acc_norm_stderr": 0.03861229196653694
-         },
-         "harness|ko_mmlu_global_facts|5": {
-             "acc": 0.16,
-             "acc_stderr": 0.03684529491774709,
-             "acc_norm": 0.16,
-             "acc_norm_stderr": 0.03684529491774709
-         },
-         "harness|ko_mmlu_jurisprudence|5": {
-             "acc": 0.21296296296296297,
-             "acc_stderr": 0.03957835471980981,
-             "acc_norm": 0.21296296296296297,
-             "acc_norm_stderr": 0.03957835471980981
-         },
-         "harness|ko_mmlu_high_school_chemistry|5": {
-             "acc": 0.28078817733990147,
-             "acc_stderr": 0.0316185633535861,
-             "acc_norm": 0.28078817733990147,
-             "acc_norm_stderr": 0.0316185633535861
-         },
-         "harness|ko_mmlu_high_school_biology|5": {
-             "acc": 0.3161290322580645,
-             "acc_stderr": 0.026450874489042764,
-             "acc_norm": 0.3161290322580645,
-             "acc_norm_stderr": 0.026450874489042764
-         },
-         "harness|ko_mmlu_marketing|5": {
-             "acc": 0.19658119658119658,
-             "acc_stderr": 0.02603538609895129,
-             "acc_norm": 0.19658119658119658,
-             "acc_norm_stderr": 0.02603538609895129
-         },
-         "harness|ko_mmlu_clinical_knowledge|5": {
-             "acc": 0.2981132075471698,
-             "acc_stderr": 0.028152837942493864,
-             "acc_norm": 0.2981132075471698,
-             "acc_norm_stderr": 0.028152837942493864
-         },
-         "harness|ko_mmlu_public_relations|5": {
-             "acc": 0.22727272727272727,
-             "acc_stderr": 0.040139645540727735,
-             "acc_norm": 0.22727272727272727,
-             "acc_norm_stderr": 0.040139645540727735
-         },
-         "harness|ko_mmlu_high_school_mathematics|5": {
-             "acc": 0.26296296296296295,
-             "acc_stderr": 0.02684205787383371,
-             "acc_norm": 0.26296296296296295,
-             "acc_norm_stderr": 0.02684205787383371
-         },
-         "harness|ko_mmlu_high_school_physics|5": {
-             "acc": 0.33112582781456956,
-             "acc_stderr": 0.038425817186598696,
-             "acc_norm": 0.33112582781456956,
-             "acc_norm_stderr": 0.038425817186598696
-         },
-         "harness|ko_mmlu_sociology|5": {
-             "acc": 0.263681592039801,
-             "acc_stderr": 0.03115715086935556,
-             "acc_norm": 0.263681592039801,
-             "acc_norm_stderr": 0.03115715086935556
-         },
-         "harness|ko_mmlu_college_medicine|5": {
-             "acc": 0.3352601156069364,
-             "acc_stderr": 0.03599586301247078,
-             "acc_norm": 0.3352601156069364,
-             "acc_norm_stderr": 0.03599586301247078
-         },
-         "harness|ko_mmlu_elementary_mathematics|5": {
-             "acc": 0.2698412698412698,
-             "acc_stderr": 0.022860838309232072,
-             "acc_norm": 0.2698412698412698,
-             "acc_norm_stderr": 0.022860838309232072
-         },
-         "harness|ko_mmlu_college_biology|5": {
-             "acc": 0.2638888888888889,
-             "acc_stderr": 0.03685651095897532,
-             "acc_norm": 0.2638888888888889,
-             "acc_norm_stderr": 0.03685651095897532
-         },
-         "harness|ko_mmlu_college_chemistry|5": {
-             "acc": 0.41,
-             "acc_stderr": 0.049431107042371025,
-             "acc_norm": 0.41,
-             "acc_norm_stderr": 0.049431107042371025
-         },
-         "harness|ko_mmlu_us_foreign_policy|5": {
-             "acc": 0.26,
-             "acc_stderr": 0.044084400227680794,
-             "acc_norm": 0.26,
-             "acc_norm_stderr": 0.044084400227680794
-         },
-         "harness|ko_mmlu_moral_disputes|5": {
-             "acc": 0.2138728323699422,
-             "acc_stderr": 0.022075709251757173,
-             "acc_norm": 0.2138728323699422,
-             "acc_norm_stderr": 0.022075709251757173
-         },
-         "harness|ko_mmlu_logical_fallacies|5": {
-             "acc": 0.2331288343558282,
-             "acc_stderr": 0.0332201579577674,
-             "acc_norm": 0.2331288343558282,
-             "acc_norm_stderr": 0.0332201579577674
-         },
-         "harness|ko_mmlu_prehistory|5": {
-             "acc": 0.22530864197530864,
-             "acc_stderr": 0.02324620264781975,
-             "acc_norm": 0.22530864197530864,
-             "acc_norm_stderr": 0.02324620264781975
-         },
-         "harness|ko_mmlu_college_mathematics|5": {
-             "acc": 0.31,
-             "acc_stderr": 0.04648231987117316,
-             "acc_norm": 0.31,
-             "acc_norm_stderr": 0.04648231987117316
-         },
-         "harness|ko_mmlu_high_school_government_and_politics|5": {
-             "acc": 0.36787564766839376,
-             "acc_stderr": 0.034801756684660366,
-             "acc_norm": 0.36787564766839376,
-             "acc_norm_stderr": 0.034801756684660366
-         },
-         "harness|ko_mmlu_econometrics|5": {
-             "acc": 0.23684210526315788,
-             "acc_stderr": 0.039994238792813365,
-             "acc_norm": 0.23684210526315788,
-             "acc_norm_stderr": 0.039994238792813365
-         },
-         "harness|ko_mmlu_high_school_psychology|5": {
-             "acc": 0.3504587155963303,
-             "acc_stderr": 0.02045607759982446,
-             "acc_norm": 0.3504587155963303,
-             "acc_norm_stderr": 0.02045607759982446
-         },
-         "harness|ko_mmlu_formal_logic|5": {
-             "acc": 0.36507936507936506,
-             "acc_stderr": 0.04306241259127153,
-             "acc_norm": 0.36507936507936506,
-             "acc_norm_stderr": 0.04306241259127153
-         },
-         "harness|ko_mmlu_nutrition|5": {
-             "acc": 0.29411764705882354,
-             "acc_stderr": 0.026090162504279053,
-             "acc_norm": 0.29411764705882354,
-             "acc_norm_stderr": 0.026090162504279053
-         },
-         "harness|ko_mmlu_business_ethics|5": {
-             "acc": 0.21,
-             "acc_stderr": 0.040936018074033256,
-             "acc_norm": 0.21,
-             "acc_norm_stderr": 0.040936018074033256
-         },
-         "harness|ko_mmlu_international_law|5": {
-             "acc": 0.14049586776859505,
-             "acc_stderr": 0.031722334260021585,
-             "acc_norm": 0.14049586776859505,
-             "acc_norm_stderr": 0.031722334260021585
-         },
-         "harness|ko_mmlu_astronomy|5": {
-             "acc": 0.3355263157894737,
-             "acc_stderr": 0.038424985593952694,
-             "acc_norm": 0.3355263157894737,
-             "acc_norm_stderr": 0.038424985593952694
-         },
-         "harness|ko_mmlu_professional_psychology|5": {
-             "acc": 0.2173202614379085,
-             "acc_stderr": 0.016684820929148598,
-             "acc_norm": 0.2173202614379085,
-             "acc_norm_stderr": 0.016684820929148598
-         },
-         "harness|ko_mmlu_professional_accounting|5": {
-             "acc": 0.24113475177304963,
-             "acc_stderr": 0.02551873104953776,
-             "acc_norm": 0.24113475177304963,
-             "acc_norm_stderr": 0.02551873104953776
-         },
-         "harness|ko_mmlu_machine_learning|5": {
-             "acc": 0.16964285714285715,
-             "acc_stderr": 0.03562367850095391,
-             "acc_norm": 0.16964285714285715,
-             "acc_norm_stderr": 0.03562367850095391
-         },
-         "harness|ko_mmlu_high_school_statistics|5": {
-             "acc": 0.4722222222222222,
-             "acc_stderr": 0.0340470532865388,
-             "acc_norm": 0.4722222222222222,
-             "acc_norm_stderr": 0.0340470532865388
-         },
-         "harness|ko_mmlu_moral_scenarios|5": {
-             "acc": 0.27262569832402234,
-             "acc_stderr": 0.014893391735249608,
-             "acc_norm": 0.27262569832402234,
-             "acc_norm_stderr": 0.014893391735249608
-         },
-         "harness|ko_mmlu_college_computer_science|5": {
-             "acc": 0.33,
-             "acc_stderr": 0.04725815626252604,
-             "acc_norm": 0.33,
-             "acc_norm_stderr": 0.04725815626252604
-         },
-         "harness|ko_mmlu_high_school_computer_science|5": {
-             "acc": 0.18,
-             "acc_stderr": 0.038612291966536934,
-             "acc_norm": 0.18,
-             "acc_norm_stderr": 0.038612291966536934
-         },
-         "harness|ko_mmlu_professional_medicine|5": {
-             "acc": 0.4485294117647059,
-             "acc_stderr": 0.030211479609121593,
-             "acc_norm": 0.4485294117647059,
-             "acc_norm_stderr": 0.030211479609121593
-         },
-         "harness|ko_mmlu_security_studies|5": {
-             "acc": 0.37142857142857144,
-             "acc_stderr": 0.030932858792789834,
-             "acc_norm": 0.37142857142857144,
-             "acc_norm_stderr": 0.030932858792789834
-         },
-         "harness|ko_mmlu_high_school_world_history|5": {
-             "acc": 0.20253164556962025,
-             "acc_stderr": 0.026160568246601464,
-             "acc_norm": 0.20253164556962025,
-             "acc_norm_stderr": 0.026160568246601464
-         },
-         "harness|ko_mmlu_professional_law|5": {
-             "acc": 0.2438070404172099,
-             "acc_stderr": 0.010966507972178475,
-             "acc_norm": 0.2438070404172099,
-             "acc_norm_stderr": 0.010966507972178475
-         },
-         "harness|ko_mmlu_high_school_us_history|5": {
-             "acc": 0.25980392156862747,
-             "acc_stderr": 0.030778554678693257,
-             "acc_norm": 0.25980392156862747,
-             "acc_norm_stderr": 0.030778554678693257
-         },
-         "harness|ko_mmlu_high_school_european_history|5": {
-             "acc": 0.2727272727272727,
-             "acc_stderr": 0.03477691162163659,
-             "acc_norm": 0.2727272727272727,
-             "acc_norm_stderr": 0.03477691162163659
-         },
-         "harness|ko_truthfulqa_mc|0": {
-             "mc1": 0.2521419828641371,
-             "mc1_stderr": 0.015201522246299953,
-             "mc2": 0.3923990529544255,
-             "mc2_stderr": 0.014649495364495559
-         },
-         "harness|ko_commongen_v2|2": {
-             "acc": 0.2857142857142857,
-             "acc_stderr": 0.01553162078698674,
-             "acc_norm": 0.3565525383707202,
-             "acc_norm_stderr": 0.016467706981527448
-         }
-     },
-     "versions": {
-         "all": 0,
-         "harness|ko_arc_challenge|25": 0,
-         "harness|ko_hellaswag|10": 0,
-         "harness|ko_mmlu_world_religions|5": 1,
-         "harness|ko_mmlu_management|5": 1,
-         "harness|ko_mmlu_miscellaneous|5": 1,
-         "harness|ko_mmlu_anatomy|5": 1,
-         "harness|ko_mmlu_abstract_algebra|5": 1,
-         "harness|ko_mmlu_conceptual_physics|5": 1,
-         "harness|ko_mmlu_virology|5": 1,
-         "harness|ko_mmlu_philosophy|5": 1,
-         "harness|ko_mmlu_human_aging|5": 1,
-         "harness|ko_mmlu_human_sexuality|5": 1,
-         "harness|ko_mmlu_medical_genetics|5": 1,
-         "harness|ko_mmlu_high_school_geography|5": 1,
-         "harness|ko_mmlu_electrical_engineering|5": 1,
-         "harness|ko_mmlu_college_physics|5": 1,
-         "harness|ko_mmlu_high_school_microeconomics|5": 1,
-         "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-         "harness|ko_mmlu_computer_security|5": 1,
-         "harness|ko_mmlu_global_facts|5": 1,
-         "harness|ko_mmlu_jurisprudence|5": 1,
-         "harness|ko_mmlu_high_school_chemistry|5": 1,
-         "harness|ko_mmlu_high_school_biology|5": 1,
-         "harness|ko_mmlu_marketing|5": 1,
-         "harness|ko_mmlu_clinical_knowledge|5": 1,
-         "harness|ko_mmlu_public_relations|5": 1,
-         "harness|ko_mmlu_high_school_mathematics|5": 1,
-         "harness|ko_mmlu_high_school_physics|5": 1,
-         "harness|ko_mmlu_sociology|5": 1,
-         "harness|ko_mmlu_college_medicine|5": 1,
-         "harness|ko_mmlu_elementary_mathematics|5": 1,
-         "harness|ko_mmlu_college_biology|5": 1,
-         "harness|ko_mmlu_college_chemistry|5": 1,
-         "harness|ko_mmlu_us_foreign_policy|5": 1,
-         "harness|ko_mmlu_moral_disputes|5": 1,
-         "harness|ko_mmlu_logical_fallacies|5": 1,
-         "harness|ko_mmlu_prehistory|5": 1,
-         "harness|ko_mmlu_college_mathematics|5": 1,
-         "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-         "harness|ko_mmlu_econometrics|5": 1,
-         "harness|ko_mmlu_high_school_psychology|5": 1,
-         "harness|ko_mmlu_formal_logic|5": 1,
-         "harness|ko_mmlu_nutrition|5": 1,
-         "harness|ko_mmlu_business_ethics|5": 1,
-         "harness|ko_mmlu_international_law|5": 1,
-         "harness|ko_mmlu_astronomy|5": 1,
-         "harness|ko_mmlu_professional_psychology|5": 1,
-         "harness|ko_mmlu_professional_accounting|5": 1,
-         "harness|ko_mmlu_machine_learning|5": 1,
-         "harness|ko_mmlu_high_school_statistics|5": 1,
-         "harness|ko_mmlu_moral_scenarios|5": 1,
-         "harness|ko_mmlu_college_computer_science|5": 1,
-         "harness|ko_mmlu_high_school_computer_science|5": 1,
-         "harness|ko_mmlu_professional_medicine|5": 1,
-         "harness|ko_mmlu_security_studies|5": 1,
-         "harness|ko_mmlu_high_school_world_history|5": 1,
-         "harness|ko_mmlu_professional_law|5": 1,
-         "harness|ko_mmlu_high_school_us_history|5": 1,
-         "harness|ko_mmlu_high_school_european_history|5": 1,
-         "harness|ko_truthfulqa_mc|0": 0,
-         "harness|ko_commongen_v2|2": 1
-     },
-     "config_general": {
-         "model_name": "EleutherAI/polyglot-ko-5.8b",
-         "model_sha": "581a4c3eebfac23536b3c9676bcfb05c6a97baa2",
-         "model_dtype": "torch.float16",
-         "lighteval_sha": "",
-         "num_few_shot_default": 0,
-         "num_fewshot_seeds": 1,
-         "override_batch_size": 1,
-         "max_samples": null
-     }
- }
beomi/KoAlpaca-Polyglot-12.8B/result_2023-09-26 09:57:09.json DELETED
@@ -1,444 +0,0 @@
- {
-     "results": {
-         "harness|ko_arc_challenge|25": {
-             "acc": 0.31569965870307165,
-             "acc_stderr": 0.013582571095815291,
-             "acc_norm": 0.3455631399317406,
-             "acc_norm_stderr": 0.01389693846114568
-         },
-         "harness|ko_hellaswag|10": {
-             "acc": 0.38717386974706236,
-             "acc_stderr": 0.004861084534087029,
-             "acc_norm": 0.498406691894045,
-             "acc_norm_stderr": 0.004989756076956349
-         },
-         "harness|ko_mmlu_world_religions|5": {
-             "acc": 0.28654970760233917,
-             "acc_stderr": 0.034678266857038266,
-             "acc_norm": 0.28654970760233917,
-             "acc_norm_stderr": 0.034678266857038266
-         },
-         "harness|ko_mmlu_management|5": {
-             "acc": 0.22330097087378642,
-             "acc_stderr": 0.04123553189891431,
-             "acc_norm": 0.22330097087378642,
-             "acc_norm_stderr": 0.04123553189891431
-         },
-         "harness|ko_mmlu_miscellaneous|5": {
-             "acc": 0.26947637292464877,
-             "acc_stderr": 0.01586624307321505,
-             "acc_norm": 0.26947637292464877,
-             "acc_norm_stderr": 0.01586624307321505
-         },
-         "harness|ko_mmlu_anatomy|5": {
-             "acc": 0.2518518518518518,
-             "acc_stderr": 0.037498507091740234,
-             "acc_norm": 0.2518518518518518,
-             "acc_norm_stderr": 0.037498507091740234
-         },
-         "harness|ko_mmlu_abstract_algebra|5": {
-             "acc": 0.3,
-             "acc_stderr": 0.046056618647183814,
-             "acc_norm": 0.3,
-             "acc_norm_stderr": 0.046056618647183814
-         },
-         "harness|ko_mmlu_conceptual_physics|5": {
-             "acc": 0.2765957446808511,
-             "acc_stderr": 0.029241883869628827,
-             "acc_norm": 0.2765957446808511,
-             "acc_norm_stderr": 0.029241883869628827
-         },
-         "harness|ko_mmlu_virology|5": {
-             "acc": 0.24096385542168675,
-             "acc_stderr": 0.033293941190735296,
-             "acc_norm": 0.24096385542168675,
-             "acc_norm_stderr": 0.033293941190735296
-         },
-         "harness|ko_mmlu_philosophy|5": {
-             "acc": 0.2990353697749196,
-             "acc_stderr": 0.026003301117885135,
-             "acc_norm": 0.2990353697749196,
-             "acc_norm_stderr": 0.026003301117885135
-         },
-         "harness|ko_mmlu_human_aging|5": {
-             "acc": 0.20179372197309417,
-             "acc_stderr": 0.026936111912802277,
-             "acc_norm": 0.20179372197309417,
-             "acc_norm_stderr": 0.026936111912802277
-         },
-         "harness|ko_mmlu_human_sexuality|5": {
-             "acc": 0.24427480916030533,
-             "acc_stderr": 0.03768335959728743,
-             "acc_norm": 0.24427480916030533,
-             "acc_norm_stderr": 0.03768335959728743
-         },
-         "harness|ko_mmlu_medical_genetics|5": {
-             "acc": 0.25,
-             "acc_stderr": 0.04351941398892446,
-             "acc_norm": 0.25,
-             "acc_norm_stderr": 0.04351941398892446
-         },
-         "harness|ko_mmlu_high_school_geography|5": {
-             "acc": 0.25252525252525254,
-             "acc_stderr": 0.030954055470365897,
-             "acc_norm": 0.25252525252525254,
-             "acc_norm_stderr": 0.030954055470365897
-         },
-         "harness|ko_mmlu_electrical_engineering|5": {
-             "acc": 0.21379310344827587,
-             "acc_stderr": 0.03416520447747549,
-             "acc_norm": 0.21379310344827587,
-             "acc_norm_stderr": 0.03416520447747549
-         },
-         "harness|ko_mmlu_college_physics|5": {
-             "acc": 0.16666666666666666,
-             "acc_stderr": 0.03708284662416545,
-             "acc_norm": 0.16666666666666666,
-             "acc_norm_stderr": 0.03708284662416545
-         },
-         "harness|ko_mmlu_high_school_microeconomics|5": {
-             "acc": 0.22268907563025211,
-             "acc_stderr": 0.027025433498882374,
-             "acc_norm": 0.22268907563025211,
-             "acc_norm_stderr": 0.027025433498882374
-         },
-         "harness|ko_mmlu_high_school_macroeconomics|5": {
-             "acc": 0.19743589743589743,
-             "acc_stderr": 0.020182646968674844,
-             "acc_norm": 0.19743589743589743,
-             "acc_norm_stderr": 0.020182646968674844
-         },
-         "harness|ko_mmlu_computer_security|5": {
-             "acc": 0.29,
-             "acc_stderr": 0.045604802157206845,
-             "acc_norm": 0.29,
-             "acc_norm_stderr": 0.045604802157206845
-         },
-         "harness|ko_mmlu_global_facts|5": {
-             "acc": 0.32,
-             "acc_stderr": 0.04688261722621504,
-             "acc_norm": 0.32,
-             "acc_norm_stderr": 0.04688261722621504
-         },
-         "harness|ko_mmlu_jurisprudence|5": {
-             "acc": 0.2037037037037037,
-             "acc_stderr": 0.03893542518824847,
-             "acc_norm": 0.2037037037037037,
-             "acc_norm_stderr": 0.03893542518824847
-         },
-         "harness|ko_mmlu_high_school_chemistry|5": {
-             "acc": 0.27586206896551724,
-             "acc_stderr": 0.03144712581678243,
-             "acc_norm": 0.27586206896551724,
-             "acc_norm_stderr": 0.03144712581678243
-         },
-         "harness|ko_mmlu_high_school_biology|5": {
-             "acc": 0.24838709677419354,
-             "acc_stderr": 0.02458002892148101,
-             "acc_norm": 0.24838709677419354,
-             "acc_norm_stderr": 0.02458002892148101
-         },
-         "harness|ko_mmlu_marketing|5": {
-             "acc": 0.31196581196581197,
-             "acc_stderr": 0.03035152732334494,
-             "acc_norm": 0.31196581196581197,
-             "acc_norm_stderr": 0.03035152732334494
-         },
-         "harness|ko_mmlu_clinical_knowledge|5": {
-             "acc": 0.2,
-             "acc_stderr": 0.02461829819586651,
-             "acc_norm": 0.2,
-             "acc_norm_stderr": 0.02461829819586651
-         },
-         "harness|ko_mmlu_public_relations|5": {
-             "acc": 0.24545454545454545,
-             "acc_stderr": 0.041220665028782855,
-             "acc_norm": 0.24545454545454545,
-             "acc_norm_stderr": 0.041220665028782855
-         },
-         "harness|ko_mmlu_high_school_mathematics|5": {
-             "acc": 0.2777777777777778,
-             "acc_stderr": 0.027309140588230172,
-             "acc_norm": 0.2777777777777778,
-             "acc_norm_stderr": 0.027309140588230172
-         },
-         "harness|ko_mmlu_high_school_physics|5": {
-             "acc": 0.271523178807947,
-             "acc_stderr": 0.03631329803969653,
-             "acc_norm": 0.271523178807947,
-             "acc_norm_stderr": 0.03631329803969653
-         },
-         "harness|ko_mmlu_sociology|5": {
-             "acc": 0.23880597014925373,
-             "acc_stderr": 0.03014777593540922,
-             "acc_norm": 0.23880597014925373,
-             "acc_norm_stderr": 0.03014777593540922
-         },
-         "harness|ko_mmlu_college_medicine|5": {
-             "acc": 0.2543352601156069,
-             "acc_stderr": 0.0332055644308557,
-             "acc_norm": 0.2543352601156069,
-             "acc_norm_stderr": 0.0332055644308557
-         },
-         "harness|ko_mmlu_elementary_mathematics|5": {
-             "acc": 0.24867724867724866,
-             "acc_stderr": 0.022261817692400168,
-             "acc_norm": 0.24867724867724866,
-             "acc_norm_stderr": 0.022261817692400168
-         },
-         "harness|ko_mmlu_college_biology|5": {
-             "acc": 0.2777777777777778,
-             "acc_stderr": 0.037455547914624576,
-             "acc_norm": 0.2777777777777778,
-             "acc_norm_stderr": 0.037455547914624576
-         },
-         "harness|ko_mmlu_college_chemistry|5": {
-             "acc": 0.17,
-             "acc_stderr": 0.0377525168068637,
-             "acc_norm": 0.17,
-             "acc_norm_stderr": 0.0377525168068637
-         },
-         "harness|ko_mmlu_us_foreign_policy|5": {
-             "acc": 0.33,
-             "acc_stderr": 0.04725815626252604,
-             "acc_norm": 0.33,
-             "acc_norm_stderr": 0.04725815626252604
-         },
-         "harness|ko_mmlu_moral_disputes|5": {
-             "acc": 0.26878612716763006,
-             "acc_stderr": 0.02386800326250011,
-             "acc_norm": 0.26878612716763006,
-             "acc_norm_stderr": 0.02386800326250011
-         },
-         "harness|ko_mmlu_logical_fallacies|5": {
-             "acc": 0.2822085889570552,
-             "acc_stderr": 0.03536117886664743,
-             "acc_norm": 0.2822085889570552,
-             "acc_norm_stderr": 0.03536117886664743
-         },
-         "harness|ko_mmlu_prehistory|5": {
-             "acc": 0.28703703703703703,
-             "acc_stderr": 0.02517104191530968,
-             "acc_norm": 0.28703703703703703,
-             "acc_norm_stderr": 0.02517104191530968
-         },
-         "harness|ko_mmlu_college_mathematics|5": {
-             "acc": 0.29,
-             "acc_stderr": 0.04560480215720683,
-             "acc_norm": 0.29,
-             "acc_norm_stderr": 0.04560480215720683
-         },
-         "harness|ko_mmlu_high_school_government_and_politics|5": {
-             "acc": 0.24870466321243523,
-             "acc_stderr": 0.031195840877700286,
-             "acc_norm": 0.24870466321243523,
-             "acc_norm_stderr": 0.031195840877700286
-         },
-         "harness|ko_mmlu_econometrics|5": {
-             "acc": 0.21052631578947367,
-             "acc_stderr": 0.038351539543994194,
-             "acc_norm": 0.21052631578947367,
-             "acc_norm_stderr": 0.038351539543994194
-         },
-         "harness|ko_mmlu_high_school_psychology|5": {
-             "acc": 0.23853211009174313,
-             "acc_stderr": 0.01827257581023186,
-             "acc_norm": 0.23853211009174313,
-             "acc_norm_stderr": 0.01827257581023186
-         },
-         "harness|ko_mmlu_formal_logic|5": {
-             "acc": 0.18253968253968253,
-             "acc_stderr": 0.034550710191021496,
-             "acc_norm": 0.18253968253968253,
-             "acc_norm_stderr": 0.034550710191021496
-         },
-         "harness|ko_mmlu_nutrition|5": {
-             "acc": 0.2549019607843137,
-             "acc_stderr": 0.0249541843248799,
-             "acc_norm": 0.2549019607843137,
-             "acc_norm_stderr": 0.0249541843248799
-         },
-         "harness|ko_mmlu_business_ethics|5": {
-             "acc": 0.3,
-             "acc_stderr": 0.046056618647183814,
-             "acc_norm": 0.3,
-             "acc_norm_stderr": 0.046056618647183814
-         },
-         "harness|ko_mmlu_international_law|5": {
-             "acc": 0.2809917355371901,
-             "acc_stderr": 0.04103203830514512,
-             "acc_norm": 0.2809917355371901,
-             "acc_norm_stderr": 0.04103203830514512
-         },
-         "harness|ko_mmlu_astronomy|5": {
-             "acc": 0.23684210526315788,
-             "acc_stderr": 0.03459777606810535,
-             "acc_norm": 0.23684210526315788,
-             "acc_norm_stderr": 0.03459777606810535
-         },
-         "harness|ko_mmlu_professional_psychology|5": {
-             "acc": 0.2679738562091503,
-             "acc_stderr": 0.017917974069594726,
-             "acc_norm": 0.2679738562091503,
-             "acc_norm_stderr": 0.017917974069594726
-         },
-         "harness|ko_mmlu_professional_accounting|5": {
-             "acc": 0.25886524822695034,
-             "acc_stderr": 0.026129572527180848,
-             "acc_norm": 0.25886524822695034,
-             "acc_norm_stderr": 0.026129572527180848
-         },
-         "harness|ko_mmlu_machine_learning|5": {
-             "acc": 0.30357142857142855,
-             "acc_stderr": 0.04364226155841044,
-             "acc_norm": 0.30357142857142855,
-             "acc_norm_stderr": 0.04364226155841044
-         },
-         "harness|ko_mmlu_high_school_statistics|5": {
-             "acc": 0.20833333333333334,
-             "acc_stderr": 0.027696910713093936,
-             "acc_norm": 0.20833333333333334,
-             "acc_norm_stderr": 0.027696910713093936
-         },
-         "harness|ko_mmlu_moral_scenarios|5": {
-             "acc": 0.2748603351955307,
-             "acc_stderr": 0.014931316703220511,
-             "acc_norm": 0.2748603351955307,
-             "acc_norm_stderr": 0.014931316703220511
-         },
-         "harness|ko_mmlu_college_computer_science|5": {
-             "acc": 0.24,
-             "acc_stderr": 0.04292346959909283,
-             "acc_norm": 0.24,
-             "acc_norm_stderr": 0.04292346959909283
-         },
-         "harness|ko_mmlu_high_school_computer_science|5": {
-             "acc": 0.31,
-             "acc_stderr": 0.04648231987117316,
-             "acc_norm": 0.31,
-             "acc_norm_stderr": 0.04648231987117316
-         },
-         "harness|ko_mmlu_professional_medicine|5": {
-             "acc": 0.17647058823529413,
-             "acc_stderr": 0.02315746830855938,
-             "acc_norm": 0.17647058823529413,
-             "acc_norm_stderr": 0.02315746830855938
-         },
-         "harness|ko_mmlu_security_studies|5": {
-             "acc": 0.23265306122448978,
-             "acc_stderr": 0.02704925791589618,
-             "acc_norm": 0.23265306122448978,
-             "acc_norm_stderr": 0.02704925791589618
-         },
-         "harness|ko_mmlu_high_school_world_history|5": {
-             "acc": 0.29957805907172996,
-             "acc_stderr": 0.029818024749753102,
-             "acc_norm": 0.29957805907172996,
-             "acc_norm_stderr": 0.029818024749753102
-         },
-         "harness|ko_mmlu_professional_law|5": {
-             "acc": 0.2561929595827901,
-             "acc_stderr": 0.01114917315311058,
-             "acc_norm": 0.2561929595827901,
-             "acc_norm_stderr": 0.01114917315311058
-         },
-         "harness|ko_mmlu_high_school_us_history|5": {
-             "acc": 0.2549019607843137,
-             "acc_stderr": 0.030587591351604246,
-             "acc_norm": 0.2549019607843137,
-             "acc_norm_stderr": 0.030587591351604246
-         },
-         "harness|ko_mmlu_high_school_european_history|5": {
-             "acc": 0.26666666666666666,
-             "acc_stderr": 0.03453131801885415,
-             "acc_norm": 0.26666666666666666,
-             "acc_norm_stderr": 0.03453131801885415
-         },
-         "harness|ko_truthfulqa_mc|0": {
-             "mc1": 0.24724602203182375,
-             "mc1_stderr": 0.015102404797359649,
-             "mc2": 0.41954301811910955,
-             "mc2_stderr": 0.016024343371178577
-         },
-         "harness|ko_commongen_v2|2": {
-             "acc": 0.2987012987012987,
-             "acc_stderr": 0.015735657391438295,
-             "acc_norm": 0.3482880755608028,
-             "acc_norm_stderr": 0.016379926739148037
-         }
-     },
-     "versions": {
-         "all": 0,
-         "harness|ko_arc_challenge|25": 0,
-         "harness|ko_hellaswag|10": 0,
-         "harness|ko_mmlu_world_religions|5": 1,
-         "harness|ko_mmlu_management|5": 1,
-         "harness|ko_mmlu_miscellaneous|5": 1,
-         "harness|ko_mmlu_anatomy|5": 1,
-         "harness|ko_mmlu_abstract_algebra|5": 1,
-         "harness|ko_mmlu_conceptual_physics|5": 1,
-         "harness|ko_mmlu_virology|5": 1,
-         "harness|ko_mmlu_philosophy|5": 1,
-         "harness|ko_mmlu_human_aging|5": 1,
-         "harness|ko_mmlu_human_sexuality|5": 1,
-         "harness|ko_mmlu_medical_genetics|5": 1,
-         "harness|ko_mmlu_high_school_geography|5": 1,
-         "harness|ko_mmlu_electrical_engineering|5": 1,
-         "harness|ko_mmlu_college_physics|5": 1,
-         "harness|ko_mmlu_high_school_microeconomics|5": 1,
-         "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-         "harness|ko_mmlu_computer_security|5": 1,
-         "harness|ko_mmlu_global_facts|5": 1,
-         "harness|ko_mmlu_jurisprudence|5": 1,
-         "harness|ko_mmlu_high_school_chemistry|5": 1,
-         "harness|ko_mmlu_high_school_biology|5": 1,
-         "harness|ko_mmlu_marketing|5": 1,
-         "harness|ko_mmlu_clinical_knowledge|5": 1,
-         "harness|ko_mmlu_public_relations|5": 1,
-         "harness|ko_mmlu_high_school_mathematics|5": 1,
-         "harness|ko_mmlu_high_school_physics|5": 1,
-         "harness|ko_mmlu_sociology|5": 1,
-         "harness|ko_mmlu_college_medicine|5": 1,
-         "harness|ko_mmlu_elementary_mathematics|5": 1,
-         "harness|ko_mmlu_college_biology|5": 1,
-         "harness|ko_mmlu_college_chemistry|5": 1,
-         "harness|ko_mmlu_us_foreign_policy|5": 1,
-         "harness|ko_mmlu_moral_disputes|5": 1,
-         "harness|ko_mmlu_logical_fallacies|5": 1,
-         "harness|ko_mmlu_prehistory|5": 1,
-         "harness|ko_mmlu_college_mathematics|5": 1,
-         "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-         "harness|ko_mmlu_econometrics|5": 1,
-         "harness|ko_mmlu_high_school_psychology|5": 1,
-         "harness|ko_mmlu_formal_logic|5": 1,
-         "harness|ko_mmlu_nutrition|5": 1,
-         "harness|ko_mmlu_business_ethics|5": 1,
-         "harness|ko_mmlu_international_law|5": 1,
-         "harness|ko_mmlu_astronomy|5": 1,
-         "harness|ko_mmlu_professional_psychology|5": 1,
-         "harness|ko_mmlu_professional_accounting|5": 1,
-         "harness|ko_mmlu_machine_learning|5": 1,
-         "harness|ko_mmlu_high_school_statistics|5": 1,
-         "harness|ko_mmlu_moral_scenarios|5": 1,
-         "harness|ko_mmlu_college_computer_science|5": 1,
-         "harness|ko_mmlu_high_school_computer_science|5": 1,
-         "harness|ko_mmlu_professional_medicine|5": 1,
-         "harness|ko_mmlu_security_studies|5": 1,
-         "harness|ko_mmlu_high_school_world_history|5": 1,
-         "harness|ko_mmlu_professional_law|5": 1,
-         "harness|ko_mmlu_high_school_us_history|5": 1,
-         "harness|ko_mmlu_high_school_european_history|5": 1,
-         "harness|ko_truthfulqa_mc|0": 0,
-         "harness|ko_commongen_v2|2": 1
-     },
-     "config_general": {
-         "model_name": "beomi/KoAlpaca-Polyglot-12.8B",
-         "model_sha": "5f225e9c5ae6c7238fc2316da0b8a9922019674d",
-         "model_dtype": "torch.float16",
-         "lighteval_sha": "",
-         "num_few_shot_default": 0,
-         "num_fewshot_seeds": 1,
-         "override_batch_size": 1,
-         "max_samples": null
-     }
- }
beomi/kollama-13b/result_2023-09-26 17:41:30.json DELETED
@@ -1,444 +0,0 @@
- {
-     "results": {
-         "harness|ko_arc_challenge|25": {
-             "acc": 0.18515358361774745,
-             "acc_stderr": 0.01135077443838972,
-             "acc_norm": 0.24146757679180889,
-             "acc_norm_stderr": 0.01250656483973943
-         },
-         "harness|ko_hellaswag|10": {
-             "acc": 0.27235610436168095,
-             "acc_stderr": 0.004442623590846325,
-             "acc_norm": 0.2985461063533161,
-             "acc_norm_stderr": 0.0045668504212538225
-         },
-         "harness|ko_mmlu_world_religions|5": {
-             "acc": 0.26900584795321636,
-             "acc_stderr": 0.0340105262010409,
-             "acc_norm": 0.26900584795321636,
-             "acc_norm_stderr": 0.0340105262010409
-         },
-         "harness|ko_mmlu_management|5": {
-             "acc": 0.17475728155339806,
-             "acc_stderr": 0.037601780060266196,
-             "acc_norm": 0.17475728155339806,
-             "acc_norm_stderr": 0.037601780060266196
-         },
-         "harness|ko_mmlu_miscellaneous|5": {
-             "acc": 0.26053639846743293,
-             "acc_stderr": 0.01569600856380709,
-             "acc_norm": 0.26053639846743293,
-             "acc_norm_stderr": 0.01569600856380709
-         },
-         "harness|ko_mmlu_anatomy|5": {
-             "acc": 0.22962962962962963,
-             "acc_stderr": 0.036333844140734636,
-             "acc_norm": 0.22962962962962963,
-             "acc_norm_stderr": 0.036333844140734636
-         },
-         "harness|ko_mmlu_abstract_algebra|5": {
-             "acc": 0.3,
-             "acc_stderr": 0.046056618647183814,
-             "acc_norm": 0.3,
-             "acc_norm_stderr": 0.046056618647183814
-         },
-         "harness|ko_mmlu_conceptual_physics|5": {
-             "acc": 0.20425531914893616,
-             "acc_stderr": 0.02635515841334941,
-             "acc_norm": 0.20425531914893616,
-             "acc_norm_stderr": 0.02635515841334941
-         },
-         "harness|ko_mmlu_virology|5": {
-             "acc": 0.28313253012048195,
-             "acc_stderr": 0.03507295431370518,
-             "acc_norm": 0.28313253012048195,
-             "acc_norm_stderr": 0.03507295431370518
-         },
-         "harness|ko_mmlu_philosophy|5": {
-             "acc": 0.28938906752411575,
-             "acc_stderr": 0.025755865922632938,
-             "acc_norm": 0.28938906752411575,
-             "acc_norm_stderr": 0.025755865922632938
-         },
-         "harness|ko_mmlu_human_aging|5": {
-             "acc": 0.2825112107623318,
-             "acc_stderr": 0.030216831011508773,
-             "acc_norm": 0.2825112107623318,
-             "acc_norm_stderr": 0.030216831011508773
-         },
-         "harness|ko_mmlu_human_sexuality|5": {
-             "acc": 0.2595419847328244,
-             "acc_stderr": 0.03844876139785271,
-             "acc_norm": 0.2595419847328244,
-             "acc_norm_stderr": 0.03844876139785271
-         },
-         "harness|ko_mmlu_medical_genetics|5": {
-             "acc": 0.3,
-             "acc_stderr": 0.046056618647183814,
-             "acc_norm": 0.3,
-             "acc_norm_stderr": 0.046056618647183814
-         },
-         "harness|ko_mmlu_high_school_geography|5": {
-             "acc": 0.24242424242424243,
-             "acc_stderr": 0.030532892233932036,
-             "acc_norm": 0.24242424242424243,
-             "acc_norm_stderr": 0.030532892233932036
-         },
-         "harness|ko_mmlu_electrical_engineering|5": {
-             "acc": 0.25517241379310346,
-             "acc_stderr": 0.03632984052707842,
-             "acc_norm": 0.25517241379310346,
-             "acc_norm_stderr": 0.03632984052707842
-         },
-         "harness|ko_mmlu_college_physics|5": {
-             "acc": 0.21568627450980393,
-             "acc_stderr": 0.04092563958237654,
-             "acc_norm": 0.21568627450980393,
-             "acc_norm_stderr": 0.04092563958237654
-         },
-         "harness|ko_mmlu_high_school_microeconomics|5": {
-             "acc": 0.3487394957983193,
-             "acc_stderr": 0.030956636328566548,
-             "acc_norm": 0.3487394957983193,
-             "acc_norm_stderr": 0.030956636328566548
-         },
-         "harness|ko_mmlu_high_school_macroeconomics|5": {
-             "acc": 0.3641025641025641,
-             "acc_stderr": 0.024396672985094778,
-             "acc_norm": 0.3641025641025641,
-             "acc_norm_stderr": 0.024396672985094778
-         },
-         "harness|ko_mmlu_computer_security|5": {
-             "acc": 0.22,
-             "acc_stderr": 0.0416333199893227,
-             "acc_norm": 0.22,
-             "acc_norm_stderr": 0.0416333199893227
-         },
-         "harness|ko_mmlu_global_facts|5": {
-             "acc": 0.31,
-             "acc_stderr": 0.04648231987117316,
-             "acc_norm": 0.31,
-             "acc_norm_stderr": 0.04648231987117316
-         },
-         "harness|ko_mmlu_jurisprudence|5": {
-             "acc": 0.21296296296296297,
-             "acc_stderr": 0.03957835471980981,
-             "acc_norm": 0.21296296296296297,
-             "acc_norm_stderr": 0.03957835471980981
-         },
-         "harness|ko_mmlu_high_school_chemistry|5": {
-             "acc": 0.2512315270935961,
-             "acc_stderr": 0.030516530732694433,
-             "acc_norm": 0.2512315270935961,
-             "acc_norm_stderr": 0.030516530732694433
-         },
-         "harness|ko_mmlu_high_school_biology|5": {
-             "acc": 0.3161290322580645,
-             "acc_stderr": 0.026450874489042764,
-             "acc_norm": 0.3161290322580645,
-             "acc_norm_stderr": 0.026450874489042764
-         },
-         "harness|ko_mmlu_marketing|5": {
-             "acc": 0.19658119658119658,
-             "acc_stderr": 0.02603538609895129,
-             "acc_norm": 0.19658119658119658,
-             "acc_norm_stderr": 0.02603538609895129
-         },
-         "harness|ko_mmlu_clinical_knowledge|5": {
-             "acc": 0.23773584905660378,
-             "acc_stderr": 0.02619980880756189,
-             "acc_norm": 0.23773584905660378,
-             "acc_norm_stderr": 0.02619980880756189
-         },
-         "harness|ko_mmlu_public_relations|5": {
-             "acc": 0.21818181818181817,
-             "acc_stderr": 0.03955932861795833,
-             "acc_norm": 0.21818181818181817,
-             "acc_norm_stderr": 0.03955932861795833
-         },
-         "harness|ko_mmlu_high_school_mathematics|5": {
-             "acc": 0.27037037037037037,
-             "acc_stderr": 0.027080372815145665,
-             "acc_norm": 0.27037037037037037,
-             "acc_norm_stderr": 0.027080372815145665
-         },
-         "harness|ko_mmlu_high_school_physics|5": {
-             "acc": 0.33112582781456956,
-             "acc_stderr": 0.038425817186598696,
-             "acc_norm": 0.33112582781456956,
-             "acc_norm_stderr": 0.038425817186598696
-         },
-         "harness|ko_mmlu_sociology|5": {
-             "acc": 0.23383084577114427,
-             "acc_stderr": 0.02992941540834839,
-             "acc_norm": 0.23383084577114427,
-             "acc_norm_stderr": 0.02992941540834839
-         },
-         "harness|ko_mmlu_college_medicine|5": {
-             "acc": 0.24855491329479767,
-             "acc_stderr": 0.03295304696818318,
-             "acc_norm": 0.24855491329479767,
-             "acc_norm_stderr": 0.03295304696818318
-         },
-         "harness|ko_mmlu_elementary_mathematics|5": {
-             "acc": 0.2566137566137566,
-             "acc_stderr": 0.022494510767503154,
-             "acc_norm": 0.2566137566137566,
-             "acc_norm_stderr": 0.022494510767503154
-         },
-         "harness|ko_mmlu_college_biology|5": {
-             "acc": 0.22916666666666666,
-             "acc_stderr": 0.03514697467862388,
-             "acc_norm": 0.22916666666666666,
-             "acc_norm_stderr": 0.03514697467862388
-         },
-         "harness|ko_mmlu_college_chemistry|5": {
-             "acc": 0.27,
-             "acc_stderr": 0.044619604333847394,
-             "acc_norm": 0.27,
-             "acc_norm_stderr": 0.044619604333847394
-         },
-         "harness|ko_mmlu_us_foreign_policy|5": {
-             "acc": 0.26,
-             "acc_stderr": 0.044084400227680794,
-             "acc_norm": 0.26,
-             "acc_norm_stderr": 0.044084400227680794
-         },
-         "harness|ko_mmlu_moral_disputes|5": {
-             "acc": 0.24566473988439305,
-             "acc_stderr": 0.02317629820399201,
-             "acc_norm": 0.24566473988439305,
-             "acc_norm_stderr": 0.02317629820399201
-         },
-         "harness|ko_mmlu_logical_fallacies|5": {
-             "acc": 0.2331288343558282,
-             "acc_stderr": 0.0332201579577674,
-             "acc_norm": 0.2331288343558282,
-             "acc_norm_stderr": 0.0332201579577674
-         },
-         "harness|ko_mmlu_prehistory|5": {
-             "acc": 0.2191358024691358,
-             "acc_stderr": 0.023016705640262185,
-             "acc_norm": 0.2191358024691358,
-             "acc_norm_stderr": 0.023016705640262185
-         },
-         "harness|ko_mmlu_college_mathematics|5": {
-             "acc": 0.29,
-             "acc_stderr": 0.045604802157206845,
-             "acc_norm": 0.29,
-             "acc_norm_stderr": 0.045604802157206845
-         },
-         "harness|ko_mmlu_high_school_government_and_politics|5": {
-             "acc": 0.36787564766839376,
-             "acc_stderr": 0.034801756684660366,
-             "acc_norm": 0.36787564766839376,
-             "acc_norm_stderr": 0.034801756684660366
-         },
-         "harness|ko_mmlu_econometrics|5": {
-             "acc": 0.24561403508771928,
-             "acc_stderr": 0.04049339297748141,
-             "acc_norm": 0.24561403508771928,
-             "acc_norm_stderr": 0.04049339297748141
-         },
-         "harness|ko_mmlu_high_school_psychology|5": {
-             "acc": 0.24036697247706423,
-             "acc_stderr": 0.01832060732096407,
-             "acc_norm": 0.24036697247706423,
-             "acc_norm_stderr": 0.01832060732096407
-         },
-         "harness|ko_mmlu_formal_logic|5": {
-             "acc": 0.24603174603174602,
-             "acc_stderr": 0.03852273364924314,
-             "acc_norm": 0.24603174603174602,
-             "acc_norm_stderr": 0.03852273364924314
-         },
-         "harness|ko_mmlu_nutrition|5": {
-             "acc": 0.24183006535947713,
-             "acc_stderr": 0.024518195641879334,
-             "acc_norm": 0.24183006535947713,
-             "acc_norm_stderr": 0.024518195641879334
-         },
-         "harness|ko_mmlu_business_ethics|5": {
-             "acc": 0.2,
-             "acc_stderr": 0.04020151261036846,
-             "acc_norm": 0.2,
-             "acc_norm_stderr": 0.04020151261036846
-         },
-         "harness|ko_mmlu_international_law|5": {
-             "acc": 0.24793388429752067,
-             "acc_stderr": 0.039418975265163025,
-             "acc_norm": 0.24793388429752067,
-             "acc_norm_stderr": 0.039418975265163025
-         },
-         "harness|ko_mmlu_astronomy|5": {
-             "acc": 0.17763157894736842,
-             "acc_stderr": 0.03110318238312338,
-             "acc_norm": 0.17763157894736842,
277
- "acc_norm_stderr": 0.03110318238312338
278
- },
279
- "harness|ko_mmlu_professional_psychology|5": {
280
- "acc": 0.2173202614379085,
281
- "acc_stderr": 0.016684820929148598,
282
- "acc_norm": 0.2173202614379085,
283
- "acc_norm_stderr": 0.016684820929148598
284
- },
285
- "harness|ko_mmlu_professional_accounting|5": {
286
- "acc": 0.24822695035460993,
287
- "acc_stderr": 0.025770015644290396,
288
- "acc_norm": 0.24822695035460993,
289
- "acc_norm_stderr": 0.025770015644290396
290
- },
291
- "harness|ko_mmlu_machine_learning|5": {
292
- "acc": 0.16071428571428573,
293
- "acc_stderr": 0.0348594609647574,
294
- "acc_norm": 0.16071428571428573,
295
- "acc_norm_stderr": 0.0348594609647574
296
- },
297
- "harness|ko_mmlu_high_school_statistics|5": {
298
- "acc": 0.4722222222222222,
299
- "acc_stderr": 0.0340470532865388,
300
- "acc_norm": 0.4722222222222222,
301
- "acc_norm_stderr": 0.0340470532865388
302
- },
303
- "harness|ko_mmlu_moral_scenarios|5": {
304
- "acc": 0.23798882681564246,
305
- "acc_stderr": 0.014242630070574892,
306
- "acc_norm": 0.23798882681564246,
307
- "acc_norm_stderr": 0.014242630070574892
308
- },
309
- "harness|ko_mmlu_college_computer_science|5": {
310
- "acc": 0.33,
311
- "acc_stderr": 0.04725815626252604,
312
- "acc_norm": 0.33,
313
- "acc_norm_stderr": 0.04725815626252604
314
- },
315
- "harness|ko_mmlu_high_school_computer_science|5": {
316
- "acc": 0.28,
317
- "acc_stderr": 0.04512608598542127,
318
- "acc_norm": 0.28,
319
- "acc_norm_stderr": 0.04512608598542127
320
- },
321
- "harness|ko_mmlu_professional_medicine|5": {
322
- "acc": 0.4485294117647059,
323
- "acc_stderr": 0.030211479609121593,
324
- "acc_norm": 0.4485294117647059,
325
- "acc_norm_stderr": 0.030211479609121593
326
- },
327
- "harness|ko_mmlu_security_studies|5": {
328
- "acc": 0.3877551020408163,
329
- "acc_stderr": 0.03119223072679566,
330
- "acc_norm": 0.3877551020408163,
331
- "acc_norm_stderr": 0.03119223072679566
332
- },
333
- "harness|ko_mmlu_high_school_world_history|5": {
334
- "acc": 0.2616033755274262,
335
- "acc_stderr": 0.028609516716994934,
336
- "acc_norm": 0.2616033755274262,
337
- "acc_norm_stderr": 0.028609516716994934
338
- },
339
- "harness|ko_mmlu_professional_law|5": {
340
- "acc": 0.2457627118644068,
341
- "acc_stderr": 0.010996156635142692,
342
- "acc_norm": 0.2457627118644068,
343
- "acc_norm_stderr": 0.010996156635142692
344
- },
345
- "harness|ko_mmlu_high_school_us_history|5": {
346
- "acc": 0.25980392156862747,
347
- "acc_stderr": 0.030778554678693257,
348
- "acc_norm": 0.25980392156862747,
349
- "acc_norm_stderr": 0.030778554678693257
350
- },
351
- "harness|ko_mmlu_high_school_european_history|5": {
352
- "acc": 0.21818181818181817,
353
- "acc_stderr": 0.03225078108306289,
354
- "acc_norm": 0.21818181818181817,
355
- "acc_norm_stderr": 0.03225078108306289
356
- },
357
- "harness|ko_truthfulqa_mc|0": {
358
- "mc1": 0.24479804161566707,
359
- "mc1_stderr": 0.015051869486715021,
360
- "mc2": 0.47010869246857606,
361
- "mc2_stderr": 0.01615099583611709
362
- },
363
- "harness|ko_commongen_v2|2": {
364
- "acc": 0.2101534828807556,
365
- "acc_stderr": 0.014007301224897517,
366
- "acc_norm": 0.3707201889020071,
367
- "acc_norm_stderr": 0.01660580128921259
368
- }
369
- },
370
- "versions": {
371
- "all": 0,
372
- "harness|ko_arc_challenge|25": 0,
373
- "harness|ko_hellaswag|10": 0,
374
- "harness|ko_mmlu_world_religions|5": 1,
375
- "harness|ko_mmlu_management|5": 1,
376
- "harness|ko_mmlu_miscellaneous|5": 1,
377
- "harness|ko_mmlu_anatomy|5": 1,
378
- "harness|ko_mmlu_abstract_algebra|5": 1,
379
- "harness|ko_mmlu_conceptual_physics|5": 1,
380
- "harness|ko_mmlu_virology|5": 1,
381
- "harness|ko_mmlu_philosophy|5": 1,
382
- "harness|ko_mmlu_human_aging|5": 1,
383
- "harness|ko_mmlu_human_sexuality|5": 1,
384
- "harness|ko_mmlu_medical_genetics|5": 1,
385
- "harness|ko_mmlu_high_school_geography|5": 1,
386
- "harness|ko_mmlu_electrical_engineering|5": 1,
387
- "harness|ko_mmlu_college_physics|5": 1,
388
- "harness|ko_mmlu_high_school_microeconomics|5": 1,
389
- "harness|ko_mmlu_high_school_macroeconomics|5": 1,
390
- "harness|ko_mmlu_computer_security|5": 1,
391
- "harness|ko_mmlu_global_facts|5": 1,
392
- "harness|ko_mmlu_jurisprudence|5": 1,
393
- "harness|ko_mmlu_high_school_chemistry|5": 1,
394
- "harness|ko_mmlu_high_school_biology|5": 1,
395
- "harness|ko_mmlu_marketing|5": 1,
396
- "harness|ko_mmlu_clinical_knowledge|5": 1,
397
- "harness|ko_mmlu_public_relations|5": 1,
398
- "harness|ko_mmlu_high_school_mathematics|5": 1,
399
- "harness|ko_mmlu_high_school_physics|5": 1,
400
- "harness|ko_mmlu_sociology|5": 1,
401
- "harness|ko_mmlu_college_medicine|5": 1,
402
- "harness|ko_mmlu_elementary_mathematics|5": 1,
403
- "harness|ko_mmlu_college_biology|5": 1,
404
- "harness|ko_mmlu_college_chemistry|5": 1,
405
- "harness|ko_mmlu_us_foreign_policy|5": 1,
406
- "harness|ko_mmlu_moral_disputes|5": 1,
407
- "harness|ko_mmlu_logical_fallacies|5": 1,
408
- "harness|ko_mmlu_prehistory|5": 1,
409
- "harness|ko_mmlu_college_mathematics|5": 1,
410
- "harness|ko_mmlu_high_school_government_and_politics|5": 1,
411
- "harness|ko_mmlu_econometrics|5": 1,
412
- "harness|ko_mmlu_high_school_psychology|5": 1,
413
- "harness|ko_mmlu_formal_logic|5": 1,
414
- "harness|ko_mmlu_nutrition|5": 1,
415
- "harness|ko_mmlu_business_ethics|5": 1,
416
- "harness|ko_mmlu_international_law|5": 1,
417
- "harness|ko_mmlu_astronomy|5": 1,
418
- "harness|ko_mmlu_professional_psychology|5": 1,
419
- "harness|ko_mmlu_professional_accounting|5": 1,
420
- "harness|ko_mmlu_machine_learning|5": 1,
421
- "harness|ko_mmlu_high_school_statistics|5": 1,
422
- "harness|ko_mmlu_moral_scenarios|5": 1,
423
- "harness|ko_mmlu_college_computer_science|5": 1,
424
- "harness|ko_mmlu_high_school_computer_science|5": 1,
425
- "harness|ko_mmlu_professional_medicine|5": 1,
426
- "harness|ko_mmlu_security_studies|5": 1,
427
- "harness|ko_mmlu_high_school_world_history|5": 1,
428
- "harness|ko_mmlu_professional_law|5": 1,
429
- "harness|ko_mmlu_high_school_us_history|5": 1,
430
- "harness|ko_mmlu_high_school_european_history|5": 1,
431
- "harness|ko_truthfulqa_mc|0": 0,
432
- "harness|ko_commongen_v2|2": 1
433
- },
434
- "config_general": {
435
- "model_name": "beomi/kollama-13b",
436
- "model_sha": "d25ffb8c1a147e67c1bce0aca49a710395ce18ae",
437
- "model_dtype": "torch.float16",
438
- "lighteval_sha": "",
439
- "num_few_shot_default": 0,
440
- "num_fewshot_seeds": 1,
441
- "override_batch_size": 1,
442
- "max_samples": null
443
- }
444
- }
huggyllama/llama-13b/result_2023-09-27 04:58:53.json DELETED
@@ -1,444 +0,0 @@
- {
- "results": {
- "harness|ko_arc_challenge|25": {
- "acc": 0.2090443686006826,
- "acc_stderr": 0.011882746987406451,
- "acc_norm": 0.257679180887372,
- "acc_norm_stderr": 0.012780770562768414
- },
- "harness|ko_hellaswag|10": {
- "acc": 0.3011352320254929,
- "acc_stderr": 0.004578137949298174,
- "acc_norm": 0.3565026887074288,
- "acc_norm_stderr": 0.004779872250633706
- },
- "harness|ko_mmlu_world_religions|5": {
- "acc": 0.3391812865497076,
- "acc_stderr": 0.03631053496488905,
- "acc_norm": 0.3391812865497076,
- "acc_norm_stderr": 0.03631053496488905
- },
- "harness|ko_mmlu_management|5": {
- "acc": 0.2815533980582524,
- "acc_stderr": 0.04453254836326467,
- "acc_norm": 0.2815533980582524,
- "acc_norm_stderr": 0.04453254836326467
- },
- "harness|ko_mmlu_miscellaneous|5": {
- "acc": 0.30779054916985954,
- "acc_stderr": 0.01650604504515563,
- "acc_norm": 0.30779054916985954,
- "acc_norm_stderr": 0.01650604504515563
- },
- "harness|ko_mmlu_anatomy|5": {
- "acc": 0.2814814814814815,
- "acc_stderr": 0.03885004245800254,
- "acc_norm": 0.2814814814814815,
- "acc_norm_stderr": 0.03885004245800254
- },
- "harness|ko_mmlu_abstract_algebra|5": {
- "acc": 0.29,
- "acc_stderr": 0.045604802157206845,
- "acc_norm": 0.29,
- "acc_norm_stderr": 0.045604802157206845
- },
- "harness|ko_mmlu_conceptual_physics|5": {
- "acc": 0.3148936170212766,
- "acc_stderr": 0.03036358219723816,
- "acc_norm": 0.3148936170212766,
- "acc_norm_stderr": 0.03036358219723816
- },
- "harness|ko_mmlu_virology|5": {
- "acc": 0.28313253012048195,
- "acc_stderr": 0.03507295431370519,
- "acc_norm": 0.28313253012048195,
- "acc_norm_stderr": 0.03507295431370519
- },
- "harness|ko_mmlu_philosophy|5": {
- "acc": 0.3536977491961415,
- "acc_stderr": 0.02715520810320086,
- "acc_norm": 0.3536977491961415,
- "acc_norm_stderr": 0.02715520810320086
- },
- "harness|ko_mmlu_human_aging|5": {
- "acc": 0.27802690582959644,
- "acc_stderr": 0.03006958487449405,
- "acc_norm": 0.27802690582959644,
- "acc_norm_stderr": 0.03006958487449405
- },
- "harness|ko_mmlu_human_sexuality|5": {
- "acc": 0.3053435114503817,
- "acc_stderr": 0.040393149787245605,
- "acc_norm": 0.3053435114503817,
- "acc_norm_stderr": 0.040393149787245605
- },
- "harness|ko_mmlu_medical_genetics|5": {
- "acc": 0.29,
- "acc_stderr": 0.04560480215720684,
- "acc_norm": 0.29,
- "acc_norm_stderr": 0.04560480215720684
- },
- "harness|ko_mmlu_high_school_geography|5": {
- "acc": 0.29292929292929293,
- "acc_stderr": 0.03242497958178815,
- "acc_norm": 0.29292929292929293,
- "acc_norm_stderr": 0.03242497958178815
- },
- "harness|ko_mmlu_electrical_engineering|5": {
- "acc": 0.32413793103448274,
- "acc_stderr": 0.039004320691855526,
- "acc_norm": 0.32413793103448274,
- "acc_norm_stderr": 0.039004320691855526
- },
- "harness|ko_mmlu_college_physics|5": {
- "acc": 0.19607843137254902,
- "acc_stderr": 0.03950581861179964,
- "acc_norm": 0.19607843137254902,
- "acc_norm_stderr": 0.03950581861179964
- },
- "harness|ko_mmlu_high_school_microeconomics|5": {
- "acc": 0.3235294117647059,
- "acc_stderr": 0.030388353551886838,
- "acc_norm": 0.3235294117647059,
- "acc_norm_stderr": 0.030388353551886838
- },
- "harness|ko_mmlu_high_school_macroeconomics|5": {
- "acc": 0.3333333333333333,
- "acc_stderr": 0.023901157979402538,
- "acc_norm": 0.3333333333333333,
- "acc_norm_stderr": 0.023901157979402538
- },
- "harness|ko_mmlu_computer_security|5": {
- "acc": 0.32,
- "acc_stderr": 0.04688261722621505,
- "acc_norm": 0.32,
- "acc_norm_stderr": 0.04688261722621505
- },
- "harness|ko_mmlu_global_facts|5": {
- "acc": 0.21,
- "acc_stderr": 0.040936018074033256,
- "acc_norm": 0.21,
- "acc_norm_stderr": 0.040936018074033256
- },
- "harness|ko_mmlu_jurisprudence|5": {
- "acc": 0.39814814814814814,
- "acc_stderr": 0.04732332615978814,
- "acc_norm": 0.39814814814814814,
- "acc_norm_stderr": 0.04732332615978814
- },
- "harness|ko_mmlu_high_school_chemistry|5": {
- "acc": 0.2660098522167488,
- "acc_stderr": 0.031089826002937523,
- "acc_norm": 0.2660098522167488,
- "acc_norm_stderr": 0.031089826002937523
- },
- "harness|ko_mmlu_high_school_biology|5": {
- "acc": 0.33548387096774196,
- "acc_stderr": 0.02686020644472434,
- "acc_norm": 0.33548387096774196,
- "acc_norm_stderr": 0.02686020644472434
- },
- "harness|ko_mmlu_marketing|5": {
- "acc": 0.4658119658119658,
- "acc_stderr": 0.03267942734081228,
- "acc_norm": 0.4658119658119658,
- "acc_norm_stderr": 0.03267942734081228
- },
- "harness|ko_mmlu_clinical_knowledge|5": {
- "acc": 0.27169811320754716,
- "acc_stderr": 0.027377706624670716,
- "acc_norm": 0.27169811320754716,
- "acc_norm_stderr": 0.027377706624670716
- },
- "harness|ko_mmlu_public_relations|5": {
- "acc": 0.3181818181818182,
- "acc_stderr": 0.04461272175910507,
- "acc_norm": 0.3181818181818182,
- "acc_norm_stderr": 0.04461272175910507
- },
- "harness|ko_mmlu_high_school_mathematics|5": {
- "acc": 0.21851851851851853,
- "acc_stderr": 0.025195752251823793,
- "acc_norm": 0.21851851851851853,
- "acc_norm_stderr": 0.025195752251823793
- },
- "harness|ko_mmlu_high_school_physics|5": {
- "acc": 0.2582781456953642,
- "acc_stderr": 0.035737053147634576,
- "acc_norm": 0.2582781456953642,
- "acc_norm_stderr": 0.035737053147634576
- },
- "harness|ko_mmlu_sociology|5": {
- "acc": 0.3283582089552239,
- "acc_stderr": 0.033206858897443244,
- "acc_norm": 0.3283582089552239,
- "acc_norm_stderr": 0.033206858897443244
- },
- "harness|ko_mmlu_college_medicine|5": {
- "acc": 0.2774566473988439,
- "acc_stderr": 0.034140140070440354,
- "acc_norm": 0.2774566473988439,
- "acc_norm_stderr": 0.034140140070440354
- },
- "harness|ko_mmlu_elementary_mathematics|5": {
- "acc": 0.23544973544973544,
- "acc_stderr": 0.021851509822031705,
- "acc_norm": 0.23544973544973544,
- "acc_norm_stderr": 0.021851509822031705
- },
- "harness|ko_mmlu_college_biology|5": {
- "acc": 0.2708333333333333,
- "acc_stderr": 0.03716177437566018,
- "acc_norm": 0.2708333333333333,
- "acc_norm_stderr": 0.03716177437566018
- },
- "harness|ko_mmlu_college_chemistry|5": {
- "acc": 0.27,
- "acc_stderr": 0.0446196043338474,
- "acc_norm": 0.27,
- "acc_norm_stderr": 0.0446196043338474
- },
- "harness|ko_mmlu_us_foreign_policy|5": {
- "acc": 0.31,
- "acc_stderr": 0.04648231987117316,
- "acc_norm": 0.31,
- "acc_norm_stderr": 0.04648231987117316
- },
- "harness|ko_mmlu_moral_disputes|5": {
- "acc": 0.35260115606936415,
- "acc_stderr": 0.025722802200895817,
- "acc_norm": 0.35260115606936415,
- "acc_norm_stderr": 0.025722802200895817
- },
- "harness|ko_mmlu_logical_fallacies|5": {
- "acc": 0.2822085889570552,
- "acc_stderr": 0.03536117886664743,
- "acc_norm": 0.2822085889570552,
- "acc_norm_stderr": 0.03536117886664743
- },
- "harness|ko_mmlu_prehistory|5": {
- "acc": 0.3148148148148148,
- "acc_stderr": 0.02584224870090217,
- "acc_norm": 0.3148148148148148,
- "acc_norm_stderr": 0.02584224870090217
- },
- "harness|ko_mmlu_college_mathematics|5": {
- "acc": 0.34,
- "acc_stderr": 0.04760952285695235,
- "acc_norm": 0.34,
- "acc_norm_stderr": 0.04760952285695235
- },
- "harness|ko_mmlu_high_school_government_and_politics|5": {
- "acc": 0.33678756476683935,
- "acc_stderr": 0.03410780251836184,
- "acc_norm": 0.33678756476683935,
- "acc_norm_stderr": 0.03410780251836184
- },
- "harness|ko_mmlu_econometrics|5": {
- "acc": 0.22807017543859648,
- "acc_stderr": 0.03947152782669415,
- "acc_norm": 0.22807017543859648,
- "acc_norm_stderr": 0.03947152782669415
- },
- "harness|ko_mmlu_high_school_psychology|5": {
- "acc": 0.29908256880733947,
- "acc_stderr": 0.01963041728541518,
- "acc_norm": 0.29908256880733947,
- "acc_norm_stderr": 0.01963041728541518
- },
- "harness|ko_mmlu_formal_logic|5": {
- "acc": 0.3333333333333333,
- "acc_stderr": 0.042163702135578345,
- "acc_norm": 0.3333333333333333,
- "acc_norm_stderr": 0.042163702135578345
- },
- "harness|ko_mmlu_nutrition|5": {
- "acc": 0.3758169934640523,
- "acc_stderr": 0.027732834353363944,
- "acc_norm": 0.3758169934640523,
- "acc_norm_stderr": 0.027732834353363944
- },
- "harness|ko_mmlu_business_ethics|5": {
- "acc": 0.27,
- "acc_stderr": 0.044619604333847394,
- "acc_norm": 0.27,
- "acc_norm_stderr": 0.044619604333847394
- },
- "harness|ko_mmlu_international_law|5": {
- "acc": 0.4462809917355372,
- "acc_stderr": 0.04537935177947879,
- "acc_norm": 0.4462809917355372,
- "acc_norm_stderr": 0.04537935177947879
- },
- "harness|ko_mmlu_astronomy|5": {
- "acc": 0.2894736842105263,
- "acc_stderr": 0.03690677986137282,
- "acc_norm": 0.2894736842105263,
- "acc_norm_stderr": 0.03690677986137282
- },
- "harness|ko_mmlu_professional_psychology|5": {
- "acc": 0.2826797385620915,
- "acc_stderr": 0.018217269552053442,
- "acc_norm": 0.2826797385620915,
- "acc_norm_stderr": 0.018217269552053442
- },
- "harness|ko_mmlu_professional_accounting|5": {
- "acc": 0.26595744680851063,
- "acc_stderr": 0.026358065698880585,
- "acc_norm": 0.26595744680851063,
- "acc_norm_stderr": 0.026358065698880585
- },
- "harness|ko_mmlu_machine_learning|5": {
- "acc": 0.35714285714285715,
- "acc_stderr": 0.04547960999764376,
- "acc_norm": 0.35714285714285715,
- "acc_norm_stderr": 0.04547960999764376
- },
- "harness|ko_mmlu_high_school_statistics|5": {
- "acc": 0.3287037037037037,
- "acc_stderr": 0.032036140846700596,
- "acc_norm": 0.3287037037037037,
- "acc_norm_stderr": 0.032036140846700596
- },
- "harness|ko_mmlu_moral_scenarios|5": {
- "acc": 0.27150837988826815,
- "acc_stderr": 0.014874252168095278,
- "acc_norm": 0.27150837988826815,
- "acc_norm_stderr": 0.014874252168095278
- },
- "harness|ko_mmlu_college_computer_science|5": {
- "acc": 0.32,
- "acc_stderr": 0.04688261722621504,
- "acc_norm": 0.32,
- "acc_norm_stderr": 0.04688261722621504
- },
- "harness|ko_mmlu_high_school_computer_science|5": {
- "acc": 0.41,
- "acc_stderr": 0.049431107042371025,
- "acc_norm": 0.41,
- "acc_norm_stderr": 0.049431107042371025
- },
- "harness|ko_mmlu_professional_medicine|5": {
- "acc": 0.25,
- "acc_stderr": 0.026303648393696036,
- "acc_norm": 0.25,
- "acc_norm_stderr": 0.026303648393696036
- },
- "harness|ko_mmlu_security_studies|5": {
- "acc": 0.44081632653061226,
- "acc_stderr": 0.03178419114175363,
- "acc_norm": 0.44081632653061226,
- "acc_norm_stderr": 0.03178419114175363
- },
- "harness|ko_mmlu_high_school_world_history|5": {
- "acc": 0.29535864978902954,
- "acc_stderr": 0.029696338713422893,
- "acc_norm": 0.29535864978902954,
- "acc_norm_stderr": 0.029696338713422893
- },
- "harness|ko_mmlu_professional_law|5": {
- "acc": 0.2666232073011734,
- "acc_stderr": 0.011293836031612138,
- "acc_norm": 0.2666232073011734,
- "acc_norm_stderr": 0.011293836031612138
- },
- "harness|ko_mmlu_high_school_us_history|5": {
- "acc": 0.28921568627450983,
- "acc_stderr": 0.03182231867647553,
- "acc_norm": 0.28921568627450983,
- "acc_norm_stderr": 0.03182231867647553
- },
- "harness|ko_mmlu_high_school_european_history|5": {
- "acc": 0.32727272727272727,
- "acc_stderr": 0.03663974994391242,
- "acc_norm": 0.32727272727272727,
- "acc_norm_stderr": 0.03663974994391242
- },
- "harness|ko_truthfulqa_mc|0": {
- "mc1": 0.2717258261933905,
- "mc1_stderr": 0.015572840452875833,
- "mc2": 0.4355240865330701,
- "mc2_stderr": 0.015876262317642633
- },
- "harness|ko_commongen_v2|2": {
- "acc": 0.22077922077922077,
- "acc_stderr": 0.014260152803540045,
- "acc_norm": 0.34238488783943327,
- "acc_norm_stderr": 0.016313907844146366
- }
- },
- "versions": {
- "all": 0,
- "harness|ko_arc_challenge|25": 0,
- "harness|ko_hellaswag|10": 0,
- "harness|ko_mmlu_world_religions|5": 1,
- "harness|ko_mmlu_management|5": 1,
- "harness|ko_mmlu_miscellaneous|5": 1,
- "harness|ko_mmlu_anatomy|5": 1,
- "harness|ko_mmlu_abstract_algebra|5": 1,
- "harness|ko_mmlu_conceptual_physics|5": 1,
- "harness|ko_mmlu_virology|5": 1,
- "harness|ko_mmlu_philosophy|5": 1,
- "harness|ko_mmlu_human_aging|5": 1,
- "harness|ko_mmlu_human_sexuality|5": 1,
- "harness|ko_mmlu_medical_genetics|5": 1,
- "harness|ko_mmlu_high_school_geography|5": 1,
- "harness|ko_mmlu_electrical_engineering|5": 1,
- "harness|ko_mmlu_college_physics|5": 1,
- "harness|ko_mmlu_high_school_microeconomics|5": 1,
- "harness|ko_mmlu_high_school_macroeconomics|5": 1,
- "harness|ko_mmlu_computer_security|5": 1,
- "harness|ko_mmlu_global_facts|5": 1,
- "harness|ko_mmlu_jurisprudence|5": 1,
- "harness|ko_mmlu_high_school_chemistry|5": 1,
- "harness|ko_mmlu_high_school_biology|5": 1,
- "harness|ko_mmlu_marketing|5": 1,
- "harness|ko_mmlu_clinical_knowledge|5": 1,
- "harness|ko_mmlu_public_relations|5": 1,
- "harness|ko_mmlu_high_school_mathematics|5": 1,
- "harness|ko_mmlu_high_school_physics|5": 1,
- "harness|ko_mmlu_sociology|5": 1,
- "harness|ko_mmlu_college_medicine|5": 1,
- "harness|ko_mmlu_elementary_mathematics|5": 1,
- "harness|ko_mmlu_college_biology|5": 1,
- "harness|ko_mmlu_college_chemistry|5": 1,
- "harness|ko_mmlu_us_foreign_policy|5": 1,
- "harness|ko_mmlu_moral_disputes|5": 1,
- "harness|ko_mmlu_logical_fallacies|5": 1,
- "harness|ko_mmlu_prehistory|5": 1,
- "harness|ko_mmlu_college_mathematics|5": 1,
- "harness|ko_mmlu_high_school_government_and_politics|5": 1,
- "harness|ko_mmlu_econometrics|5": 1,
- "harness|ko_mmlu_high_school_psychology|5": 1,
- "harness|ko_mmlu_formal_logic|5": 1,
- "harness|ko_mmlu_nutrition|5": 1,
- "harness|ko_mmlu_business_ethics|5": 1,
- "harness|ko_mmlu_international_law|5": 1,
- "harness|ko_mmlu_astronomy|5": 1,
- "harness|ko_mmlu_professional_psychology|5": 1,
- "harness|ko_mmlu_professional_accounting|5": 1,
- "harness|ko_mmlu_machine_learning|5": 1,
- "harness|ko_mmlu_high_school_statistics|5": 1,
- "harness|ko_mmlu_moral_scenarios|5": 1,
- "harness|ko_mmlu_college_computer_science|5": 1,
- "harness|ko_mmlu_high_school_computer_science|5": 1,
- "harness|ko_mmlu_professional_medicine|5": 1,
- "harness|ko_mmlu_security_studies|5": 1,
- "harness|ko_mmlu_high_school_world_history|5": 1,
- "harness|ko_mmlu_professional_law|5": 1,
- "harness|ko_mmlu_high_school_us_history|5": 1,
- "harness|ko_mmlu_high_school_european_history|5": 1,
- "harness|ko_truthfulqa_mc|0": 0,
- "harness|ko_commongen_v2|2": 1
- },
- "config_general": {
- "model_name": "huggyllama/llama-13b",
- "model_sha": "bf57045473f207bb1de1ed035ace226f4d9f9bba",
- "model_dtype": "torch.float16",
- "lighteval_sha": "",
- "num_few_shot_default": 0,
- "num_fewshot_seeds": 1,
- "override_batch_size": 1,
- "max_samples": null
- }
- }
skt/ko-gpt-trinity-1.2B-v0.5/result_2023-09-27 05:12:48.json DELETED
@@ -1,444 +0,0 @@
- {
- "results": {
- "harness|ko_arc_challenge|25": {
- "acc": 0.21331058020477817,
- "acc_stderr": 0.011970971742326334,
- "acc_norm": 0.2687713310580205,
- "acc_norm_stderr": 0.012955065963710686
- },
- "harness|ko_hellaswag|10": {
- "acc": 0.3132842063333997,
- "acc_stderr": 0.004628809258483527,
- "acc_norm": 0.3739294961163115,
- "acc_norm_stderr": 0.004828564090620288
- },
- "harness|ko_mmlu_world_religions|5": {
- "acc": 0.2046783625730994,
- "acc_stderr": 0.03094445977853321,
- "acc_norm": 0.2046783625730994,
- "acc_norm_stderr": 0.03094445977853321
- },
- "harness|ko_mmlu_management|5": {
- "acc": 0.2524271844660194,
- "acc_stderr": 0.04301250399690877,
- "acc_norm": 0.2524271844660194,
- "acc_norm_stderr": 0.04301250399690877
- },
- "harness|ko_mmlu_miscellaneous|5": {
- "acc": 0.2681992337164751,
- "acc_stderr": 0.01584243083526943,
- "acc_norm": 0.2681992337164751,
- "acc_norm_stderr": 0.01584243083526943
- },
- "harness|ko_mmlu_anatomy|5": {
- "acc": 0.2222222222222222,
- "acc_stderr": 0.03591444084196969,
- "acc_norm": 0.2222222222222222,
- "acc_norm_stderr": 0.03591444084196969
- },
- "harness|ko_mmlu_abstract_algebra|5": {
- "acc": 0.28,
- "acc_stderr": 0.045126085985421276,
- "acc_norm": 0.28,
- "acc_norm_stderr": 0.045126085985421276
- },
- "harness|ko_mmlu_conceptual_physics|5": {
- "acc": 0.23829787234042554,
- "acc_stderr": 0.0278512529738898,
- "acc_norm": 0.23829787234042554,
- "acc_norm_stderr": 0.0278512529738898
- },
- "harness|ko_mmlu_virology|5": {
- "acc": 0.3192771084337349,
- "acc_stderr": 0.03629335329947859,
- "acc_norm": 0.3192771084337349,
- "acc_norm_stderr": 0.03629335329947859
- },
- "harness|ko_mmlu_philosophy|5": {
- "acc": 0.2282958199356913,
- "acc_stderr": 0.023839303311398195,
- "acc_norm": 0.2282958199356913,
- "acc_norm_stderr": 0.023839303311398195
- },
- "harness|ko_mmlu_human_aging|5": {
- "acc": 0.20179372197309417,
- "acc_stderr": 0.02693611191280227,
- "acc_norm": 0.20179372197309417,
- "acc_norm_stderr": 0.02693611191280227
- },
- "harness|ko_mmlu_human_sexuality|5": {
- "acc": 0.22900763358778625,
- "acc_stderr": 0.036853466317118506,
- "acc_norm": 0.22900763358778625,
- "acc_norm_stderr": 0.036853466317118506
- },
- "harness|ko_mmlu_medical_genetics|5": {
- "acc": 0.24,
- "acc_stderr": 0.042923469599092816,
- "acc_norm": 0.24,
- "acc_norm_stderr": 0.042923469599092816
- },
- "harness|ko_mmlu_high_school_geography|5": {
- "acc": 0.25757575757575757,
- "acc_stderr": 0.03115626951964684,
- "acc_norm": 0.25757575757575757,
- "acc_norm_stderr": 0.03115626951964684
- },
- "harness|ko_mmlu_electrical_engineering|5": {
- "acc": 0.2413793103448276,
- "acc_stderr": 0.03565998174135302,
- "acc_norm": 0.2413793103448276,
- "acc_norm_stderr": 0.03565998174135302
- },
- "harness|ko_mmlu_college_physics|5": {
- "acc": 0.20588235294117646,
- "acc_stderr": 0.04023382273617747,
- "acc_norm": 0.20588235294117646,
- "acc_norm_stderr": 0.04023382273617747
- },
- "harness|ko_mmlu_high_school_microeconomics|5": {
- "acc": 0.33613445378151263,
- "acc_stderr": 0.030684737115135356,
- "acc_norm": 0.33613445378151263,
- "acc_norm_stderr": 0.030684737115135356
- },
- "harness|ko_mmlu_high_school_macroeconomics|5": {
- "acc": 0.32051282051282054,
- "acc_stderr": 0.02366129639396428,
- "acc_norm": 0.32051282051282054,
- "acc_norm_stderr": 0.02366129639396428
- },
- "harness|ko_mmlu_computer_security|5": {
- "acc": 0.22,
- "acc_stderr": 0.04163331998932269,
- "acc_norm": 0.22,
- "acc_norm_stderr": 0.04163331998932269
- },
- "harness|ko_mmlu_global_facts|5": {
- "acc": 0.18,
- "acc_stderr": 0.038612291966536955,
- "acc_norm": 0.18,
- "acc_norm_stderr": 0.038612291966536955
- },
- "harness|ko_mmlu_jurisprudence|5": {
- "acc": 0.24074074074074073,
- "acc_stderr": 0.04133119440243838,
- "acc_norm": 0.24074074074074073,
- "acc_norm_stderr": 0.04133119440243838
- },
- "harness|ko_mmlu_high_school_chemistry|5": {
- "acc": 0.30049261083743845,
- "acc_stderr": 0.03225799476233484,
- "acc_norm": 0.30049261083743845,
- "acc_norm_stderr": 0.03225799476233484
- },
- "harness|ko_mmlu_high_school_biology|5": {
- "acc": 0.3193548387096774,
- "acc_stderr": 0.026522709674667768,
- "acc_norm": 0.3193548387096774,
- "acc_norm_stderr": 0.026522709674667768
- },
- "harness|ko_mmlu_marketing|5": {
- "acc": 0.23076923076923078,
- "acc_stderr": 0.02760192138141759,
- "acc_norm": 0.23076923076923078,
- "acc_norm_stderr": 0.02760192138141759
- },
- "harness|ko_mmlu_clinical_knowledge|5": {
- "acc": 0.21509433962264152,
- "acc_stderr": 0.02528839450289137,
- "acc_norm": 0.21509433962264152,
- "acc_norm_stderr": 0.02528839450289137
- },
- "harness|ko_mmlu_public_relations|5": {
- "acc": 0.22727272727272727,
- "acc_stderr": 0.040139645540727735,
- "acc_norm": 0.22727272727272727,
- "acc_norm_stderr": 0.040139645540727735
- },
- "harness|ko_mmlu_high_school_mathematics|5": {
- "acc": 0.26296296296296295,
- "acc_stderr": 0.026842057873833713,
- "acc_norm": 0.26296296296296295,
- "acc_norm_stderr": 0.026842057873833713
- },
- "harness|ko_mmlu_high_school_physics|5": {
- "acc": 0.1986754966887417,
- "acc_stderr": 0.03257847384436774,
- "acc_norm": 0.1986754966887417,
- "acc_norm_stderr": 0.03257847384436774
- },
- "harness|ko_mmlu_sociology|5": {
- "acc": 0.26865671641791045,
- "acc_stderr": 0.03134328358208954,
- "acc_norm": 0.26865671641791045,
- "acc_norm_stderr": 0.03134328358208954
- },
- "harness|ko_mmlu_college_medicine|5": {
- "acc": 0.1791907514450867,
- "acc_stderr": 0.02924251305906329,
- "acc_norm": 0.1791907514450867,
- "acc_norm_stderr": 0.02924251305906329
- },
- "harness|ko_mmlu_elementary_mathematics|5": {
- "acc": 0.25132275132275134,
- "acc_stderr": 0.022340482339643898,
- "acc_norm": 0.25132275132275134,
- "acc_norm_stderr": 0.022340482339643898
- },
- "harness|ko_mmlu_college_biology|5": {
- "acc": 0.2569444444444444,
- "acc_stderr": 0.03653946969442099,
- "acc_norm": 0.2569444444444444,
- "acc_norm_stderr": 0.03653946969442099
- },
- "harness|ko_mmlu_college_chemistry|5": {
- "acc": 0.23,
- "acc_stderr": 0.042295258468165044,
- "acc_norm": 0.23,
- "acc_norm_stderr": 0.042295258468165044
- },
- "harness|ko_mmlu_us_foreign_policy|5": {
- "acc": 0.26,
- "acc_stderr": 0.044084400227680794,
- "acc_norm": 0.26,
- "acc_norm_stderr": 0.044084400227680794
- },
- "harness|ko_mmlu_moral_disputes|5": {
- "acc": 0.24566473988439305,
- "acc_stderr": 0.02317629820399201,
- "acc_norm": 0.24566473988439305,
- "acc_norm_stderr": 0.02317629820399201
- },
- "harness|ko_mmlu_logical_fallacies|5": {
- "acc": 0.3006134969325153,
- "acc_stderr": 0.03602511318806771,
- "acc_norm": 0.3006134969325153,
- "acc_norm_stderr": 0.03602511318806771
- },
- "harness|ko_mmlu_prehistory|5": {
- "acc": 0.25308641975308643,
- "acc_stderr": 0.024191808600713002,
- "acc_norm": 0.25308641975308643,
- "acc_norm_stderr": 0.024191808600713002
- },
- "harness|ko_mmlu_college_mathematics|5": {
- "acc": 0.23,
- "acc_stderr": 0.042295258468165065,
- "acc_norm": 0.23,
- "acc_norm_stderr": 0.042295258468165065
- },
- "harness|ko_mmlu_high_school_government_and_politics|5": {
- "acc": 0.3471502590673575,
- "acc_stderr": 0.03435696168361355,
- "acc_norm": 0.3471502590673575,
- "acc_norm_stderr": 0.03435696168361355
- },
- "harness|ko_mmlu_econometrics|5": {
- "acc": 0.2807017543859649,
- "acc_stderr": 0.04227054451232199,
- "acc_norm": 0.2807017543859649,
- "acc_norm_stderr": 0.04227054451232199
- },
- "harness|ko_mmlu_high_school_psychology|5": {
- "acc": 0.22201834862385322,
- "acc_stderr": 0.01781884956479662,
- "acc_norm": 0.22201834862385322,
- "acc_norm_stderr": 0.01781884956479662
- },
- "harness|ko_mmlu_formal_logic|5": {
- "acc": 0.2777777777777778,
- "acc_stderr": 0.04006168083848877,
- "acc_norm": 0.2777777777777778,
- "acc_norm_stderr": 0.04006168083848877
- },
- "harness|ko_mmlu_nutrition|5": {
- "acc": 0.24509803921568626,
- "acc_stderr": 0.02463004897982476,
- "acc_norm": 0.24509803921568626,
- "acc_norm_stderr": 0.02463004897982476
- },
- "harness|ko_mmlu_business_ethics|5": {
- "acc": 0.27,
- "acc_stderr": 0.0446196043338474,
- "acc_norm": 0.27,
- "acc_norm_stderr": 0.0446196043338474
- },
- "harness|ko_mmlu_international_law|5": {
- "acc": 0.35537190082644626,
- "acc_stderr": 0.04369236326573981,
- "acc_norm": 0.35537190082644626,
- "acc_norm_stderr": 0.04369236326573981
- },
- "harness|ko_mmlu_astronomy|5": {
- "acc": 0.17105263157894737,
- "acc_stderr": 0.030643607071677105,
- "acc_norm": 0.17105263157894737,
- "acc_norm_stderr": 0.030643607071677105
- },
- "harness|ko_mmlu_professional_psychology|5": {
- "acc": 0.2434640522875817,
- "acc_stderr": 0.017362473762146623,
- "acc_norm": 0.2434640522875817,
- "acc_norm_stderr": 0.017362473762146623
- },
- "harness|ko_mmlu_professional_accounting|5": {
- "acc": 0.2553191489361702,
- "acc_stderr": 0.02601199293090201,
- "acc_norm": 0.2553191489361702,
- "acc_norm_stderr": 0.02601199293090201
- },
- "harness|ko_mmlu_machine_learning|5": {
- "acc": 0.1875,
- "acc_stderr": 0.0370468111477387,
- "acc_norm": 0.1875,
- "acc_norm_stderr": 0.0370468111477387
- },
- "harness|ko_mmlu_high_school_statistics|5": {
- "acc": 0.46296296296296297,
- "acc_stderr": 0.03400603625538272,
- "acc_norm": 0.46296296296296297,
- "acc_norm_stderr": 0.03400603625538272
- },
- "harness|ko_mmlu_moral_scenarios|5": {
- "acc": 0.27262569832402234,
- "acc_stderr": 0.014893391735249608,
- "acc_norm": 0.27262569832402234,
- "acc_norm_stderr": 0.014893391735249608
- },
- "harness|ko_mmlu_college_computer_science|5": {
- "acc": 0.31,
- "acc_stderr": 0.04648231987117316,
- "acc_norm": 0.31,
- "acc_norm_stderr": 0.04648231987117316
- },
- "harness|ko_mmlu_high_school_computer_science|5": {
- "acc": 0.27,
- "acc_stderr": 0.0446196043338474,
- "acc_norm": 0.27,
- "acc_norm_stderr": 0.0446196043338474
- },
- "harness|ko_mmlu_professional_medicine|5": {
- "acc": 0.4485294117647059,
- "acc_stderr": 0.030211479609121593,
- "acc_norm": 0.4485294117647059,
- "acc_norm_stderr": 0.030211479609121593
- },
- "harness|ko_mmlu_security_studies|5": {
- "acc": 0.17142857142857143,
- "acc_stderr": 0.024127463462650146,
- "acc_norm": 0.17142857142857143,
- "acc_norm_stderr": 0.024127463462650146
- },
- "harness|ko_mmlu_high_school_world_history|5": {
- "acc": 0.29535864978902954,
- "acc_stderr": 0.029696338713422893,
- "acc_norm": 0.29535864978902954,
- "acc_norm_stderr": 0.029696338713422893
- },
- "harness|ko_mmlu_professional_law|5": {
- "acc": 0.2405475880052151,
- "acc_stderr": 0.010916406735478947,
- "acc_norm": 0.2405475880052151,
- "acc_norm_stderr": 0.010916406735478947
- },
- "harness|ko_mmlu_high_school_us_history|5": {
- "acc": 0.2549019607843137,
- "acc_stderr": 0.030587591351604246,
- "acc_norm": 0.2549019607843137,
- "acc_norm_stderr": 0.030587591351604246
- },
- "harness|ko_mmlu_high_school_european_history|5": {
- "acc": 0.24242424242424243,
- "acc_stderr": 0.033464098810559534,
- "acc_norm": 0.24242424242424243,
- "acc_norm_stderr": 0.033464098810559534
- },
- "harness|ko_truthfulqa_mc|0": {
- "mc1": 0.25091799265605874,
- "mc1_stderr": 0.01517698502770768,
- "mc2": 0.42692392975687876,
- "mc2_stderr": 0.01514023890455296
- },
- "harness|ko_commongen_v2|2": {
- "acc": 0.2857142857142857,
- "acc_stderr": 0.015531620786986736,
- "acc_norm": 0.39669421487603307,
- "acc_norm_stderr": 0.01681943864297141
- }
- },
- "versions": {
- "all": 0,
- "harness|ko_arc_challenge|25": 0,
- "harness|ko_hellaswag|10": 0,
- "harness|ko_mmlu_world_religions|5": 1,
- "harness|ko_mmlu_management|5": 1,
- "harness|ko_mmlu_miscellaneous|5": 1,
- "harness|ko_mmlu_anatomy|5": 1,
- "harness|ko_mmlu_abstract_algebra|5": 1,
- "harness|ko_mmlu_conceptual_physics|5": 1,
- "harness|ko_mmlu_virology|5": 1,
- "harness|ko_mmlu_philosophy|5": 1,
- "harness|ko_mmlu_human_aging|5": 1,
- "harness|ko_mmlu_human_sexuality|5": 1,
- "harness|ko_mmlu_medical_genetics|5": 1,
- "harness|ko_mmlu_high_school_geography|5": 1,
- "harness|ko_mmlu_electrical_engineering|5": 1,
- "harness|ko_mmlu_college_physics|5": 1,
- "harness|ko_mmlu_high_school_microeconomics|5": 1,
- "harness|ko_mmlu_high_school_macroeconomics|5": 1,
- "harness|ko_mmlu_computer_security|5": 1,
- "harness|ko_mmlu_global_facts|5": 1,
- "harness|ko_mmlu_jurisprudence|5": 1,
- "harness|ko_mmlu_high_school_chemistry|5": 1,
- "harness|ko_mmlu_high_school_biology|5": 1,
- "harness|ko_mmlu_marketing|5": 1,
- "harness|ko_mmlu_clinical_knowledge|5": 1,
- "harness|ko_mmlu_public_relations|5": 1,
- "harness|ko_mmlu_high_school_mathematics|5": 1,
- "harness|ko_mmlu_high_school_physics|5": 1,
- "harness|ko_mmlu_sociology|5": 1,
- "harness|ko_mmlu_college_medicine|5": 1,
- "harness|ko_mmlu_elementary_mathematics|5": 1,
- "harness|ko_mmlu_college_biology|5": 1,
- "harness|ko_mmlu_college_chemistry|5": 1,
- "harness|ko_mmlu_us_foreign_policy|5": 1,
- "harness|ko_mmlu_moral_disputes|5": 1,
- "harness|ko_mmlu_logical_fallacies|5": 1,
- "harness|ko_mmlu_prehistory|5": 1,
- "harness|ko_mmlu_college_mathematics|5": 1,
- "harness|ko_mmlu_high_school_government_and_politics|5": 1,
- "harness|ko_mmlu_econometrics|5": 1,
- "harness|ko_mmlu_high_school_psychology|5": 1,
- "harness|ko_mmlu_formal_logic|5": 1,
- "harness|ko_mmlu_nutrition|5": 1,
- "harness|ko_mmlu_business_ethics|5": 1,
- "harness|ko_mmlu_international_law|5": 1,
- "harness|ko_mmlu_astronomy|5": 1,
- "harness|ko_mmlu_professional_psychology|5": 1,
- "harness|ko_mmlu_professional_accounting|5": 1,
- "harness|ko_mmlu_machine_learning|5": 1,
- "harness|ko_mmlu_high_school_statistics|5": 1,
- "harness|ko_mmlu_moral_scenarios|5": 1,
- "harness|ko_mmlu_college_computer_science|5": 1,
- "harness|ko_mmlu_high_school_computer_science|5": 1,
- "harness|ko_mmlu_professional_medicine|5": 1,
- "harness|ko_mmlu_security_studies|5": 1,
- "harness|ko_mmlu_high_school_world_history|5": 1,
- "harness|ko_mmlu_professional_law|5": 1,
- "harness|ko_mmlu_high_school_us_history|5": 1,
- "harness|ko_mmlu_high_school_european_history|5": 1,
- "harness|ko_truthfulqa_mc|0": 0,
- "harness|ko_commongen_v2|2": 1
- },
- "config_general": {
- "model_name": "skt/ko-gpt-trinity-1.2B-v0.5",
- "model_sha": "33f84c0da333d34533f0cfbe8f5972022d681e96",
- "model_dtype": "torch.float16",
- "lighteval_sha": "",
- "num_few_shot_default": 0,
- "num_fewshot_seeds": 1,
- "override_batch_size": 1,
- "max_samples": null
- }
- }
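
Each removed result file shares the layout shown above: a "results" map keyed by harness task (per-task acc/acc_stderr/acc_norm, or mc1/mc2 for ko_truthfulqa_mc), a "versions" map of task versions, and a "config_general" block identifying the evaluated model. As a minimal sketch, assuming a local copy of one of these files, the snippet below loads it and computes a plain macro-average over tasks; the path and the averaging rule (mean of acc_norm, falling back to mc2) are illustrative assumptions, not the leaderboard's own aggregation.

import json

# Hypothetical local copy of one removed file; adjust the path as needed.
path = "huggyllama/llama-13b/result_2023-09-27 04:58:53.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

scores = []
for task, metrics in data["results"].items():
    # ko_truthfulqa_mc reports mc1/mc2 instead of acc/acc_norm.
    score = metrics.get("acc_norm", metrics.get("mc2"))
    if score is not None:
        scores.append(score)

print(data["config_general"]["model_name"])
print(f"macro average over {len(scores)} tasks: {sum(scores) / len(scores):.4f}")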