open-ko-llm-bot committed
Commit 7768576
1 Parent(s): c857421

Add results for 2023-11-13 02:04:55

lcw99/llama2-ko-chang-13b-instruct-chat/result_2023-11-13 02:04:55.json CHANGED
@@ -2,87 +2,87 @@
  "results": {
  "harness|ko_arc_challenge|25": {
  "acc": 0.3916382252559727,
- "acc_stderr": 0.014264122124938215,
- "acc_norm": 0.46501706484641636,
- "acc_norm_stderr": 0.014575583922019677
+ "acc_stderr": 0.014264122124938218,
+ "acc_norm": 0.4667235494880546,
+ "acc_norm_stderr": 0.014578995859605818
  },
  "harness|ko_hellaswag|10": {
- "acc": 0.4447321250746863,
- "acc_stderr": 0.004959204773046197,
- "acc_norm": 0.5995817566221868,
- "acc_norm_stderr": 0.004889817489739683
+ "acc": 0.4404501095399323,
+ "acc_stderr": 0.004954265595373462,
+ "acc_norm": 0.5983867755427206,
+ "acc_norm_stderr": 0.004892226011836585
  },
  "harness|ko_mmlu_world_religions|5": {
- "acc": 0.4619883040935672,
- "acc_stderr": 0.03823727092882307,
- "acc_norm": 0.4619883040935672,
- "acc_norm_stderr": 0.03823727092882307
+ "acc": 0.4327485380116959,
+ "acc_stderr": 0.03799978644370607,
+ "acc_norm": 0.4327485380116959,
+ "acc_norm_stderr": 0.03799978644370607
  },
  "harness|ko_mmlu_management|5": {
- "acc": 0.5242718446601942,
- "acc_stderr": 0.049449010929737795,
- "acc_norm": 0.5242718446601942,
- "acc_norm_stderr": 0.049449010929737795
+ "acc": 0.5048543689320388,
+ "acc_stderr": 0.049505043821289195,
+ "acc_norm": 0.5048543689320388,
+ "acc_norm_stderr": 0.049505043821289195
  },
  "harness|ko_mmlu_miscellaneous|5": {
- "acc": 0.5325670498084292,
- "acc_stderr": 0.01784199575052087,
- "acc_norm": 0.5325670498084292,
- "acc_norm_stderr": 0.01784199575052087
+ "acc": 0.545338441890166,
+ "acc_stderr": 0.0178063045850526,
+ "acc_norm": 0.545338441890166,
+ "acc_norm_stderr": 0.0178063045850526
  },
  "harness|ko_mmlu_anatomy|5": {
- "acc": 0.45185185185185184,
- "acc_stderr": 0.04299268905480863,
- "acc_norm": 0.45185185185185184,
- "acc_norm_stderr": 0.04299268905480863
+ "acc": 0.4888888888888889,
+ "acc_stderr": 0.04318275491977976,
+ "acc_norm": 0.4888888888888889,
+ "acc_norm_stderr": 0.04318275491977976
  },
  "harness|ko_mmlu_abstract_algebra|5": {
- "acc": 0.32,
- "acc_stderr": 0.046882617226215034,
- "acc_norm": 0.32,
- "acc_norm_stderr": 0.046882617226215034
+ "acc": 0.3,
+ "acc_stderr": 0.046056618647183814,
+ "acc_norm": 0.3,
+ "acc_norm_stderr": 0.046056618647183814
  },
  "harness|ko_mmlu_conceptual_physics|5": {
- "acc": 0.3829787234042553,
- "acc_stderr": 0.03177821250236922,
- "acc_norm": 0.3829787234042553,
- "acc_norm_stderr": 0.03177821250236922
+ "acc": 0.42127659574468085,
+ "acc_stderr": 0.03227834510146268,
+ "acc_norm": 0.42127659574468085,
+ "acc_norm_stderr": 0.03227834510146268
  },
  "harness|ko_mmlu_virology|5": {
- "acc": 0.3855421686746988,
- "acc_stderr": 0.03789134424611548,
- "acc_norm": 0.3855421686746988,
- "acc_norm_stderr": 0.03789134424611548
+ "acc": 0.4036144578313253,
+ "acc_stderr": 0.03819486140758398,
+ "acc_norm": 0.4036144578313253,
+ "acc_norm_stderr": 0.03819486140758398
  },
  "harness|ko_mmlu_philosophy|5": {
- "acc": 0.5016077170418006,
- "acc_stderr": 0.02839794490780661,
- "acc_norm": 0.5016077170418006,
- "acc_norm_stderr": 0.02839794490780661
+ "acc": 0.4758842443729904,
+ "acc_stderr": 0.028365041542564577,
+ "acc_norm": 0.4758842443729904,
+ "acc_norm_stderr": 0.028365041542564577
  },
  "harness|ko_mmlu_human_aging|5": {
- "acc": 0.4798206278026906,
- "acc_stderr": 0.033530461674123,
- "acc_norm": 0.4798206278026906,
- "acc_norm_stderr": 0.033530461674123
+ "acc": 0.5067264573991032,
+ "acc_stderr": 0.03355476596234354,
+ "acc_norm": 0.5067264573991032,
+ "acc_norm_stderr": 0.03355476596234354
  },
  "harness|ko_mmlu_human_sexuality|5": {
- "acc": 0.4351145038167939,
- "acc_stderr": 0.04348208051644858,
- "acc_norm": 0.4351145038167939,
- "acc_norm_stderr": 0.04348208051644858
+ "acc": 0.48091603053435117,
+ "acc_stderr": 0.04382094705550988,
+ "acc_norm": 0.48091603053435117,
+ "acc_norm_stderr": 0.04382094705550988
  },
  "harness|ko_mmlu_medical_genetics|5": {
- "acc": 0.32,
- "acc_stderr": 0.046882617226215034,
- "acc_norm": 0.32,
- "acc_norm_stderr": 0.046882617226215034
+ "acc": 0.33,
+ "acc_stderr": 0.04725815626252606,
+ "acc_norm": 0.33,
+ "acc_norm_stderr": 0.04725815626252606
  },
  "harness|ko_mmlu_high_school_geography|5": {
- "acc": 0.494949494949495,
- "acc_stderr": 0.035621707606254015,
- "acc_norm": 0.494949494949495,
- "acc_norm_stderr": 0.035621707606254015
+ "acc": 0.4898989898989899,
+ "acc_stderr": 0.035616254886737454,
+ "acc_norm": 0.4898989898989899,
+ "acc_norm_stderr": 0.035616254886737454
  },
  "harness|ko_mmlu_electrical_engineering|5": {
  "acc": 0.43448275862068964,
@@ -97,274 +97,274 @@
  "acc_norm_stderr": 0.036186648199362466
  },
  "harness|ko_mmlu_high_school_microeconomics|5": {
- "acc": 0.37815126050420167,
- "acc_stderr": 0.031499305777849054,
- "acc_norm": 0.37815126050420167,
- "acc_norm_stderr": 0.031499305777849054
+ "acc": 0.4369747899159664,
+ "acc_stderr": 0.032219436365661956,
+ "acc_norm": 0.4369747899159664,
+ "acc_norm_stderr": 0.032219436365661956
  },
  "harness|ko_mmlu_high_school_macroeconomics|5": {
- "acc": 0.4205128205128205,
- "acc_stderr": 0.025028610276710855,
- "acc_norm": 0.4205128205128205,
- "acc_norm_stderr": 0.025028610276710855
+ "acc": 0.4256410256410256,
+ "acc_stderr": 0.02506909438729654,
+ "acc_norm": 0.4256410256410256,
+ "acc_norm_stderr": 0.02506909438729654
  },
  "harness|ko_mmlu_computer_security|5": {
- "acc": 0.53,
- "acc_stderr": 0.05016135580465919,
- "acc_norm": 0.53,
- "acc_norm_stderr": 0.05016135580465919
+ "acc": 0.54,
+ "acc_stderr": 0.05009082659620332,
+ "acc_norm": 0.54,
+ "acc_norm_stderr": 0.05009082659620332
  },
  "harness|ko_mmlu_global_facts|5": {
- "acc": 0.33,
- "acc_stderr": 0.047258156262526045,
- "acc_norm": 0.33,
- "acc_norm_stderr": 0.047258156262526045
+ "acc": 0.39,
+ "acc_stderr": 0.04902071300001975,
+ "acc_norm": 0.39,
+ "acc_norm_stderr": 0.04902071300001975
  },
  "harness|ko_mmlu_jurisprudence|5": {
- "acc": 0.48148148148148145,
- "acc_stderr": 0.04830366024635331,
- "acc_norm": 0.48148148148148145,
- "acc_norm_stderr": 0.04830366024635331
+ "acc": 0.49074074074074076,
+ "acc_stderr": 0.04832853553437055,
+ "acc_norm": 0.49074074074074076,
+ "acc_norm_stderr": 0.04832853553437055
  },
  "harness|ko_mmlu_high_school_chemistry|5": {
- "acc": 0.3793103448275862,
- "acc_stderr": 0.03413963805906235,
- "acc_norm": 0.3793103448275862,
- "acc_norm_stderr": 0.03413963805906235
+ "acc": 0.3891625615763547,
+ "acc_stderr": 0.034304624161038716,
+ "acc_norm": 0.3891625615763547,
+ "acc_norm_stderr": 0.034304624161038716
  },
  "harness|ko_mmlu_high_school_biology|5": {
- "acc": 0.44193548387096776,
- "acc_stderr": 0.02825155790684974,
- "acc_norm": 0.44193548387096776,
- "acc_norm_stderr": 0.02825155790684974
+ "acc": 0.432258064516129,
+ "acc_stderr": 0.028181739720019413,
+ "acc_norm": 0.432258064516129,
+ "acc_norm_stderr": 0.028181739720019413
  },
  "harness|ko_mmlu_marketing|5": {
- "acc": 0.5555555555555556,
- "acc_stderr": 0.03255326307272486,
- "acc_norm": 0.5555555555555556,
- "acc_norm_stderr": 0.03255326307272486
+ "acc": 0.5726495726495726,
+ "acc_stderr": 0.03240847393516327,
+ "acc_norm": 0.5726495726495726,
+ "acc_norm_stderr": 0.03240847393516327
  },
  "harness|ko_mmlu_clinical_knowledge|5": {
- "acc": 0.3849056603773585,
- "acc_stderr": 0.02994649856769995,
- "acc_norm": 0.3849056603773585,
- "acc_norm_stderr": 0.02994649856769995
+ "acc": 0.4226415094339623,
+ "acc_stderr": 0.030402331445769537,
+ "acc_norm": 0.4226415094339623,
+ "acc_norm_stderr": 0.030402331445769537
  },
  "harness|ko_mmlu_public_relations|5": {
- "acc": 0.43636363636363634,
- "acc_stderr": 0.04750185058907297,
- "acc_norm": 0.43636363636363634,
- "acc_norm_stderr": 0.04750185058907297
+ "acc": 0.4818181818181818,
+ "acc_stderr": 0.04785964010794917,
+ "acc_norm": 0.4818181818181818,
+ "acc_norm_stderr": 0.04785964010794917
  },
  "harness|ko_mmlu_high_school_mathematics|5": {
- "acc": 0.2814814814814815,
- "acc_stderr": 0.027420019350945273,
- "acc_norm": 0.2814814814814815,
- "acc_norm_stderr": 0.027420019350945273
+ "acc": 0.2777777777777778,
+ "acc_stderr": 0.027309140588230165,
+ "acc_norm": 0.2777777777777778,
+ "acc_norm_stderr": 0.027309140588230165
  },
  "harness|ko_mmlu_high_school_physics|5": {
- "acc": 0.25165562913907286,
- "acc_stderr": 0.03543304234389985,
- "acc_norm": 0.25165562913907286,
- "acc_norm_stderr": 0.03543304234389985
+ "acc": 0.2847682119205298,
+ "acc_stderr": 0.03684881521389023,
+ "acc_norm": 0.2847682119205298,
+ "acc_norm_stderr": 0.03684881521389023
  },
  "harness|ko_mmlu_sociology|5": {
- "acc": 0.47761194029850745,
- "acc_stderr": 0.03531987930208731,
- "acc_norm": 0.47761194029850745,
- "acc_norm_stderr": 0.03531987930208731
+ "acc": 0.5174129353233831,
+ "acc_stderr": 0.03533389234739245,
+ "acc_norm": 0.5174129353233831,
+ "acc_norm_stderr": 0.03533389234739245
  },
  "harness|ko_mmlu_college_medicine|5": {
- "acc": 0.3930635838150289,
- "acc_stderr": 0.037242495958177295,
- "acc_norm": 0.3930635838150289,
- "acc_norm_stderr": 0.037242495958177295
+ "acc": 0.41040462427745666,
+ "acc_stderr": 0.03750757044895537,
+ "acc_norm": 0.41040462427745666,
+ "acc_norm_stderr": 0.03750757044895537
  },
  "harness|ko_mmlu_elementary_mathematics|5": {
- "acc": 0.2830687830687831,
- "acc_stderr": 0.023201392938194978,
- "acc_norm": 0.2830687830687831,
- "acc_norm_stderr": 0.023201392938194978
+ "acc": 0.30158730158730157,
+ "acc_stderr": 0.023636975996101813,
+ "acc_norm": 0.30158730158730157,
+ "acc_norm_stderr": 0.023636975996101813
  },
  "harness|ko_mmlu_college_biology|5": {
- "acc": 0.3541666666666667,
- "acc_stderr": 0.039994111357535424,
- "acc_norm": 0.3541666666666667,
- "acc_norm_stderr": 0.039994111357535424
+ "acc": 0.3333333333333333,
+ "acc_stderr": 0.039420826399272135,
+ "acc_norm": 0.3333333333333333,
+ "acc_norm_stderr": 0.039420826399272135
  },
  "harness|ko_mmlu_college_chemistry|5": {
- "acc": 0.3,
- "acc_stderr": 0.046056618647183814,
- "acc_norm": 0.3,
- "acc_norm_stderr": 0.046056618647183814
+ "acc": 0.33,
+ "acc_stderr": 0.047258156262526045,
+ "acc_norm": 0.33,
+ "acc_norm_stderr": 0.047258156262526045
  },
  "harness|ko_mmlu_us_foreign_policy|5": {
- "acc": 0.58,
- "acc_stderr": 0.049604496374885836,
- "acc_norm": 0.58,
- "acc_norm_stderr": 0.049604496374885836
+ "acc": 0.61,
+ "acc_stderr": 0.04902071300001975,
+ "acc_norm": 0.61,
+ "acc_norm_stderr": 0.04902071300001975
  },
  "harness|ko_mmlu_moral_disputes|5": {
- "acc": 0.49421965317919075,
- "acc_stderr": 0.026917296179149123,
- "acc_norm": 0.49421965317919075,
- "acc_norm_stderr": 0.026917296179149123
+ "acc": 0.5,
+ "acc_stderr": 0.026919095102908273,
+ "acc_norm": 0.5,
+ "acc_norm_stderr": 0.026919095102908273
  },
  "harness|ko_mmlu_logical_fallacies|5": {
- "acc": 0.4723926380368098,
- "acc_stderr": 0.039223782906109894,
- "acc_norm": 0.4723926380368098,
- "acc_norm_stderr": 0.039223782906109894
+ "acc": 0.49079754601226994,
+ "acc_stderr": 0.03927705600787443,
+ "acc_norm": 0.49079754601226994,
+ "acc_norm_stderr": 0.03927705600787443
  },
  "harness|ko_mmlu_prehistory|5": {
- "acc": 0.5061728395061729,
- "acc_stderr": 0.02781862396258329,
- "acc_norm": 0.5061728395061729,
- "acc_norm_stderr": 0.02781862396258329
+ "acc": 0.49691358024691357,
+ "acc_stderr": 0.027820214158594377,
+ "acc_norm": 0.49691358024691357,
+ "acc_norm_stderr": 0.027820214158594377
  },
  "harness|ko_mmlu_college_mathematics|5": {
- "acc": 0.27,
- "acc_stderr": 0.0446196043338474,
- "acc_norm": 0.27,
- "acc_norm_stderr": 0.0446196043338474
+ "acc": 0.29,
+ "acc_stderr": 0.045604802157206845,
+ "acc_norm": 0.29,
+ "acc_norm_stderr": 0.045604802157206845
  },
  "harness|ko_mmlu_high_school_government_and_politics|5": {
- "acc": 0.48704663212435234,
- "acc_stderr": 0.03607228061047749,
- "acc_norm": 0.48704663212435234,
- "acc_norm_stderr": 0.03607228061047749
+ "acc": 0.49740932642487046,
+ "acc_stderr": 0.03608390745384487,
+ "acc_norm": 0.49740932642487046,
+ "acc_norm_stderr": 0.03608390745384487
  },
  "harness|ko_mmlu_econometrics|5": {
- "acc": 0.2719298245614035,
- "acc_stderr": 0.04185774424022056,
- "acc_norm": 0.2719298245614035,
- "acc_norm_stderr": 0.04185774424022056
+ "acc": 0.21929824561403508,
+ "acc_stderr": 0.03892431106518753,
+ "acc_norm": 0.21929824561403508,
+ "acc_norm_stderr": 0.03892431106518753
  },
  "harness|ko_mmlu_high_school_psychology|5": {
- "acc": 0.5100917431192661,
- "acc_stderr": 0.021432956203453327,
- "acc_norm": 0.5100917431192661,
- "acc_norm_stderr": 0.021432956203453327
+ "acc": 0.5339449541284403,
+ "acc_stderr": 0.021387863350353985,
+ "acc_norm": 0.5339449541284403,
+ "acc_norm_stderr": 0.021387863350353985
  },
  "harness|ko_mmlu_formal_logic|5": {
- "acc": 0.23015873015873015,
- "acc_stderr": 0.03764950879790606,
- "acc_norm": 0.23015873015873015,
- "acc_norm_stderr": 0.03764950879790606
+ "acc": 0.2619047619047619,
+ "acc_stderr": 0.0393253768039287,
+ "acc_norm": 0.2619047619047619,
+ "acc_norm_stderr": 0.0393253768039287
  },
  "harness|ko_mmlu_nutrition|5": {
- "acc": 0.3888888888888889,
- "acc_stderr": 0.027914055510468,
- "acc_norm": 0.3888888888888889,
- "acc_norm_stderr": 0.027914055510468
+ "acc": 0.4150326797385621,
+ "acc_stderr": 0.028213504177824093,
+ "acc_norm": 0.4150326797385621,
+ "acc_norm_stderr": 0.028213504177824093
  },
  "harness|ko_mmlu_business_ethics|5": {
- "acc": 0.39,
- "acc_stderr": 0.04902071300001975,
- "acc_norm": 0.39,
- "acc_norm_stderr": 0.04902071300001975
+ "acc": 0.4,
+ "acc_stderr": 0.049236596391733084,
+ "acc_norm": 0.4,
+ "acc_norm_stderr": 0.049236596391733084
  },
  "harness|ko_mmlu_international_law|5": {
- "acc": 0.6033057851239669,
- "acc_stderr": 0.044658697805310094,
- "acc_norm": 0.6033057851239669,
- "acc_norm_stderr": 0.044658697805310094
+ "acc": 0.5785123966942148,
+ "acc_stderr": 0.04507732278775087,
+ "acc_norm": 0.5785123966942148,
+ "acc_norm_stderr": 0.04507732278775087
  },
  "harness|ko_mmlu_astronomy|5": {
- "acc": 0.40131578947368424,
- "acc_stderr": 0.03988903703336284,
- "acc_norm": 0.40131578947368424,
- "acc_norm_stderr": 0.03988903703336284
+ "acc": 0.42105263157894735,
+ "acc_stderr": 0.04017901275981748,
+ "acc_norm": 0.42105263157894735,
+ "acc_norm_stderr": 0.04017901275981748
  },
  "harness|ko_mmlu_professional_psychology|5": {
- "acc": 0.3741830065359477,
- "acc_stderr": 0.019576953122088844,
- "acc_norm": 0.3741830065359477,
- "acc_norm_stderr": 0.019576953122088844
+ "acc": 0.37254901960784315,
+ "acc_stderr": 0.01955964680921593,
+ "acc_norm": 0.37254901960784315,
+ "acc_norm_stderr": 0.01955964680921593
  },
  "harness|ko_mmlu_professional_accounting|5": {
- "acc": 0.30141843971631205,
- "acc_stderr": 0.02737412888263115,
- "acc_norm": 0.30141843971631205,
- "acc_norm_stderr": 0.02737412888263115
+ "acc": 0.3120567375886525,
+ "acc_stderr": 0.02764012054516993,
+ "acc_norm": 0.3120567375886525,
+ "acc_norm_stderr": 0.02764012054516993
  },
  "harness|ko_mmlu_machine_learning|5": {
- "acc": 0.29464285714285715,
- "acc_stderr": 0.0432704093257873,
- "acc_norm": 0.29464285714285715,
- "acc_norm_stderr": 0.0432704093257873
+ "acc": 0.2857142857142857,
+ "acc_stderr": 0.042878587513404565,
+ "acc_norm": 0.2857142857142857,
+ "acc_norm_stderr": 0.042878587513404565
  },
  "harness|ko_mmlu_high_school_statistics|5": {
- "acc": 0.2777777777777778,
- "acc_stderr": 0.03054674526495319,
- "acc_norm": 0.2777777777777778,
- "acc_norm_stderr": 0.03054674526495319
+ "acc": 0.30092592592592593,
+ "acc_stderr": 0.031280390843298804,
+ "acc_norm": 0.30092592592592593,
+ "acc_norm_stderr": 0.031280390843298804
  },
  "harness|ko_mmlu_moral_scenarios|5": {
- "acc": 0.24804469273743016,
- "acc_stderr": 0.014444157808261453,
- "acc_norm": 0.24804469273743016,
- "acc_norm_stderr": 0.014444157808261453
+ "acc": 0.25139664804469275,
+ "acc_stderr": 0.014508979453553976,
+ "acc_norm": 0.25139664804469275,
+ "acc_norm_stderr": 0.014508979453553976
  },
  "harness|ko_mmlu_college_computer_science|5": {
- "acc": 0.4,
- "acc_stderr": 0.04923659639173309,
- "acc_norm": 0.4,
- "acc_norm_stderr": 0.04923659639173309
+ "acc": 0.37,
+ "acc_stderr": 0.04852365870939099,
+ "acc_norm": 0.37,
+ "acc_norm_stderr": 0.04852365870939099
  },
  "harness|ko_mmlu_high_school_computer_science|5": {
- "acc": 0.56,
- "acc_stderr": 0.049888765156985884,
- "acc_norm": 0.56,
- "acc_norm_stderr": 0.049888765156985884
+ "acc": 0.5,
+ "acc_stderr": 0.050251890762960605,
+ "acc_norm": 0.5,
+ "acc_norm_stderr": 0.050251890762960605
  },
  "harness|ko_mmlu_professional_medicine|5": {
- "acc": 0.29411764705882354,
- "acc_stderr": 0.027678468642144696,
- "acc_norm": 0.29411764705882354,
- "acc_norm_stderr": 0.027678468642144696
+ "acc": 0.3786764705882353,
+ "acc_stderr": 0.02946513363977613,
+ "acc_norm": 0.3786764705882353,
+ "acc_norm_stderr": 0.02946513363977613
  },
  "harness|ko_mmlu_security_studies|5": {
- "acc": 0.46122448979591835,
- "acc_stderr": 0.03191282052669277,
- "acc_norm": 0.46122448979591835,
- "acc_norm_stderr": 0.03191282052669277
+ "acc": 0.4816326530612245,
+ "acc_stderr": 0.03198761546763126,
+ "acc_norm": 0.4816326530612245,
+ "acc_norm_stderr": 0.03198761546763126
  },
  "harness|ko_mmlu_high_school_world_history|5": {
- "acc": 0.5780590717299579,
- "acc_stderr": 0.032148146302403695,
- "acc_norm": 0.5780590717299579,
- "acc_norm_stderr": 0.032148146302403695
+ "acc": 0.6497890295358649,
+ "acc_stderr": 0.031052391937584346,
+ "acc_norm": 0.6497890295358649,
+ "acc_norm_stderr": 0.031052391937584346
  },
  "harness|ko_mmlu_professional_law|5": {
- "acc": 0.31421121251629724,
- "acc_stderr": 0.011855911587048228,
- "acc_norm": 0.31421121251629724,
- "acc_norm_stderr": 0.011855911587048228
+ "acc": 0.30638852672750977,
+ "acc_stderr": 0.011773980329380726,
+ "acc_norm": 0.30638852672750977,
+ "acc_norm_stderr": 0.011773980329380726
  },
  "harness|ko_mmlu_high_school_us_history|5": {
- "acc": 0.4411764705882353,
- "acc_stderr": 0.03484941514429231,
- "acc_norm": 0.4411764705882353,
- "acc_norm_stderr": 0.03484941514429231
+ "acc": 0.46078431372549017,
+ "acc_stderr": 0.03498501649369527,
+ "acc_norm": 0.46078431372549017,
+ "acc_norm_stderr": 0.03498501649369527
  },
  "harness|ko_mmlu_high_school_european_history|5": {
- "acc": 0.5454545454545454,
- "acc_stderr": 0.03888176921674101,
- "acc_norm": 0.5454545454545454,
- "acc_norm_stderr": 0.03888176921674101
+ "acc": 0.5393939393939394,
+ "acc_stderr": 0.03892207016552012,
+ "acc_norm": 0.5393939393939394,
+ "acc_norm_stderr": 0.03892207016552012
  },
  "harness|ko_truthfulqa_mc|0": {
- "mc1": 0.3108935128518972,
- "mc1_stderr": 0.016203316673559696,
- "mc2": 0.47797395322509245,
- "mc2_stderr": 0.015295300677969451
+ "mc1": 0.3292533659730722,
+ "mc1_stderr": 0.016451264440068246,
+ "mc2": 0.4970299025244721,
+ "mc2_stderr": 0.01555960496501192
  },
  "harness|ko_commongen_v2|2": {
- "acc": 0.3919716646989374,
- "acc_stderr": 0.016784332119424088,
- "acc_norm": 0.4852420306965762,
- "acc_norm_stderr": 0.017182864434998564
+ "acc": 0.43683589138134593,
+ "acc_stderr": 0.01705263355985608,
+ "acc_norm": 0.5076741440377804,
+ "acc_norm_stderr": 0.017188329219654273
  }
  },
  "versions": {