choco9966 committed on
Commit 0c511da
1 Parent(s): 0e48229

Delete KT-AI

KT-AI/midm-bitext-S-7B-inst-v1/result_2023-10-29 20:13:00.json DELETED
@@ -1,444 +0,0 @@
- {
-     "results": {
-         "harness|ko_arc_challenge|25": {
-             "acc": 0.3447098976109215,
-             "acc_stderr": 0.013888816286782112,
-             "acc_norm": 0.4129692832764505,
-             "acc_norm_stderr": 0.014388344935398324
-         },
-         "harness|ko_hellaswag|10": {
-             "acc": 0.4164509061939853,
-             "acc_stderr": 0.004919626380645514,
-             "acc_norm": 0.5483967337183828,
-             "acc_norm_stderr": 0.004966351835028203
-         },
-         "harness|ko_mmlu_world_religions|5": {
-             "acc": 0.5321637426900585,
-             "acc_stderr": 0.03826882417660368,
-             "acc_norm": 0.5321637426900585,
-             "acc_norm_stderr": 0.03826882417660368
-         },
-         "harness|ko_mmlu_management|5": {
-             "acc": 0.5048543689320388,
-             "acc_stderr": 0.049505043821289195,
-             "acc_norm": 0.5048543689320388,
-             "acc_norm_stderr": 0.049505043821289195
-         },
-         "harness|ko_mmlu_miscellaneous|5": {
-             "acc": 0.5504469987228607,
-             "acc_stderr": 0.017788725283507337,
-             "acc_norm": 0.5504469987228607,
-             "acc_norm_stderr": 0.017788725283507337
-         },
-         "harness|ko_mmlu_anatomy|5": {
-             "acc": 0.45925925925925926,
-             "acc_stderr": 0.04304979692464244,
-             "acc_norm": 0.45925925925925926,
-             "acc_norm_stderr": 0.04304979692464244
-         },
-         "harness|ko_mmlu_abstract_algebra|5": {
-             "acc": 0.36,
-             "acc_stderr": 0.048241815132442176,
-             "acc_norm": 0.36,
-             "acc_norm_stderr": 0.048241815132442176
-         },
-         "harness|ko_mmlu_conceptual_physics|5": {
-             "acc": 0.3659574468085106,
-             "acc_stderr": 0.031489558297455304,
-             "acc_norm": 0.3659574468085106,
-             "acc_norm_stderr": 0.031489558297455304
-         },
-         "harness|ko_mmlu_virology|5": {
-             "acc": 0.3614457831325301,
-             "acc_stderr": 0.0374005938202932,
-             "acc_norm": 0.3614457831325301,
-             "acc_norm_stderr": 0.0374005938202932
-         },
-         "harness|ko_mmlu_philosophy|5": {
-             "acc": 0.4662379421221865,
-             "acc_stderr": 0.028333277109562807,
-             "acc_norm": 0.4662379421221865,
-             "acc_norm_stderr": 0.028333277109562807
-         },
-         "harness|ko_mmlu_human_aging|5": {
-             "acc": 0.47085201793721976,
-             "acc_stderr": 0.03350073248773403,
-             "acc_norm": 0.47085201793721976,
-             "acc_norm_stderr": 0.03350073248773403
-         },
-         "harness|ko_mmlu_human_sexuality|5": {
-             "acc": 0.5190839694656488,
-             "acc_stderr": 0.04382094705550988,
-             "acc_norm": 0.5190839694656488,
-             "acc_norm_stderr": 0.04382094705550988
-         },
-         "harness|ko_mmlu_medical_genetics|5": {
-             "acc": 0.46,
-             "acc_stderr": 0.05009082659620332,
-             "acc_norm": 0.46,
-             "acc_norm_stderr": 0.05009082659620332
-         },
-         "harness|ko_mmlu_high_school_geography|5": {
-             "acc": 0.5909090909090909,
-             "acc_stderr": 0.03502975799413007,
-             "acc_norm": 0.5909090909090909,
-             "acc_norm_stderr": 0.03502975799413007
-         },
-         "harness|ko_mmlu_electrical_engineering|5": {
-             "acc": 0.4206896551724138,
-             "acc_stderr": 0.0411391498118926,
-             "acc_norm": 0.4206896551724138,
-             "acc_norm_stderr": 0.0411391498118926
-         },
-         "harness|ko_mmlu_college_physics|5": {
-             "acc": 0.2647058823529412,
-             "acc_stderr": 0.04389869956808778,
-             "acc_norm": 0.2647058823529412,
-             "acc_norm_stderr": 0.04389869956808778
-         },
-         "harness|ko_mmlu_high_school_microeconomics|5": {
-             "acc": 0.42857142857142855,
-             "acc_stderr": 0.032145368597886394,
-             "acc_norm": 0.42857142857142855,
-             "acc_norm_stderr": 0.032145368597886394
-         },
-         "harness|ko_mmlu_high_school_macroeconomics|5": {
-             "acc": 0.43333333333333335,
-             "acc_stderr": 0.025124653525885134,
-             "acc_norm": 0.43333333333333335,
-             "acc_norm_stderr": 0.025124653525885134
-         },
-         "harness|ko_mmlu_computer_security|5": {
-             "acc": 0.56,
-             "acc_stderr": 0.04988876515698589,
-             "acc_norm": 0.56,
-             "acc_norm_stderr": 0.04988876515698589
-         },
-         "harness|ko_mmlu_global_facts|5": {
-             "acc": 0.24,
-             "acc_stderr": 0.042923469599092816,
-             "acc_norm": 0.24,
-             "acc_norm_stderr": 0.042923469599092816
-         },
-         "harness|ko_mmlu_jurisprudence|5": {
-             "acc": 0.5,
-             "acc_stderr": 0.04833682445228318,
-             "acc_norm": 0.5,
-             "acc_norm_stderr": 0.04833682445228318
-         },
-         "harness|ko_mmlu_high_school_chemistry|5": {
-             "acc": 0.3054187192118227,
-             "acc_stderr": 0.032406615658684086,
-             "acc_norm": 0.3054187192118227,
-             "acc_norm_stderr": 0.032406615658684086
-         },
-         "harness|ko_mmlu_high_school_biology|5": {
-             "acc": 0.44516129032258067,
-             "acc_stderr": 0.028272410186214906,
-             "acc_norm": 0.44516129032258067,
-             "acc_norm_stderr": 0.028272410186214906
-         },
-         "harness|ko_mmlu_marketing|5": {
-             "acc": 0.6452991452991453,
-             "acc_stderr": 0.03134250486245402,
-             "acc_norm": 0.6452991452991453,
-             "acc_norm_stderr": 0.03134250486245402
-         },
-         "harness|ko_mmlu_clinical_knowledge|5": {
-             "acc": 0.45660377358490567,
-             "acc_stderr": 0.03065674869673943,
-             "acc_norm": 0.45660377358490567,
-             "acc_norm_stderr": 0.03065674869673943
-         },
-         "harness|ko_mmlu_public_relations|5": {
-             "acc": 0.5272727272727272,
-             "acc_stderr": 0.04782001791380061,
-             "acc_norm": 0.5272727272727272,
-             "acc_norm_stderr": 0.04782001791380061
-         },
-         "harness|ko_mmlu_high_school_mathematics|5": {
-             "acc": 0.22962962962962963,
-             "acc_stderr": 0.025644108639267638,
-             "acc_norm": 0.22962962962962963,
-             "acc_norm_stderr": 0.025644108639267638
-         },
-         "harness|ko_mmlu_high_school_physics|5": {
-             "acc": 0.23178807947019867,
-             "acc_stderr": 0.034454062719870546,
-             "acc_norm": 0.23178807947019867,
-             "acc_norm_stderr": 0.034454062719870546
-         },
-         "harness|ko_mmlu_sociology|5": {
-             "acc": 0.5970149253731343,
-             "acc_stderr": 0.03468343295111126,
-             "acc_norm": 0.5970149253731343,
-             "acc_norm_stderr": 0.03468343295111126
-         },
-         "harness|ko_mmlu_college_medicine|5": {
-             "acc": 0.4277456647398844,
-             "acc_stderr": 0.03772446857518027,
-             "acc_norm": 0.4277456647398844,
-             "acc_norm_stderr": 0.03772446857518027
-         },
-         "harness|ko_mmlu_elementary_mathematics|5": {
-             "acc": 0.30423280423280424,
-             "acc_stderr": 0.023695415009463087,
-             "acc_norm": 0.30423280423280424,
-             "acc_norm_stderr": 0.023695415009463087
-         },
-         "harness|ko_mmlu_college_biology|5": {
-             "acc": 0.3333333333333333,
-             "acc_stderr": 0.039420826399272135,
-             "acc_norm": 0.3333333333333333,
-             "acc_norm_stderr": 0.039420826399272135
-         },
-         "harness|ko_mmlu_college_chemistry|5": {
-             "acc": 0.28,
-             "acc_stderr": 0.04512608598542129,
-             "acc_norm": 0.28,
-             "acc_norm_stderr": 0.04512608598542129
-         },
-         "harness|ko_mmlu_us_foreign_policy|5": {
-             "acc": 0.63,
-             "acc_stderr": 0.04852365870939099,
-             "acc_norm": 0.63,
-             "acc_norm_stderr": 0.04852365870939099
-         },
-         "harness|ko_mmlu_moral_disputes|5": {
-             "acc": 0.4161849710982659,
-             "acc_stderr": 0.026538189104705474,
-             "acc_norm": 0.4161849710982659,
-             "acc_norm_stderr": 0.026538189104705474
-         },
-         "harness|ko_mmlu_logical_fallacies|5": {
-             "acc": 0.43558282208588955,
-             "acc_stderr": 0.03895632464138937,
-             "acc_norm": 0.43558282208588955,
-             "acc_norm_stderr": 0.03895632464138937
-         },
-         "harness|ko_mmlu_prehistory|5": {
-             "acc": 0.4444444444444444,
-             "acc_stderr": 0.027648477877413324,
-             "acc_norm": 0.4444444444444444,
-             "acc_norm_stderr": 0.027648477877413324
-         },
-         "harness|ko_mmlu_college_mathematics|5": {
-             "acc": 0.34,
-             "acc_stderr": 0.047609522856952344,
-             "acc_norm": 0.34,
-             "acc_norm_stderr": 0.047609522856952344
-         },
-         "harness|ko_mmlu_high_school_government_and_politics|5": {
-             "acc": 0.538860103626943,
-             "acc_stderr": 0.035975244117345775,
-             "acc_norm": 0.538860103626943,
-             "acc_norm_stderr": 0.035975244117345775
-         },
-         "harness|ko_mmlu_econometrics|5": {
-             "acc": 0.24561403508771928,
-             "acc_stderr": 0.04049339297748141,
-             "acc_norm": 0.24561403508771928,
-             "acc_norm_stderr": 0.04049339297748141
-         },
-         "harness|ko_mmlu_high_school_psychology|5": {
-             "acc": 0.5761467889908257,
-             "acc_stderr": 0.021187263209087523,
-             "acc_norm": 0.5761467889908257,
-             "acc_norm_stderr": 0.021187263209087523
-         },
-         "harness|ko_mmlu_formal_logic|5": {
-             "acc": 0.3888888888888889,
-             "acc_stderr": 0.04360314860077459,
-             "acc_norm": 0.3888888888888889,
-             "acc_norm_stderr": 0.04360314860077459
-         },
-         "harness|ko_mmlu_nutrition|5": {
-             "acc": 0.45751633986928103,
-             "acc_stderr": 0.028526383452142635,
-             "acc_norm": 0.45751633986928103,
-             "acc_norm_stderr": 0.028526383452142635
-         },
-         "harness|ko_mmlu_business_ethics|5": {
-             "acc": 0.42,
-             "acc_stderr": 0.049604496374885836,
-             "acc_norm": 0.42,
-             "acc_norm_stderr": 0.049604496374885836
-         },
-         "harness|ko_mmlu_international_law|5": {
-             "acc": 0.512396694214876,
-             "acc_stderr": 0.04562951548180765,
-             "acc_norm": 0.512396694214876,
-             "acc_norm_stderr": 0.04562951548180765
-         },
-         "harness|ko_mmlu_astronomy|5": {
-             "acc": 0.4342105263157895,
-             "acc_stderr": 0.040335656678483205,
-             "acc_norm": 0.4342105263157895,
-             "acc_norm_stderr": 0.040335656678483205
-         },
-         "harness|ko_mmlu_professional_psychology|5": {
-             "acc": 0.36764705882352944,
-             "acc_stderr": 0.019506291693954854,
-             "acc_norm": 0.36764705882352944,
-             "acc_norm_stderr": 0.019506291693954854
-         },
-         "harness|ko_mmlu_professional_accounting|5": {
-             "acc": 0.3475177304964539,
-             "acc_stderr": 0.028406627809590947,
-             "acc_norm": 0.3475177304964539,
-             "acc_norm_stderr": 0.028406627809590947
-         },
-         "harness|ko_mmlu_machine_learning|5": {
-             "acc": 0.30357142857142855,
-             "acc_stderr": 0.04364226155841044,
-             "acc_norm": 0.30357142857142855,
-             "acc_norm_stderr": 0.04364226155841044
-         },
-         "harness|ko_mmlu_high_school_statistics|5": {
-             "acc": 0.37962962962962965,
-             "acc_stderr": 0.03309682581119035,
-             "acc_norm": 0.37962962962962965,
-             "acc_norm_stderr": 0.03309682581119035
-         },
-         "harness|ko_mmlu_moral_scenarios|5": {
-             "acc": 0.2916201117318436,
-             "acc_stderr": 0.015201032512520437,
-             "acc_norm": 0.2916201117318436,
-             "acc_norm_stderr": 0.015201032512520437
-         },
-         "harness|ko_mmlu_college_computer_science|5": {
-             "acc": 0.34,
-             "acc_stderr": 0.04760952285695235,
-             "acc_norm": 0.34,
-             "acc_norm_stderr": 0.04760952285695235
-         },
-         "harness|ko_mmlu_high_school_computer_science|5": {
-             "acc": 0.38,
-             "acc_stderr": 0.048783173121456316,
-             "acc_norm": 0.38,
-             "acc_norm_stderr": 0.048783173121456316
-         },
-         "harness|ko_mmlu_professional_medicine|5": {
-             "acc": 0.4227941176470588,
-             "acc_stderr": 0.03000856284500348,
-             "acc_norm": 0.4227941176470588,
-             "acc_norm_stderr": 0.03000856284500348
-         },
-         "harness|ko_mmlu_security_studies|5": {
-             "acc": 0.37551020408163266,
-             "acc_stderr": 0.031001209039894843,
-             "acc_norm": 0.37551020408163266,
-             "acc_norm_stderr": 0.031001209039894843
-         },
-         "harness|ko_mmlu_high_school_world_history|5": {
-             "acc": 0.5654008438818565,
-             "acc_stderr": 0.03226759995510145,
-             "acc_norm": 0.5654008438818565,
-             "acc_norm_stderr": 0.03226759995510145
-         },
-         "harness|ko_mmlu_professional_law|5": {
-             "acc": 0.318122555410691,
-             "acc_stderr": 0.01189540728110412,
-             "acc_norm": 0.318122555410691,
-             "acc_norm_stderr": 0.01189540728110412
-         },
-         "harness|ko_mmlu_high_school_us_history|5": {
-             "acc": 0.2549019607843137,
-             "acc_stderr": 0.030587591351604243,
-             "acc_norm": 0.2549019607843137,
-             "acc_norm_stderr": 0.030587591351604243
-         },
-         "harness|ko_mmlu_high_school_european_history|5": {
-             "acc": 0.23030303030303031,
-             "acc_stderr": 0.032876667586034886,
-             "acc_norm": 0.23030303030303031,
-             "acc_norm_stderr": 0.032876667586034886
-         },
-         "harness|ko_truthfulqa_mc|0": {
-             "mc1": 0.2937576499388005,
-             "mc1_stderr": 0.015945068581236618,
-             "mc2": 0.4608476284919872,
-             "mc2_stderr": 0.0153801623360934
-         },
-         "harness|ko_commongen_v2|2": {
-             "acc": 0.5737898465171193,
-             "acc_stderr": 0.01700212260948926,
-             "acc_norm": 0.5879574970484062,
-             "acc_norm_stderr": 0.01692227673852836
-         }
-     },
-     "versions": {
-         "all": 0,
-         "harness|ko_arc_challenge|25": 0,
-         "harness|ko_hellaswag|10": 0,
-         "harness|ko_mmlu_world_religions|5": 1,
-         "harness|ko_mmlu_management|5": 1,
-         "harness|ko_mmlu_miscellaneous|5": 1,
-         "harness|ko_mmlu_anatomy|5": 1,
-         "harness|ko_mmlu_abstract_algebra|5": 1,
-         "harness|ko_mmlu_conceptual_physics|5": 1,
-         "harness|ko_mmlu_virology|5": 1,
-         "harness|ko_mmlu_philosophy|5": 1,
-         "harness|ko_mmlu_human_aging|5": 1,
-         "harness|ko_mmlu_human_sexuality|5": 1,
-         "harness|ko_mmlu_medical_genetics|5": 1,
-         "harness|ko_mmlu_high_school_geography|5": 1,
-         "harness|ko_mmlu_electrical_engineering|5": 1,
-         "harness|ko_mmlu_college_physics|5": 1,
-         "harness|ko_mmlu_high_school_microeconomics|5": 1,
-         "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-         "harness|ko_mmlu_computer_security|5": 1,
-         "harness|ko_mmlu_global_facts|5": 1,
-         "harness|ko_mmlu_jurisprudence|5": 1,
-         "harness|ko_mmlu_high_school_chemistry|5": 1,
-         "harness|ko_mmlu_high_school_biology|5": 1,
-         "harness|ko_mmlu_marketing|5": 1,
-         "harness|ko_mmlu_clinical_knowledge|5": 1,
-         "harness|ko_mmlu_public_relations|5": 1,
-         "harness|ko_mmlu_high_school_mathematics|5": 1,
-         "harness|ko_mmlu_high_school_physics|5": 1,
-         "harness|ko_mmlu_sociology|5": 1,
-         "harness|ko_mmlu_college_medicine|5": 1,
-         "harness|ko_mmlu_elementary_mathematics|5": 1,
-         "harness|ko_mmlu_college_biology|5": 1,
-         "harness|ko_mmlu_college_chemistry|5": 1,
-         "harness|ko_mmlu_us_foreign_policy|5": 1,
-         "harness|ko_mmlu_moral_disputes|5": 1,
-         "harness|ko_mmlu_logical_fallacies|5": 1,
-         "harness|ko_mmlu_prehistory|5": 1,
-         "harness|ko_mmlu_college_mathematics|5": 1,
-         "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-         "harness|ko_mmlu_econometrics|5": 1,
-         "harness|ko_mmlu_high_school_psychology|5": 1,
-         "harness|ko_mmlu_formal_logic|5": 1,
-         "harness|ko_mmlu_nutrition|5": 1,
-         "harness|ko_mmlu_business_ethics|5": 1,
-         "harness|ko_mmlu_international_law|5": 1,
-         "harness|ko_mmlu_astronomy|5": 1,
-         "harness|ko_mmlu_professional_psychology|5": 1,
-         "harness|ko_mmlu_professional_accounting|5": 1,
-         "harness|ko_mmlu_machine_learning|5": 1,
-         "harness|ko_mmlu_high_school_statistics|5": 1,
-         "harness|ko_mmlu_moral_scenarios|5": 1,
-         "harness|ko_mmlu_college_computer_science|5": 1,
-         "harness|ko_mmlu_high_school_computer_science|5": 1,
-         "harness|ko_mmlu_professional_medicine|5": 1,
-         "harness|ko_mmlu_security_studies|5": 1,
-         "harness|ko_mmlu_high_school_world_history|5": 1,
-         "harness|ko_mmlu_professional_law|5": 1,
-         "harness|ko_mmlu_high_school_us_history|5": 1,
-         "harness|ko_mmlu_high_school_european_history|5": 1,
-         "harness|ko_truthfulqa_mc|0": 0,
-         "harness|ko_commongen_v2|2": 1
-     },
-     "config_general": {
-         "model_name": "KT-AI/midm-bitext-S-7B-inst-v1",
-         "model_sha": "401838023f9ce8b7b3ff260fd1b4f971cd280bc5",
-         "model_dtype": "torch.float16",
-         "lighteval_sha": "",
-         "num_few_shot_default": 0,
-         "num_fewshot_seeds": 1,
-         "override_batch_size": 1,
-         "max_samples": null
-     }
- }
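
For reference, the deleted file follows the usual lighteval results layout: a "results" map keyed by "harness|<task>|<n_fewshot>", a "versions" map with per-task schema versions, and a "config_general" block describing the evaluated model. A minimal sketch of reading such a file and macro-averaging its scores is shown below; the local file path, and the choice to prefer "acc_norm" and fall back to "mc2" for TruthfulQA, are illustrative assumptions, not part of this commit.

    import json

    # Path is hypothetical; point it at a local copy of a results file
    # in the layout shown above.
    with open("result_2023-10-29 20:13:00.json") as f:
        data = json.load(f)

    scores = []
    for task, metrics in data["results"].items():
        # Prefer length-normalized accuracy when present; ko_truthfulqa_mc
        # reports mc1/mc2 instead, so fall back to mc2 there.
        score = metrics.get("acc_norm", metrics.get("mc2"))
        if score is not None:
            scores.append(score)

    print(f"macro-average over {len(scores)} tasks: {sum(scores) / len(scores):.4f}")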