barthfab committed on
Commit
d262ac5
1 Parent(s): 84463de

Delete cstr/llama3-8b-spaetzle-v33

cstr/llama3-8b-spaetzle-v33/results_2024_06_03T12-24-12.json DELETED
@@ -1,570 +0,0 @@
- {
- "config_general": {
- "lighteval_sha": "",
- "num_few_shot_default": 0,
- "num_fewshot_seeds": 1,
- "override_batch_size": "auto:6",
- "max_samples": "null",
- "job_id": "",
- "model_name": "cstr/llama3-8b-spaetzle-v33",
- "model_sha": "",
- "model_dtype": "torch.bfloat16",
- "model_size": ""
- },
- "results": {
- "harness|mmlu_m_es|5": {
- "acc,none": 0.5938203089845507,
- "acc_stderr,none": 0.004253267388577371,
- "alias": "mmlu_m_es"
- },
- "harness|belebele_ita_Latn|5": {
- "acc,none": 0.8433333333333334,
- "acc_stderr,none": 0.012122943719642242,
- "acc_norm,none": 0.8433333333333334,
- "acc_norm_stderr,none": 0.012122943719642242,
- "alias": "belebele_ita_Latn"
- },
- "harness|hellaswag_it|10": {
- "acc,none": 0.5115849015555314,
- "acc_stderr,none": 0.005213728259571761,
- "acc_norm,none": 0.6808441205264876,
- "acc_norm_stderr,none": 0.00486205942505904,
- "alias": "hellaswag_it"
- },
- "harness|mmlu_m_fr|5": {
- "acc,none": 0.5862806508288136,
- "acc_stderr,none": 0.004304630453111705,
- "alias": "mmlu_m_fr"
- },
- "harness|belebele_spa_Latn|5": {
- "acc,none": 0.8655555555555555,
- "acc_stderr,none": 0.01137729604019696,
- "acc_norm,none": 0.8655555555555555,
- "acc_norm_stderr,none": 0.01137729604019696,
- "alias": "belebele_spa_Latn"
- },
- "harness|truthfulqa_mc2_m_it|0": {
- "acc,none": 0.29757343550446996,
- "acc_stderr,none": 0.01634911191290943,
- "alias": "truthfulqa_mc2_m_it"
- },
- "harness|arc_challenge_m_fr|25": {
- "acc,none": 0.5680068434559452,
- "acc_stderr,none": 0.014494184864971345,
- "acc_norm,none": 0.6013686911890505,
- "acc_norm_stderr,none": 0.014326321344263279,
- "alias": "arc_challenge_m_fr"
- },
- "harness|arc_challenge|25": {
- "acc,none": 0.658703071672355,
- "acc_stderr,none": 0.01385583128749772,
- "acc_norm,none": 0.6953924914675768,
- "acc_norm_stderr,none": 0.013449522109932494,
- "alias": "arc_challenge"
- },
- "harness|truthfulqa_mc2_m_es|0": {
- "acc,none": 0.31305449936628643,
- "acc_stderr,none": 0.016519922589911265,
- "alias": "truthfulqa_mc2_m_es"
- },
- "harness|truthfulqa_mc2|0": {
- "acc,none": 0.5930618621358874,
- "acc_stderr,none": 0.015175369538354843,
- "alias": "truthfulqa_mc2"
- },
- "harness|truthfulqa_mc2_m_de|0": {
- "acc,none": 0.28553299492385786,
- "acc_stderr,none": 0.016100222311899746,
- "alias": "truthfulqa_mc2_m_de"
- },
- "harness|arc_challenge_m_de|25": {
- "acc,none": 0.5303678357570573,
- "acc_stderr,none": 0.01460313447281179,
- "acc_norm,none": 0.5705731394354149,
- "acc_norm_stderr,none": 0.014483677397351057,
- "alias": "arc_challenge_m_de"
- },
- "harness|belebele_deu_Latn|5": {
- "acc,none": 0.8755555555555555,
- "acc_stderr,none": 0.011009047987347441,
- "acc_norm,none": 0.8755555555555555,
- "acc_norm_stderr,none": 0.011009047987347441,
- "alias": "belebele_deu_Latn"
- },
- "harness|hellaswag|10": {
- "acc,none": 0.660426209918343,
- "acc_stderr,none": 0.0047259676848063975,
- "acc_norm,none": 0.8460466042620992,
- "acc_norm_stderr,none": 0.0036016648387188783,
- "alias": "hellaswag"
- },
- "harness|belebele_eng_Latn|5": {
- "acc,none": 0.9144444444444444,
- "acc_stderr,none": 0.009328738621020996,
- "acc_norm,none": 0.9144444444444444,
- "acc_norm_stderr,none": 0.009328738621020996,
- "alias": "belebele_eng_Latn"
- },
- "harness|hellaswag_es|10": {
- "acc,none": 0.5450181352677619,
- "acc_stderr,none": 0.005143552907164789,
- "acc_norm,none": 0.7192233838276083,
- "acc_norm_stderr,none": 0.004641657151727049,
- "alias": "hellaswag_es"
- },
- "harness|mmlu_m_it|5": {
- "acc,none": 0.5821560776611014,
- "acc_stderr,none": 0.004286949656863299,
- "alias": "mmlu_m_it"
- },
- "harness|hendrycksTest|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-humanities|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-formal_logic|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_european_history|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_us_history|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_world_history|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-international_law|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-jurisprudence|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-logical_fallacies|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-moral_disputes|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-moral_scenarios|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-philosophy|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-prehistory|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-professional_law|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-world_religions|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-other|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-business_ethics|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-clinical_knowledge|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_medicine|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-global_facts|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-human_aging|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-management|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-marketing|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-medical_genetics|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-miscellaneous|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-nutrition|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-professional_accounting|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-professional_medicine|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-virology|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-social_sciences|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-econometrics|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_geography|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_government_and_politics|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_macroeconomics|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_microeconomics|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_psychology|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-human_sexuality|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-professional_psychology|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-public_relations|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-security_studies|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-sociology|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-us_foreign_policy|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-stem|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-abstract_algebra|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-anatomy|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-astronomy|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_biology|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_chemistry|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_computer_science|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_mathematics|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_physics|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-computer_security|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-conceptual_physics|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-electrical_engineering|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-elementary_mathematics|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_biology|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_chemistry|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_computer_science|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_mathematics|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_physics|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_statistics|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-machine_learning|5": {
- "acc,none": 0.6706309642501068,
- "acc_stderr,none": 0.12741823697613897,
- "alias": "mmlu"
- },
- "harness|gsm8k|5": {
- "exact_match,get-answer": 0.7604245640636846,
- "exact_match_stderr,get-answer": 0.01175686434407741,
- "alias": "gsm8k"
- },
- "harness|hellaswag_fr|10": {
- "acc,none": 0.5294495609338188,
- "acc_stderr,none": 0.005165492179144542,
- "acc_norm,none": 0.6972585136003426,
- "acc_norm_stderr,none": 0.004754766595508117,
- "alias": "hellaswag_fr"
- },
- "harness|mmlu_m_de|5": {
- "acc,none": 0.5791974656810982,
- "acc_stderr,none": 0.00428775410783531,
- "alias": "mmlu_m_de"
- },
- "harness|arc_challenge_m_es|25": {
- "acc,none": 0.5794871794871795,
- "acc_stderr,none": 0.014437908361636175,
- "acc_norm,none": 0.617948717948718,
- "acc_norm_stderr,none": 0.014211168632005491,
- "alias": "arc_challenge_m_es"
- },
- "harness|truthfulqa_mc2_m_fr|0": {
- "acc,none": 0.3087674714104193,
- "acc_stderr,none": 0.016478458449960293,
- "alias": "truthfulqa_mc2_m_fr"
- },
- "harness|hellaswag_de|10": {
- "acc,none": 0.5086464560204953,
- "acc_stderr,none": 0.005165409985466292,
- "acc_norm,none": 0.6669513236549958,
- "acc_norm_stderr,none": 0.0048696827111701585,
- "alias": "hellaswag_de"
- },
- "harness|arc_challenge_m_it|25": {
- "acc,none": 0.5637296834901625,
- "acc_stderr,none": 0.014510816921925736,
- "acc_norm,none": 0.5902480752780154,
- "acc_norm_stderr,none": 0.014389853401438965,
- "alias": "arc_challenge_m_it"
- },
- "harness|belebele_fra_Latn|5": {
- "acc,none": 0.8777777777777778,
- "acc_stderr,none": 0.010924146933565158,
- "acc_norm,none": 0.8777777777777778,
- "acc_norm_stderr,none": 0.010924146933565158,
- "alias": "belebele_fra_Latn"
- }
- },
- "versions": {
- "harness|mmlu_m_es|5": "Yaml",
- "harness|belebele_ita_Latn|5": 0.0,
- "harness|hellaswag_it|10": 1.0,
- "harness|mmlu_m_fr|5": "Yaml",
- "harness|belebele_spa_Latn|5": 0.0,
- "harness|truthfulqa_mc2_m_it|0": "Yaml",
- "harness|arc_challenge_m_fr|25": 1.0,
- "harness|arc_challenge|25": 1.0,
- "harness|truthfulqa_mc2_m_es|0": "Yaml",
- "harness|truthfulqa_mc2|0": 2.0,
- "harness|truthfulqa_mc2_m_de|0": "Yaml",
- "harness|arc_challenge_m_de|25": 1.0,
- "harness|belebele_deu_Latn|5": 0.0,
- "harness|hellaswag|10": 1.0,
- "harness|belebele_eng_Latn|5": 0.0,
- "harness|hellaswag_es|10": 1.0,
- "harness|mmlu_m_it|5": "Yaml",
- "harness|hendrycksTest|5": "N/A",
- "harness|hendrycksTest-humanities|5": "N/A",
- "harness|hendrycksTest-formal_logic|5": "N/A",
- "harness|hendrycksTest-high_school_european_history|5": "N/A",
- "harness|hendrycksTest-high_school_us_history|5": "N/A",
- "harness|hendrycksTest-high_school_world_history|5": "N/A",
- "harness|hendrycksTest-international_law|5": "N/A",
- "harness|hendrycksTest-jurisprudence|5": "N/A",
- "harness|hendrycksTest-logical_fallacies|5": "N/A",
- "harness|hendrycksTest-moral_disputes|5": "N/A",
- "harness|hendrycksTest-moral_scenarios|5": "N/A",
- "harness|hendrycksTest-philosophy|5": "N/A",
- "harness|hendrycksTest-prehistory|5": "N/A",
- "harness|hendrycksTest-professional_law|5": "N/A",
- "harness|hendrycksTest-world_religions|5": "N/A",
- "harness|hendrycksTest-other|5": "N/A",
- "harness|hendrycksTest-business_ethics|5": "N/A",
- "harness|hendrycksTest-clinical_knowledge|5": "N/A",
- "harness|hendrycksTest-college_medicine|5": "N/A",
- "harness|hendrycksTest-global_facts|5": "N/A",
- "harness|hendrycksTest-human_aging|5": "N/A",
- "harness|hendrycksTest-management|5": "N/A",
- "harness|hendrycksTest-marketing|5": "N/A",
- "harness|hendrycksTest-medical_genetics|5": "N/A",
- "harness|hendrycksTest-miscellaneous|5": "N/A",
- "harness|hendrycksTest-nutrition|5": "N/A",
- "harness|hendrycksTest-professional_accounting|5": "N/A",
- "harness|hendrycksTest-professional_medicine|5": "N/A",
- "harness|hendrycksTest-virology|5": "N/A",
- "harness|hendrycksTest-social_sciences|5": "N/A",
- "harness|hendrycksTest-econometrics|5": "N/A",
- "harness|hendrycksTest-high_school_geography|5": "N/A",
- "harness|hendrycksTest-high_school_government_and_politics|5": "N/A",
- "harness|hendrycksTest-high_school_macroeconomics|5": "N/A",
- "harness|hendrycksTest-high_school_microeconomics|5": "N/A",
- "harness|hendrycksTest-high_school_psychology|5": "N/A",
- "harness|hendrycksTest-human_sexuality|5": "N/A",
- "harness|hendrycksTest-professional_psychology|5": "N/A",
- "harness|hendrycksTest-public_relations|5": "N/A",
- "harness|hendrycksTest-security_studies|5": "N/A",
- "harness|hendrycksTest-sociology|5": "N/A",
- "harness|hendrycksTest-us_foreign_policy|5": "N/A",
- "harness|hendrycksTest-stem|5": "N/A",
- "harness|hendrycksTest-abstract_algebra|5": "N/A",
- "harness|hendrycksTest-anatomy|5": "N/A",
- "harness|hendrycksTest-astronomy|5": "N/A",
- "harness|hendrycksTest-college_biology|5": "N/A",
- "harness|hendrycksTest-college_chemistry|5": "N/A",
- "harness|hendrycksTest-college_computer_science|5": "N/A",
- "harness|hendrycksTest-college_mathematics|5": "N/A",
- "harness|hendrycksTest-college_physics|5": "N/A",
- "harness|hendrycksTest-computer_security|5": "N/A",
- "harness|hendrycksTest-conceptual_physics|5": "N/A",
- "harness|hendrycksTest-electrical_engineering|5": "N/A",
- "harness|hendrycksTest-elementary_mathematics|5": "N/A",
- "harness|hendrycksTest-high_school_biology|5": "N/A",
- "harness|hendrycksTest-high_school_chemistry|5": "N/A",
- "harness|hendrycksTest-high_school_computer_science|5": "N/A",
- "harness|hendrycksTest-high_school_mathematics|5": "N/A",
- "harness|hendrycksTest-high_school_physics|5": "N/A",
- "harness|hendrycksTest-high_school_statistics|5": "N/A",
- "harness|hendrycksTest-machine_learning|5": "N/A",
- "harness|gsm8k|5": 2.0,
- "harness|hellaswag_fr|10": 1.0,
- "harness|mmlu_m_de|5": "Yaml",
- "harness|arc_challenge_m_es|25": 1.0,
- "harness|truthfulqa_mc2_m_fr|0": "Yaml",
- "harness|hellaswag_de|10": 1.0,
- "harness|arc_challenge_m_it|25": 1.0,
- "harness|belebele_fra_Latn|5": 0.0
- }
- }
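
For context only (not part of the commit): a minimal sketch of how a results file with this schema can be read. The "results" block is keyed as "harness|<task>|<num_fewshot>", and each entry holds "acc,none" (or "exact_match,get-answer" for gsm8k) plus the matching standard error. The file name below is illustrative, assuming a local copy of the deleted JSON.

import json

# Hypothetical local copy of the deleted results file (file name is an assumption).
path = "results_2024_06_03T12-24-12.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

print("model:", data["config_general"]["model_name"])

# Keys follow "harness|<task>|<num_fewshot>"; each entry carries an accuracy
# metric ("acc,none", or "exact_match,get-answer" for gsm8k) and its stderr.
for key, entry in data["results"].items():
    _, task, fewshot = key.split("|")
    acc = entry.get("acc,none", entry.get("exact_match,get-answer"))
    err = entry.get("acc_stderr,none", entry.get("exact_match_stderr,get-answer"))
    print(f"{task} ({fewshot}-shot): {acc:.4f} +/- {err:.4f}")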