barthfab committed
Commit 84463de
1 parent: eb25619

Delete cstr/llama3-8b-spaetzle-v20

cstr/llama3-8b-spaetzle-v20/results_2024_05_31T12-04-09.json DELETED
@@ -1,570 +0,0 @@
- {
-     "config_general": {
-         "lighteval_sha": "",
-         "num_few_shot_default": 0,
-         "num_fewshot_seeds": 1,
-         "override_batch_size": "auto:6",
-         "max_samples": "null",
-         "job_id": "",
-         "model_name": "cstr/llama3-8b-spaetzle-v20",
-         "model_sha": "",
-         "model_dtype": "torch.bfloat16",
-         "model_size": ""
-     },
-     "results": {
-         "harness|truthfulqa_mc2_m_de|0": {
-             "acc,none": 0.28426395939086296,
-             "acc_stderr,none": 0.01607866465713687,
-             "alias": "truthfulqa_mc2_m_de"
-         },
-         "harness|truthfulqa_mc2_m_es|0": {
-             "acc,none": 0.3155893536121673,
-             "acc_stderr,none": 0.016556039028040398,
-             "alias": "truthfulqa_mc2_m_es"
-         },
-         "harness|arc_challenge_m_it|25": {
-             "acc,none": 0.5697177074422584,
-             "acc_stderr,none": 0.014487223979103696,
-             "acc_norm,none": 0.5970915312232677,
-             "acc_norm_stderr,none": 0.014351663146567226,
-             "alias": "arc_challenge_m_it"
-         },
-         "harness|mmlu_m_de|5": {
-             "acc,none": 0.5723336853220697,
-             "acc_stderr,none": 0.004296893084996086,
-             "alias": "mmlu_m_de"
-         },
-         "harness|belebele_ita_Latn|5": {
-             "acc,none": 0.8388888888888889,
-             "acc_stderr,none": 0.01226126056136015,
-             "acc_norm,none": 0.8388888888888889,
-             "acc_norm_stderr,none": 0.01226126056136015,
-             "alias": "belebele_ita_Latn"
-         },
-         "harness|mmlu_m_fr|5": {
-             "acc,none": 0.583377893209075,
-             "acc_stderr,none": 0.004308998232721124,
-             "alias": "mmlu_m_fr"
-         },
-         "harness|belebele_eng_Latn|5": {
-             "acc,none": 0.9177777777777778,
-             "acc_stderr,none": 0.009161857406673819,
-             "acc_norm,none": 0.9177777777777778,
-             "acc_norm_stderr,none": 0.009161857406673819,
-             "alias": "belebele_eng_Latn"
-         },
-         "harness|truthfulqa_mc2_m_it|0": {
-             "acc,none": 0.2962962962962963,
-             "acc_stderr,none": 0.016328814422102055,
-             "alias": "truthfulqa_mc2_m_it"
-         },
-         "harness|arc_challenge_m_de|25": {
-             "acc,none": 0.5243798118049615,
-             "acc_stderr,none": 0.014612741460569867,
-             "acc_norm,none": 0.5671514114627887,
-             "acc_norm_stderr,none": 0.01449759923259859,
-             "alias": "arc_challenge_m_de"
-         },
-         "harness|mmlu_m_es|5": {
-             "acc,none": 0.5939703014849258,
-             "acc_stderr,none": 0.004253019032074493,
-             "alias": "mmlu_m_es"
-         },
-         "harness|gsm8k|5": {
-             "exact_match,get-answer": 0.7482941622441244,
-             "exact_match_stderr,get-answer": 0.01195432661770502,
-             "alias": "gsm8k"
-         },
-         "harness|arc_challenge_m_es|25": {
-             "acc,none": 0.5991452991452991,
-             "acc_stderr,none": 0.014333502054419345,
-             "acc_norm,none": 0.6213675213675214,
-             "acc_norm_stderr,none": 0.01418652250076662,
-             "alias": "arc_challenge_m_es"
-         },
-         "harness|belebele_fra_Latn|5": {
-             "acc,none": 0.8777777777777778,
-             "acc_stderr,none": 0.01092414693356515,
-             "acc_norm,none": 0.8777777777777778,
-             "acc_norm_stderr,none": 0.01092414693356515,
-             "alias": "belebele_fra_Latn"
-         },
-         "harness|arc_challenge_m_fr|25": {
-             "acc,none": 0.5765611633875107,
-             "acc_stderr,none": 0.014457613658173296,
-             "acc_norm,none": 0.6099230111206159,
-             "acc_norm_stderr,none": 0.01427221086953961,
-             "alias": "arc_challenge_m_fr"
-         },
-         "harness|belebele_spa_Latn|5": {
-             "acc,none": 0.8644444444444445,
-             "acc_stderr,none": 0.01141687795501916,
-             "acc_norm,none": 0.8644444444444445,
-             "acc_norm_stderr,none": 0.01141687795501916,
-             "alias": "belebele_spa_Latn"
-         },
-         "harness|mmlu_m_it|5": {
-             "acc,none": 0.5802674321976279,
-             "acc_stderr,none": 0.004289651918134123,
-             "alias": "mmlu_m_it"
-         },
-         "harness|arc_challenge|25": {
-             "acc,none": 0.6834470989761092,
-             "acc_stderr,none": 0.013592431519068075,
-             "acc_norm,none": 0.7184300341296929,
-             "acc_norm_stderr,none": 0.013143376735009033,
-             "alias": "arc_challenge"
-         },
-         "harness|hendrycksTest|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-humanities|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-formal_logic|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_european_history|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_us_history|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_world_history|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-international_law|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-jurisprudence|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-logical_fallacies|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-moral_disputes|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-moral_scenarios|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-philosophy|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-prehistory|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-professional_law|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-world_religions|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-other|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-business_ethics|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-clinical_knowledge|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-college_medicine|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-global_facts|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-human_aging|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-management|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-marketing|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-medical_genetics|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-miscellaneous|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-nutrition|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-professional_accounting|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-professional_medicine|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-virology|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-social_sciences|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-econometrics|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_geography|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_government_and_politics|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_macroeconomics|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_microeconomics|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_psychology|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-human_sexuality|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-professional_psychology|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-public_relations|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-security_studies|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-sociology|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-us_foreign_policy|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-stem|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-abstract_algebra|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-anatomy|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-astronomy|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-college_biology|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-college_chemistry|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-college_computer_science|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-college_mathematics|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-college_physics|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-computer_security|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-conceptual_physics|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-electrical_engineering|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-elementary_mathematics|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_biology|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_chemistry|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_computer_science|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_mathematics|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_physics|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-high_school_statistics|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hendrycksTest-machine_learning|5": {
-             "acc,none": 0.6718416180031335,
-             "acc_stderr,none": 0.12568774578503591,
-             "alias": "mmlu"
-         },
-         "harness|hellaswag|10": {
-             "acc,none": 0.6730730930093607,
-             "acc_stderr,none": 0.004681316064444419,
-             "acc_norm,none": 0.8529177454690301,
-             "acc_norm_stderr,none": 0.0035346403488165762,
-             "alias": "hellaswag"
-         },
-         "harness|hellaswag_es|10": {
-             "acc,none": 0.5448047791764455,
-             "acc_stderr,none": 0.005143751659378115,
-             "acc_norm,none": 0.7191167057819501,
-             "acc_norm_stderr,none": 0.004642194529169888,
-             "alias": "hellaswag_es"
-         },
-         "harness|hellaswag_de|10": {
-             "acc,none": 0.49540990606319385,
-             "acc_stderr,none": 0.005165964806453526,
-             "acc_norm,none": 0.645068317677199,
-             "acc_norm_stderr,none": 0.004943960357536986,
-             "alias": "hellaswag_de"
-         },
-         "harness|hellaswag_it|10": {
-             "acc,none": 0.5114761231371696,
-             "acc_stderr,none": 0.0052137544312666545,
-             "acc_norm,none": 0.6786685521592516,
-             "acc_norm_stderr,none": 0.004870801940061179,
-             "alias": "hellaswag_it"
-         },
-         "harness|truthfulqa_mc2_m_fr|0": {
-             "acc,none": 0.312579415501906,
-             "acc_stderr,none": 0.01653408556216796,
-             "alias": "truthfulqa_mc2_m_fr"
-         },
-         "harness|truthfulqa_mc2|0": {
-             "acc,none": 0.6092164844893638,
-             "acc_stderr,none": 0.015197356888966637,
-             "alias": "truthfulqa_mc2"
-         },
-         "harness|hellaswag_fr|10": {
-             "acc,none": 0.5314842578710645,
-             "acc_stderr,none": 0.005164206705705013,
-             "acc_norm,none": 0.6976868708502891,
-             "acc_norm_stderr,none": 0.004752860855507153,
-             "alias": "hellaswag_fr"
-         },
-         "harness|belebele_deu_Latn|5": {
-             "acc,none": 0.8811111111111111,
-             "acc_stderr,none": 0.010794589393035976,
-             "acc_norm,none": 0.8811111111111111,
-             "acc_norm_stderr,none": 0.010794589393035976,
-             "alias": "belebele_deu_Latn"
-         }
-     },
-     "versions": {
-         "harness|truthfulqa_mc2_m_de|0": "Yaml",
-         "harness|truthfulqa_mc2_m_es|0": "Yaml",
-         "harness|arc_challenge_m_it|25": 1.0,
-         "harness|mmlu_m_de|5": "Yaml",
-         "harness|belebele_ita_Latn|5": 0.0,
-         "harness|mmlu_m_fr|5": "Yaml",
-         "harness|belebele_eng_Latn|5": 0.0,
-         "harness|truthfulqa_mc2_m_it|0": "Yaml",
-         "harness|arc_challenge_m_de|25": 1.0,
-         "harness|mmlu_m_es|5": "Yaml",
-         "harness|gsm8k|5": 2.0,
-         "harness|arc_challenge_m_es|25": 1.0,
-         "harness|belebele_fra_Latn|5": 0.0,
-         "harness|arc_challenge_m_fr|25": 1.0,
-         "harness|belebele_spa_Latn|5": 0.0,
-         "harness|mmlu_m_it|5": "Yaml",
-         "harness|arc_challenge|25": 1.0,
-         "harness|hendrycksTest|5": "N/A",
-         "harness|hendrycksTest-humanities|5": "N/A",
-         "harness|hendrycksTest-formal_logic|5": "N/A",
-         "harness|hendrycksTest-high_school_european_history|5": "N/A",
-         "harness|hendrycksTest-high_school_us_history|5": "N/A",
-         "harness|hendrycksTest-high_school_world_history|5": "N/A",
-         "harness|hendrycksTest-international_law|5": "N/A",
-         "harness|hendrycksTest-jurisprudence|5": "N/A",
-         "harness|hendrycksTest-logical_fallacies|5": "N/A",
-         "harness|hendrycksTest-moral_disputes|5": "N/A",
-         "harness|hendrycksTest-moral_scenarios|5": "N/A",
-         "harness|hendrycksTest-philosophy|5": "N/A",
-         "harness|hendrycksTest-prehistory|5": "N/A",
-         "harness|hendrycksTest-professional_law|5": "N/A",
-         "harness|hendrycksTest-world_religions|5": "N/A",
-         "harness|hendrycksTest-other|5": "N/A",
-         "harness|hendrycksTest-business_ethics|5": "N/A",
-         "harness|hendrycksTest-clinical_knowledge|5": "N/A",
-         "harness|hendrycksTest-college_medicine|5": "N/A",
-         "harness|hendrycksTest-global_facts|5": "N/A",
-         "harness|hendrycksTest-human_aging|5": "N/A",
-         "harness|hendrycksTest-management|5": "N/A",
-         "harness|hendrycksTest-marketing|5": "N/A",
-         "harness|hendrycksTest-medical_genetics|5": "N/A",
-         "harness|hendrycksTest-miscellaneous|5": "N/A",
-         "harness|hendrycksTest-nutrition|5": "N/A",
-         "harness|hendrycksTest-professional_accounting|5": "N/A",
-         "harness|hendrycksTest-professional_medicine|5": "N/A",
-         "harness|hendrycksTest-virology|5": "N/A",
-         "harness|hendrycksTest-social_sciences|5": "N/A",
-         "harness|hendrycksTest-econometrics|5": "N/A",
-         "harness|hendrycksTest-high_school_geography|5": "N/A",
-         "harness|hendrycksTest-high_school_government_and_politics|5": "N/A",
-         "harness|hendrycksTest-high_school_macroeconomics|5": "N/A",
-         "harness|hendrycksTest-high_school_microeconomics|5": "N/A",
-         "harness|hendrycksTest-high_school_psychology|5": "N/A",
-         "harness|hendrycksTest-human_sexuality|5": "N/A",
-         "harness|hendrycksTest-professional_psychology|5": "N/A",
-         "harness|hendrycksTest-public_relations|5": "N/A",
-         "harness|hendrycksTest-security_studies|5": "N/A",
-         "harness|hendrycksTest-sociology|5": "N/A",
-         "harness|hendrycksTest-us_foreign_policy|5": "N/A",
-         "harness|hendrycksTest-stem|5": "N/A",
-         "harness|hendrycksTest-abstract_algebra|5": "N/A",
-         "harness|hendrycksTest-anatomy|5": "N/A",
-         "harness|hendrycksTest-astronomy|5": "N/A",
-         "harness|hendrycksTest-college_biology|5": "N/A",
-         "harness|hendrycksTest-college_chemistry|5": "N/A",
-         "harness|hendrycksTest-college_computer_science|5": "N/A",
-         "harness|hendrycksTest-college_mathematics|5": "N/A",
-         "harness|hendrycksTest-college_physics|5": "N/A",
-         "harness|hendrycksTest-computer_security|5": "N/A",
-         "harness|hendrycksTest-conceptual_physics|5": "N/A",
-         "harness|hendrycksTest-electrical_engineering|5": "N/A",
-         "harness|hendrycksTest-elementary_mathematics|5": "N/A",
-         "harness|hendrycksTest-high_school_biology|5": "N/A",
-         "harness|hendrycksTest-high_school_chemistry|5": "N/A",
-         "harness|hendrycksTest-high_school_computer_science|5": "N/A",
-         "harness|hendrycksTest-high_school_mathematics|5": "N/A",
-         "harness|hendrycksTest-high_school_physics|5": "N/A",
-         "harness|hendrycksTest-high_school_statistics|5": "N/A",
-         "harness|hendrycksTest-machine_learning|5": "N/A",
-         "harness|hellaswag|10": 1.0,
-         "harness|hellaswag_es|10": 1.0,
-         "harness|hellaswag_de|10": 1.0,
-         "harness|hellaswag_it|10": 1.0,
-         "harness|truthfulqa_mc2_m_fr|0": "Yaml",
-         "harness|truthfulqa_mc2|0": 2.0,
-         "harness|hellaswag_fr|10": 1.0,
-         "harness|belebele_deu_Latn|5": 0.0
-     }
- }