barthfab committed on
Commit
b363076
1 parent: d62d622

uploading results from galatolo/cerbero-7b


This is an automated commit!
The results are from the 2024_06_03T12-58-18 run of the galatolo/cerbero-7b model.

The following tasks were uploaded: ['harness|hellaswag|10', 'harness|truthfulqa_mc2_m_es|0', 'harness|belebele_deu_Latn|5', 'harness|belebele_eng_Latn|5', 'harness|hellaswag_it|10', 'harness|belebele_fra_Latn|5', 'harness|mmlu_m_de|5', 'harness|belebele_spa_Latn|5', 'harness|hellaswag_es|10', 'harness|truthfulqa_mc2_m_it|0', 'harness|arc_challenge|25', 'harness|arc_challenge_m_fr|25', 'harness|hellaswag_fr|10', 'harness|hendrycksTest|5', 'harness|hendrycksTest-humanities|5', 'harness|hendrycksTest-formal_logic|5', 'harness|hendrycksTest-high_school_european_history|5', 'harness|hendrycksTest-high_school_us_history|5', 'harness|hendrycksTest-high_school_world_history|5', 'harness|hendrycksTest-international_law|5', 'harness|hendrycksTest-jurisprudence|5', 'harness|hendrycksTest-logical_fallacies|5', 'harness|hendrycksTest-moral_disputes|5', 'harness|hendrycksTest-moral_scenarios|5', 'harness|hendrycksTest-philosophy|5', 'harness|hendrycksTest-prehistory|5', 'harness|hendrycksTest-professional_law|5', 'harness|hendrycksTest-world_religions|5', 'harness|hendrycksTest-other|5', 'harness|hendrycksTest-business_ethics|5', 'harness|hendrycksTest-clinical_knowledge|5', 'harness|hendrycksTest-college_medicine|5', 'harness|hendrycksTest-global_facts|5', 'harness|hendrycksTest-human_aging|5', 'harness|hendrycksTest-management|5', 'harness|hendrycksTest-marketing|5', 'harness|hendrycksTest-medical_genetics|5', 'harness|hendrycksTest-miscellaneous|5', 'harness|hendrycksTest-nutrition|5', 'harness|hendrycksTest-professional_accounting|5', 'harness|hendrycksTest-professional_medicine|5', 'harness|hendrycksTest-virology|5', 'harness|hendrycksTest-social_sciences|5', 'harness|hendrycksTest-econometrics|5', 'harness|hendrycksTest-high_school_geography|5', 'harness|hendrycksTest-high_school_government_and_politics|5', 'harness|hendrycksTest-high_school_macroeconomics|5', 'harness|hendrycksTest-high_school_microeconomics|5', 'harness|hendrycksTest-high_school_psychology|5', 'harness|hendrycksTest-human_sexuality|5', 'harness|hendrycksTest-professional_psychology|5', 'harness|hendrycksTest-public_relations|5', 'harness|hendrycksTest-security_studies|5', 'harness|hendrycksTest-sociology|5', 'harness|hendrycksTest-us_foreign_policy|5', 'harness|hendrycksTest-stem|5', 'harness|hendrycksTest-abstract_algebra|5', 'harness|hendrycksTest-anatomy|5', 'harness|hendrycksTest-astronomy|5', 'harness|hendrycksTest-college_biology|5', 'harness|hendrycksTest-college_chemistry|5', 'harness|hendrycksTest-college_computer_science|5', 'harness|hendrycksTest-college_mathematics|5', 'harness|hendrycksTest-college_physics|5', 'harness|hendrycksTest-computer_security|5', 'harness|hendrycksTest-conceptual_physics|5', 'harness|hendrycksTest-electrical_engineering|5', 'harness|hendrycksTest-elementary_mathematics|5', 'harness|hendrycksTest-high_school_biology|5', 'harness|hendrycksTest-high_school_chemistry|5', 'harness|hendrycksTest-high_school_computer_science|5', 'harness|hendrycksTest-high_school_mathematics|5', 'harness|hendrycksTest-high_school_physics|5', 'harness|hendrycksTest-high_school_statistics|5', 'harness|hendrycksTest-machine_learning|5', 'harness|hellaswag_de|10', 'harness|truthfulqa_mc2|0', 'harness|arc_challenge_m_es|25', 'harness|mmlu_m_fr|5', 'harness|arc_challenge_m_de|25', 'harness|mmlu_m_es|5', 'harness|truthfulqa_mc2_m_fr|0', 'harness|mmlu_m_it|5', 'harness|arc_challenge_m_it|25', 'harness|belebele_ita_Latn|5', 'harness|truthfulqa_mc2_m_de|0']
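
Each task identifier above follows the pattern `harness|<task>|<n_fewshot>`, where the last field is the few-shot count used for that evaluation. A minimal Python sketch for splitting these identifiers (the helper name `parse_task_id` is illustrative, not part of the harness):

```python
def parse_task_id(task_id: str) -> tuple[str, int]:
    # Identifiers look like "harness|hellaswag|10":
    # suite prefix, task name, few-shot count.
    _suite, task_name, n_fewshot = task_id.split("|")
    return task_name, int(n_fewshot)

# Example: the first uploaded task.
print(parse_task_id("harness|hellaswag|10"))  # ('hellaswag', 10)
```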

galatolo/cerbero-7b/results_2024_06_03T12-58-18.json ADDED
@@ -0,0 +1,564 @@
+ {
+   "config_general": {
+     "lighteval_sha": "",
+     "num_few_shot_default": 0,
+     "num_fewshot_seeds": 1,
+     "override_batch_size": "auto:6",
+     "max_samples": "null",
+     "job_id": "",
+     "model_name": "galatolo/cerbero-7b",
+     "model_sha": "",
+     "model_dtype": "torch.bfloat16",
+     "model_size": ""
+   },
+   "results": {
+     "harness|hellaswag|10": {
+       "acc,none": 0.6113324039036049,
+       "acc_stderr,none": 0.0048645132621943105,
+       "acc_norm,none": 0.8103963353913562,
+       "acc_norm_stderr,none": 0.003911862797736137,
+       "alias": "hellaswag"
+     },
+     "harness|truthfulqa_mc2_m_es|0": {
+       "acc,none": 0.27122940430925224,
+       "acc_stderr,none": 0.015838013071561328,
+       "alias": "truthfulqa_mc2_m_es"
+     },
+     "harness|belebele_deu_Latn|5": {
+       "acc,none": 0.7133333333333334,
+       "acc_stderr,none": 0.015081863703787807,
+       "acc_norm,none": 0.7133333333333334,
+       "acc_norm_stderr,none": 0.015081863703787807,
+       "alias": "belebele_deu_Latn"
+     },
+     "harness|belebele_eng_Latn|5": {
+       "acc,none": 0.8277777777777777,
+       "acc_stderr,none": 0.012592780405781533,
+       "acc_norm,none": 0.8277777777777777,
+       "acc_norm_stderr,none": 0.012592780405781533,
+       "alias": "belebele_eng_Latn"
+     },
+     "harness|hellaswag_it|10": {
+       "acc,none": 0.4778635918633743,
+       "acc_stderr,none": 0.005210014744449426,
+       "acc_norm,none": 0.6315674970085935,
+       "acc_norm_stderr,none": 0.005031342082128735,
+       "alias": "hellaswag_it"
+     },
+     "harness|belebele_fra_Latn|5": {
+       "acc,none": 0.7355555555555555,
+       "acc_stderr,none": 0.014709405413413123,
+       "acc_norm,none": 0.7355555555555555,
+       "acc_norm_stderr,none": 0.014709405413413123,
+       "alias": "belebele_fra_Latn"
+     },
+     "harness|mmlu_m_de|5": {
+       "acc,none": 0.5061849449389048,
+       "acc_stderr,none": 0.004342243233169411,
+       "alias": "mmlu_m_de"
+     },
+     "harness|belebele_spa_Latn|5": {
+       "acc,none": 0.7211111111111111,
+       "acc_stderr,none": 0.014956736888683087,
+       "acc_norm,none": 0.7211111111111111,
+       "acc_norm_stderr,none": 0.014956736888683087,
+       "alias": "belebele_spa_Latn"
+     },
+     "harness|hellaswag_es|10": {
+       "acc,none": 0.47151696180925967,
+       "acc_stderr,none": 0.005156142104462169,
+       "acc_norm,none": 0.6366545764881587,
+       "acc_norm_stderr,none": 0.004967895723013549,
+       "alias": "hellaswag_es"
+     },
+     "harness|truthfulqa_mc2_m_it|0": {
+       "acc,none": 0.30268199233716475,
+       "acc_stderr,none": 0.016428781581749367,
+       "alias": "truthfulqa_mc2_m_it"
+     },
+     "harness|arc_challenge|25": {
+       "acc,none": 0.5699658703071673,
+       "acc_stderr,none": 0.014467631559137991,
+       "acc_norm,none": 0.613481228668942,
+       "acc_norm_stderr,none": 0.014230084761910474,
+       "alias": "arc_challenge"
+     },
+     "harness|arc_challenge_m_fr|25": {
+       "acc,none": 0.42172797262617623,
+       "acc_stderr,none": 0.014449768025902365,
+       "acc_norm,none": 0.4627887082976903,
+       "acc_norm_stderr,none": 0.014589571001051864,
+       "alias": "arc_challenge_m_fr"
+     },
+     "harness|hellaswag_fr|10": {
+       "acc,none": 0.43842364532019706,
+       "acc_stderr,none": 0.005135085885550969,
+       "acc_norm,none": 0.6244377811094453,
+       "acc_norm_stderr,none": 0.0050116629385795605,
+       "alias": "hellaswag_fr"
+     },
+     "harness|hendrycksTest|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-humanities|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-formal_logic|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_european_history|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_us_history|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_world_history|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-international_law|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-jurisprudence|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-logical_fallacies|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-moral_disputes|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-moral_scenarios|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-philosophy|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-prehistory|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-professional_law|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-world_religions|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-other|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-business_ethics|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-clinical_knowledge|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-college_medicine|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-global_facts|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-human_aging|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-management|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-marketing|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-medical_genetics|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-miscellaneous|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-nutrition|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-professional_accounting|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-professional_medicine|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-virology|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-social_sciences|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-econometrics|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_geography|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_government_and_politics|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_macroeconomics|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_microeconomics|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_psychology|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-human_sexuality|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-professional_psychology|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-public_relations|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-security_studies|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-sociology|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-us_foreign_policy|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-stem|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-abstract_algebra|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-anatomy|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-astronomy|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-college_biology|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-college_chemistry|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-college_computer_science|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-college_mathematics|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-college_physics|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-computer_security|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-conceptual_physics|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-electrical_engineering|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-elementary_mathematics|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_biology|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_chemistry|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_computer_science|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_mathematics|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_physics|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-high_school_statistics|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hendrycksTest-machine_learning|5": {
+       "acc,none": 0.6004842615012107,
+       "acc_stderr,none": 0.13776064162347684,
+       "alias": "mmlu"
+     },
+     "harness|hellaswag_de|10": {
+       "acc,none": 0.41417591801878734,
+       "acc_stderr,none": 0.005089507655638241,
+       "acc_norm,none": 0.5672502134927413,
+       "acc_norm_stderr,none": 0.005119240170124385,
+       "alias": "hellaswag_de"
+     },
+     "harness|truthfulqa_mc2|0": {
+       "acc,none": 0.4809105966670407,
+       "acc_stderr,none": 0.015040316417034152,
+       "alias": "truthfulqa_mc2"
+     },
+     "harness|arc_challenge_m_es|25": {
+       "acc,none": 0.45384615384615384,
+       "acc_stderr,none": 0.014561448289640611,
+       "acc_norm,none": 0.5008547008547009,
+       "acc_norm_stderr,none": 0.014623863148445852,
+       "alias": "arc_challenge_m_es"
+     },
+     "harness|mmlu_m_fr|5": {
+       "acc,none": 0.5164616912382552,
+       "acc_stderr,none": 0.004367819439444167,
+       "alias": "mmlu_m_fr"
+     },
+     "harness|arc_challenge_m_de|25": {
+       "acc,none": 0.41488451668092385,
+       "acc_stderr,none": 0.014416604608618246,
+       "acc_norm,none": 0.46877673224978617,
+       "acc_norm_stderr,none": 0.014601589903081696,
+       "alias": "arc_challenge_m_de"
+     },
+     "harness|mmlu_m_es|5": {
+       "acc,none": 0.5146992650367481,
+       "acc_stderr,none": 0.004328309511475916,
+       "alias": "mmlu_m_es"
+     },
+     "harness|truthfulqa_mc2_m_fr|0": {
+       "acc,none": 0.2909783989834816,
+       "acc_stderr,none": 0.01620126162541297,
+       "alias": "truthfulqa_mc2_m_fr"
+     },
+     "harness|mmlu_m_it|5": {
+       "acc,none": 0.5152224824355972,
+       "acc_stderr,none": 0.0043440044239749604,
+       "alias": "mmlu_m_it"
+     },
+     "harness|arc_challenge_m_it|25": {
+       "acc,none": 0.4833190761334474,
+       "acc_stderr,none": 0.01462199936373081,
+       "acc_norm,none": 0.5226689478186484,
+       "acc_norm_stderr,none": 0.014615099353534883,
+       "alias": "arc_challenge_m_it"
+     },
+     "harness|belebele_ita_Latn|5": {
+       "acc,none": 0.7177777777777777,
+       "acc_stderr,none": 0.015011039301019264,
+       "acc_norm,none": 0.7177777777777777,
+       "acc_norm_stderr,none": 0.015011039301019264,
+       "alias": "belebele_ita_Latn"
+     },
+     "harness|truthfulqa_mc2_m_de|0": {
+       "acc,none": 0.2715736040609137,
+       "acc_stderr,none": 0.0158543794886016,
+       "alias": "truthfulqa_mc2_m_de"
+     }
+   },
+   "versions": {
+     "harness|hellaswag|10": 1.0,
+     "harness|truthfulqa_mc2_m_es|0": "Yaml",
+     "harness|belebele_deu_Latn|5": 0.0,
+     "harness|belebele_eng_Latn|5": 0.0,
+     "harness|hellaswag_it|10": 1.0,
+     "harness|belebele_fra_Latn|5": 0.0,
+     "harness|mmlu_m_de|5": "Yaml",
+     "harness|belebele_spa_Latn|5": 0.0,
+     "harness|hellaswag_es|10": 1.0,
+     "harness|truthfulqa_mc2_m_it|0": "Yaml",
+     "harness|arc_challenge|25": 1.0,
+     "harness|arc_challenge_m_fr|25": 1.0,
+     "harness|hellaswag_fr|10": 1.0,
+     "harness|hendrycksTest|5": "N/A",
+     "harness|hendrycksTest-humanities|5": "N/A",
+     "harness|hendrycksTest-formal_logic|5": "N/A",
+     "harness|hendrycksTest-high_school_european_history|5": "N/A",
+     "harness|hendrycksTest-high_school_us_history|5": "N/A",
+     "harness|hendrycksTest-high_school_world_history|5": "N/A",
+     "harness|hendrycksTest-international_law|5": "N/A",
+     "harness|hendrycksTest-jurisprudence|5": "N/A",
+     "harness|hendrycksTest-logical_fallacies|5": "N/A",
+     "harness|hendrycksTest-moral_disputes|5": "N/A",
+     "harness|hendrycksTest-moral_scenarios|5": "N/A",
+     "harness|hendrycksTest-philosophy|5": "N/A",
+     "harness|hendrycksTest-prehistory|5": "N/A",
+     "harness|hendrycksTest-professional_law|5": "N/A",
+     "harness|hendrycksTest-world_religions|5": "N/A",
+     "harness|hendrycksTest-other|5": "N/A",
+     "harness|hendrycksTest-business_ethics|5": "N/A",
+     "harness|hendrycksTest-clinical_knowledge|5": "N/A",
+     "harness|hendrycksTest-college_medicine|5": "N/A",
+     "harness|hendrycksTest-global_facts|5": "N/A",
+     "harness|hendrycksTest-human_aging|5": "N/A",
+     "harness|hendrycksTest-management|5": "N/A",
+     "harness|hendrycksTest-marketing|5": "N/A",
+     "harness|hendrycksTest-medical_genetics|5": "N/A",
+     "harness|hendrycksTest-miscellaneous|5": "N/A",
+     "harness|hendrycksTest-nutrition|5": "N/A",
+     "harness|hendrycksTest-professional_accounting|5": "N/A",
+     "harness|hendrycksTest-professional_medicine|5": "N/A",
+     "harness|hendrycksTest-virology|5": "N/A",
+     "harness|hendrycksTest-social_sciences|5": "N/A",
+     "harness|hendrycksTest-econometrics|5": "N/A",
+     "harness|hendrycksTest-high_school_geography|5": "N/A",
+     "harness|hendrycksTest-high_school_government_and_politics|5": "N/A",
+     "harness|hendrycksTest-high_school_macroeconomics|5": "N/A",
+     "harness|hendrycksTest-high_school_microeconomics|5": "N/A",
+     "harness|hendrycksTest-high_school_psychology|5": "N/A",
+     "harness|hendrycksTest-human_sexuality|5": "N/A",
+     "harness|hendrycksTest-professional_psychology|5": "N/A",
+     "harness|hendrycksTest-public_relations|5": "N/A",
+     "harness|hendrycksTest-security_studies|5": "N/A",
+     "harness|hendrycksTest-sociology|5": "N/A",
+     "harness|hendrycksTest-us_foreign_policy|5": "N/A",
+     "harness|hendrycksTest-stem|5": "N/A",
+     "harness|hendrycksTest-abstract_algebra|5": "N/A",
+     "harness|hendrycksTest-anatomy|5": "N/A",
+     "harness|hendrycksTest-astronomy|5": "N/A",
+     "harness|hendrycksTest-college_biology|5": "N/A",
+     "harness|hendrycksTest-college_chemistry|5": "N/A",
+     "harness|hendrycksTest-college_computer_science|5": "N/A",
+     "harness|hendrycksTest-college_mathematics|5": "N/A",
+     "harness|hendrycksTest-college_physics|5": "N/A",
+     "harness|hendrycksTest-computer_security|5": "N/A",
+     "harness|hendrycksTest-conceptual_physics|5": "N/A",
+     "harness|hendrycksTest-electrical_engineering|5": "N/A",
+     "harness|hendrycksTest-elementary_mathematics|5": "N/A",
+     "harness|hendrycksTest-high_school_biology|5": "N/A",
+     "harness|hendrycksTest-high_school_chemistry|5": "N/A",
+     "harness|hendrycksTest-high_school_computer_science|5": "N/A",
+     "harness|hendrycksTest-high_school_mathematics|5": "N/A",
+     "harness|hendrycksTest-high_school_physics|5": "N/A",
+     "harness|hendrycksTest-high_school_statistics|5": "N/A",
+     "harness|hendrycksTest-machine_learning|5": "N/A",
+     "harness|hellaswag_de|10": 1.0,
+     "harness|truthfulqa_mc2|0": 2.0,
+     "harness|arc_challenge_m_es|25": 1.0,
+     "harness|mmlu_m_fr|5": "Yaml",
+     "harness|arc_challenge_m_de|25": 1.0,
+     "harness|mmlu_m_es|5": "Yaml",
+     "harness|truthfulqa_mc2_m_fr|0": "Yaml",
+     "harness|mmlu_m_it|5": "Yaml",
+     "harness|arc_challenge_m_it|25": 1.0,
+     "harness|belebele_ita_Latn|5": 0.0,
+     "harness|truthfulqa_mc2_m_de|0": "Yaml"
+   }
+ }
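
The file groups everything under three top-level keys: `config_general` (run metadata), `results` (per-task metrics), and `versions` (task versions). A minimal sketch of reading it back and printing each task's accuracy with its standard error; the path is the one added in this commit, and the `acc,none` / `acc_stderr,none` keys are the ones every task above reports:

```python
import json

# Results file added in this commit.
path = "galatolo/cerbero-7b/results_2024_06_03T12-58-18.json"

with open(path) as f:
    data = json.load(f)

print("model:", data["config_general"]["model_name"])
for task_id, metrics in data["results"].items():
    acc = metrics["acc,none"]
    stderr = metrics["acc_stderr,none"]
    print(f"{task_id}: acc={acc:.4f} ± {stderr:.4f}")
```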