Xuehao committed on
Commit
d636e5b
1 Parent(s): d02568f

Commit from Azure DevOps update Results

cstr/llama3-8b-spaetzle-v33-int4-inc/results_2024-05-29-02-03-16.json ADDED
@@ -0,0 +1,595 @@
{
  "config_general": {
    "lighteval_sha": "1.4",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": -1,
    "start_time": null,
    "end_time": "2024-05-29-02-03-16",
    "total_evaluation_time_secondes": "",
    "model_name": "cstr/llama3-8b-spaetzle-v33-int4-inc",
    "model_sha": "",
    "model_dtype": "4bit",
    "model_size": 5.74,
    "model_params": 7.04,
    "quant_type": "GPTQ",
    "precision": "4bit"
  },
  "results": {
    "harness|boolq|0": {
      "acc,none": 0.8409785932721713,
      "acc_stderr,none": 0.006396064074650086,
      "alias": "boolq"
    },
    "harness|hellaswag|0": {
      "acc,none": 0.6246763592909779,
      "acc_stderr,none": 0.004832167854501631,
      "acc_norm,none": 0.8075084644493129,
      "acc_norm_stderr,none": 0.003934512022006092,
      "alias": "hellaswag"
    },
    "harness|arc:easy|0": {
      "acc,none": 0.8526936026936027,
      "acc_stderr,none": 0.007272362176697233,
      "acc_norm,none": 0.8476430976430976,
      "acc_norm_stderr,none": 0.007374044856218303,
      "alias": "arc_easy"
    },
    "harness|truthfulqa:mc1|0": {
      "acc,none": 0.4149326805385557,
      "acc_stderr,none": 0.017248314465805978,
      "alias": "truthfulqa_mc1"
    },
    "harness|arc:challenge|0": {
      "acc,none": 0.5981228668941979,
      "acc_stderr,none": 0.014327268614578276,
      "acc_norm,none": 0.6331058020477816,
      "acc_norm_stderr,none": 0.0140841331181043,
      "alias": "arc_challenge"
    },
    "harness|truthfulqa:mc2|0": {
      "acc,none": 0.5893229590481951,
      "acc_stderr,none": 0.015338508411065052,
      "alias": "truthfulqa_mc2"
    },
    "harness|piqa|0": {
      "acc,none": 0.8030467899891186,
      "acc_stderr,none": 0.009278918898006378,
      "acc_norm,none": 0.8161044613710555,
      "acc_norm_stderr,none": 0.009038656758892243,
      "alias": "piqa"
    },
    "harness|mmlu|0": {
      "acc,none": 0.6410767696909272,
      "acc_stderr,none": 0.0037795647515489967,
      "alias": "mmlu"
    },
    "harness|mmlu_humanities|0": {
      "alias": " - humanities",
      "acc,none": 0.5738575982996812,
      "acc_stderr,none": 0.006620493476238879
    },
    "harness|mmlu_formal_logic|0": {
      "alias": " - formal_logic",
      "acc,none": 0.42063492063492064,
      "acc_stderr,none": 0.04415438226743744
    },
    "harness|mmlu_high_school_european_history|0": {
      "alias": " - high_school_european_history",
      "acc,none": 0.7333333333333333,
      "acc_stderr,none": 0.03453131801885417
    },
    "harness|mmlu_high_school_us_history|0": {
      "alias": " - high_school_us_history",
      "acc,none": 0.8431372549019608,
      "acc_stderr,none": 0.025524722324553332
    },
    "harness|mmlu_high_school_world_history|0": {
      "alias": " - high_school_world_history",
      "acc,none": 0.8523206751054853,
      "acc_stderr,none": 0.023094329582595673
    },
    "harness|mmlu_international_law|0": {
      "alias": " - international_law",
      "acc,none": 0.7520661157024794,
      "acc_stderr,none": 0.039418975265163025
    },
    "harness|mmlu_jurisprudence|0": {
      "alias": " - jurisprudence",
      "acc,none": 0.7962962962962963,
      "acc_stderr,none": 0.03893542518824847
    },
    "harness|mmlu_logical_fallacies|0": {
      "alias": " - logical_fallacies",
      "acc,none": 0.8098159509202454,
      "acc_stderr,none": 0.030833491146281228
    },
    "harness|mmlu_moral_disputes|0": {
      "alias": " - moral_disputes",
      "acc,none": 0.7138728323699421,
      "acc_stderr,none": 0.024332146779134128
    },
    "harness|mmlu_moral_scenarios|0": {
      "alias": " - moral_scenarios",
      "acc,none": 0.2659217877094972,
      "acc_stderr,none": 0.014776765066438886
    },
    "harness|mmlu_philosophy|0": {
      "alias": " - philosophy",
      "acc,none": 0.7202572347266881,
      "acc_stderr,none": 0.0254942593506949
    },
    "harness|mmlu_prehistory|0": {
      "alias": " - prehistory",
      "acc,none": 0.7376543209876543,
      "acc_stderr,none": 0.024477222856135118
    },
    "harness|mmlu_professional_law|0": {
      "alias": " - professional_law",
      "acc,none": 0.4954367666232073,
      "acc_stderr,none": 0.012769704263117531
    },
    "harness|mmlu_world_religions|0": {
      "alias": " - world_religions",
      "acc,none": 0.7894736842105263,
      "acc_stderr,none": 0.031267817146631786
    },
    "harness|mmlu_other|0": {
      "alias": " - other",
      "acc,none": 0.7203089797232056,
      "acc_stderr,none": 0.007735515290510612
    },
    "harness|mmlu_business_ethics|0": {
      "alias": " - business_ethics",
      "acc,none": 0.59,
      "acc_stderr,none": 0.049431107042371025
    },
    "harness|mmlu_clinical_knowledge|0": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.7433962264150943,
      "acc_stderr,none": 0.026880647889051975
    },
    "harness|mmlu_college_medicine|0": {
      "alias": " - college_medicine",
      "acc,none": 0.6589595375722543,
      "acc_stderr,none": 0.036146654241808254
    },
    "harness|mmlu_global_facts|0": {
      "alias": " - global_facts",
      "acc,none": 0.38,
      "acc_stderr,none": 0.048783173121456316
    },
    "harness|mmlu_human_aging|0": {
      "alias": " - human_aging",
      "acc,none": 0.7085201793721974,
      "acc_stderr,none": 0.030500283176545843
    },
    "harness|mmlu_management|0": {
      "alias": " - management",
      "acc,none": 0.8252427184466019,
      "acc_stderr,none": 0.037601780060266196
    },
    "harness|mmlu_marketing|0": {
      "alias": " - marketing",
      "acc,none": 0.8760683760683761,
      "acc_stderr,none": 0.02158649400128138
    },
    "harness|mmlu_medical_genetics|0": {
      "alias": " - medical_genetics",
      "acc,none": 0.79,
      "acc_stderr,none": 0.040936018074033256
    },
    "harness|mmlu_miscellaneous|0": {
      "alias": " - miscellaneous",
      "acc,none": 0.8339719029374202,
      "acc_stderr,none": 0.013306478243066298
    },
    "harness|mmlu_nutrition|0": {
      "alias": " - nutrition",
      "acc,none": 0.7254901960784313,
      "acc_stderr,none": 0.02555316999182651
    },
    "harness|mmlu_professional_accounting|0": {
      "alias": " - professional_accounting",
      "acc,none": 0.5141843971631206,
      "acc_stderr,none": 0.02981549448368206
    },
    "harness|mmlu_professional_medicine|0": {
      "alias": " - professional_medicine",
      "acc,none": 0.7279411764705882,
      "acc_stderr,none": 0.027033041151681456
    },
    "harness|mmlu_virology|0": {
      "alias": " - virology",
      "acc,none": 0.5120481927710844,
      "acc_stderr,none": 0.03891364495835817
    },
    "harness|mmlu_social_sciences|0": {
      "alias": " - social_sciences",
      "acc,none": 0.7546311342216444,
      "acc_stderr,none": 0.00761557074715898
    },
    "harness|mmlu_econometrics|0": {
      "alias": " - econometrics",
      "acc,none": 0.543859649122807,
      "acc_stderr,none": 0.046854730419077895
    },
    "harness|mmlu_high_school_geography|0": {
      "alias": " - high_school_geography",
      "acc,none": 0.803030303030303,
      "acc_stderr,none": 0.02833560973246336
    },
    "harness|mmlu_high_school_government_and_politics|0": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.8808290155440415,
      "acc_stderr,none": 0.023381935348121434
    },
    "harness|mmlu_high_school_macroeconomics|0": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.6358974358974359,
      "acc_stderr,none": 0.024396672985094767
    },
    "harness|mmlu_high_school_microeconomics|0": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.7521008403361344,
      "acc_stderr,none": 0.028047967224176892
    },
    "harness|mmlu_high_school_psychology|0": {
      "alias": " - high_school_psychology",
      "acc,none": 0.8458715596330275,
      "acc_stderr,none": 0.015480826865374294
    },
    "harness|mmlu_human_sexuality|0": {
      "alias": " - human_sexuality",
      "acc,none": 0.7480916030534351,
      "acc_stderr,none": 0.038073871163060866
    },
    "harness|mmlu_professional_psychology|0": {
      "alias": " - professional_psychology",
      "acc,none": 0.7058823529411765,
      "acc_stderr,none": 0.018433427649401892
    },
    "harness|mmlu_public_relations|0": {
      "alias": " - public_relations",
      "acc,none": 0.7181818181818181,
      "acc_stderr,none": 0.043091187099464585
    },
    "harness|mmlu_security_studies|0": {
      "alias": " - security_studies",
      "acc,none": 0.7346938775510204,
      "acc_stderr,none": 0.02826388994378461
    },
    "harness|mmlu_sociology|0": {
      "alias": " - sociology",
      "acc,none": 0.835820895522388,
      "acc_stderr,none": 0.026193923544454132
    },
    "harness|mmlu_us_foreign_policy|0": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.86,
      "acc_stderr,none": 0.03487350880197769
    },
    "harness|mmlu_stem|0": {
      "alias": " - stem",
      "acc,none": 0.5524896923564859,
      "acc_stderr,none": 0.008508241430807618
    },
    "harness|mmlu_abstract_algebra|0": {
      "alias": " - abstract_algebra",
      "acc,none": 0.35,
      "acc_stderr,none": 0.0479372485441102
    },
    "harness|mmlu_anatomy|0": {
      "alias": " - anatomy",
      "acc,none": 0.6666666666666666,
      "acc_stderr,none": 0.04072314811876837
    },
    "harness|mmlu_astronomy|0": {
      "alias": " - astronomy",
      "acc,none": 0.6973684210526315,
      "acc_stderr,none": 0.03738520676119668
    },
    "harness|mmlu_college_biology|0": {
      "alias": " - college_biology",
      "acc,none": 0.8125,
      "acc_stderr,none": 0.032639560491693344
    },
    "harness|mmlu_college_chemistry|0": {
      "alias": " - college_chemistry",
      "acc,none": 0.42,
      "acc_stderr,none": 0.049604496374885836
    },
    "harness|mmlu_college_computer_science|0": {
      "alias": " - college_computer_science",
      "acc,none": 0.54,
      "acc_stderr,none": 0.05009082659620333
    },
    "harness|mmlu_college_mathematics|0": {
      "alias": " - college_mathematics",
      "acc,none": 0.33,
      "acc_stderr,none": 0.04725815626252604
    },
    "harness|mmlu_college_physics|0": {
      "alias": " - college_physics",
      "acc,none": 0.4411764705882353,
      "acc_stderr,none": 0.049406356306056595
    },
    "harness|mmlu_computer_security|0": {
      "alias": " - computer_security",
      "acc,none": 0.79,
      "acc_stderr,none": 0.040936018074033256
    },
    "harness|mmlu_conceptual_physics|0": {
      "alias": " - conceptual_physics",
      "acc,none": 0.5446808510638298,
      "acc_stderr,none": 0.032555253593403555
    },
    "harness|mmlu_electrical_engineering|0": {
      "alias": " - electrical_engineering",
      "acc,none": 0.5862068965517241,
      "acc_stderr,none": 0.04104269211806232
    },
    "harness|mmlu_elementary_mathematics|0": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.455026455026455,
      "acc_stderr,none": 0.025646928361049395
    },
    "harness|mmlu_high_school_biology|0": {
      "alias": " - high_school_biology",
      "acc,none": 0.7677419354838709,
      "acc_stderr,none": 0.02402225613030824
    },
    "harness|mmlu_high_school_chemistry|0": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.5073891625615764,
      "acc_stderr,none": 0.035176035403610105
    },
    "harness|mmlu_high_school_computer_science|0": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.74,
      "acc_stderr,none": 0.04408440022768079
    },
    "harness|mmlu_high_school_mathematics|0": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.42592592592592593,
      "acc_stderr,none": 0.03014913560136594
    },
    "harness|mmlu_high_school_physics|0": {
      "alias": " - high_school_physics",
      "acc,none": 0.39072847682119205,
      "acc_stderr,none": 0.039837983066598075
    },
    "harness|mmlu_high_school_statistics|0": {
      "alias": " - high_school_statistics",
      "acc,none": 0.5462962962962963,
      "acc_stderr,none": 0.03395322726375797
    },
    "harness|mmlu_machine_learning|0": {
      "alias": " - machine_learning",
      "acc,none": 0.4375,
      "acc_stderr,none": 0.04708567521880525
    },
    "harness|lambada:openai|0": {
      "perplexity,none": 2.952972764251098,
      "perplexity_stderr,none": 0.07155360568679127,
      "acc,none": 0.7327770230933437,
      "acc_stderr,none": 0.006165025727477477,
      "alias": "lambada_openai"
    },
    "harness|openbookqa|0": {
      "acc,none": 0.372,
      "acc_stderr,none": 0.0216371979857224,
      "acc_norm,none": 0.458,
      "acc_norm_stderr,none": 0.022303966774269948,
      "alias": "openbookqa"
    },
    "harness|winogrande|0": {
      "acc,none": 0.7371744277821626,
      "acc_stderr,none": 0.012370922527262008,
      "alias": "winogrande"
    }
  },
  "task_info": {
    "model": "cstr/llama3-8b-spaetzle-v33-int4-inc",
    "revision": "main",
    "private": false,
    "params": 5.74,
    "architectures": "LlamaForCausalLM",
    "quant_type": "GPTQ",
    "precision": "4bit",
    "model_params": 7.04,
    "model_size": 5.74,
    "weight_dtype": "int4",
    "compute_dtype": "float16",
    "gguf_ftype": "*Q4_0.gguf",
    "hardware": "gpu",
    "status": "Pending",
    "submitted_time": "2024-05-28T16:18:54Z",
    "model_type": "quantization",
    "job_id": -1,
    "job_start_time": null,
    "scripts": "ITREX"
  },
  "quantization_config": {
    "autoround_version": "0.11",
    "bits": 4,
    "damp_percent": 0.01,
    "desc_act": false,
    "enable_minmax_tuning": true,
    "group_size": 128,
    "is_marlin_format": false,
    "iters": 200,
    "lr": 0.005,
    "minmax_lr": 0.005,
    "model_file_base_name": "model",
    "model_name_or_path": null,
    "quant_method": "gptq",
    "scale_dtype": "torch.float32",
    "static_groups": false,
    "sym": false,
    "true_sequential": false,
    "use_quant_input": true
  },
  "versions": {
    "harness|boolq|0": 2.0,
    "harness|hellaswag|0": 1.0,
    "harness|arc:easy|0": 1.0,
    "harness|truthfulqa:mc1|0": 2.0,
    "harness|arc:challenge|0": 1.0,
    "harness|truthfulqa:mc2|0": 2.0,
    "harness|piqa|0": 1.0,
    "harness|mmlu|0": null,
    "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0,
    "harness|mmlu_high_school_european_history|0": 0.0,
    "harness|mmlu_high_school_us_history|0": 0.0,
    "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0,
    "harness|mmlu_jurisprudence|0": 0.0,
    "harness|mmlu_logical_fallacies|0": 0.0,
    "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0,
    "harness|mmlu_philosophy|0": 0.0,
    "harness|mmlu_prehistory|0": 0.0,
    "harness|mmlu_professional_law|0": 0.0,
    "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0,
    "harness|mmlu_clinical_knowledge|0": 0.0,
    "harness|mmlu_college_medicine|0": 0.0,
    "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0,
    "harness|mmlu_management|0": 0.0,
    "harness|mmlu_marketing|0": 0.0,
    "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0,
    "harness|mmlu_nutrition|0": 0.0,
    "harness|mmlu_professional_accounting|0": 0.0,
    "harness|mmlu_professional_medicine|0": 0.0,
    "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0,
    "harness|mmlu_high_school_geography|0": 0.0,
    "harness|mmlu_high_school_government_and_politics|0": 0.0,
    "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0,
    "harness|mmlu_high_school_psychology|0": 0.0,
    "harness|mmlu_human_sexuality|0": 0.0,
    "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0,
    "harness|mmlu_security_studies|0": 0.0,
    "harness|mmlu_sociology|0": 0.0,
    "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0,
    "harness|mmlu_anatomy|0": 0.0,
    "harness|mmlu_astronomy|0": 0.0,
    "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0,
    "harness|mmlu_college_computer_science|0": 0.0,
    "harness|mmlu_college_mathematics|0": 0.0,
    "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0,
    "harness|mmlu_conceptual_physics|0": 0.0,
    "harness|mmlu_electrical_engineering|0": 0.0,
    "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0,
    "harness|mmlu_high_school_chemistry|0": 0.0,
    "harness|mmlu_high_school_computer_science|0": 0.0,
    "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0,
    "harness|mmlu_high_school_statistics|0": 0.0,
    "harness|mmlu_machine_learning|0": 0.0,
    "harness|lambada:openai|0": 1.0,
    "harness|openbookqa|0": 1.0,
    "harness|winogrande|0": 1.0
  },
  "n-shot": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 0,
    "hellaswag": 0,
    "lambada_openai": 0,
    "mmlu": 0,
    "mmlu_abstract_algebra": 0,
    "mmlu_anatomy": 0,
    "mmlu_astronomy": 0,
    "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0,
    "mmlu_college_biology": 0,
    "mmlu_college_chemistry": 0,
    "mmlu_college_computer_science": 0,
    "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0,
    "mmlu_college_physics": 0,
    "mmlu_computer_security": 0,
    "mmlu_conceptual_physics": 0,
    "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0,
    "mmlu_elementary_mathematics": 0,
    "mmlu_formal_logic": 0,
    "mmlu_global_facts": 0,
    "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0,
    "mmlu_high_school_computer_science": 0,
    "mmlu_high_school_european_history": 0,
    "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0,
    "mmlu_high_school_macroeconomics": 0,
    "mmlu_high_school_mathematics": 0,
    "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0,
    "mmlu_high_school_psychology": 0,
    "mmlu_high_school_statistics": 0,
    "mmlu_high_school_us_history": 0,
    "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0,
    "mmlu_human_sexuality": 0,
    "mmlu_humanities": 0,
    "mmlu_international_law": 0,
    "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0,
    "mmlu_machine_learning": 0,
    "mmlu_management": 0,
    "mmlu_marketing": 0,
    "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0,
    "mmlu_moral_disputes": 0,
    "mmlu_moral_scenarios": 0,
    "mmlu_nutrition": 0,
    "mmlu_other": 0,
    "mmlu_philosophy": 0,
    "mmlu_prehistory": 0,
    "mmlu_professional_accounting": 0,
    "mmlu_professional_law": 0,
    "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0,
    "mmlu_public_relations": 0,
    "mmlu_security_studies": 0,
    "mmlu_social_sciences": 0,
    "mmlu_sociology": 0,
    "mmlu_stem": 0,
    "mmlu_us_foreign_policy": 0,
    "mmlu_virology": 0,
    "mmlu_world_religions": 0,
    "openbookqa": 0,
    "piqa": 0,
    "truthfulqa_mc1": 0,
    "truthfulqa_mc2": 0,
    "winogrande": 0
  },
  "date": 1716915017.667517,
  "config": {
    "model": "hf",
    "model_args": "pretrained=cstr/llama3-8b-spaetzle-v33-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
    "batch_size": 2,
    "batch_sizes": [],
    "device": "cuda",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  }
}
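
For reference, a minimal Python sketch for reading the results file added in this commit; it assumes a local copy under the filename used above, and the illustrative task keys and metric names ("acc,none", "acc_stderr,none", "alias") match the "results" section of the JSON.

import json

# Hypothetical local path to the file added in this commit.
path = "results_2024-05-29-02-03-16.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]
# Metrics are keyed as "<metric>,<filter>", e.g. "acc,none" / "acc_stderr,none".
for task in ("harness|mmlu|0", "harness|arc:challenge|0", "harness|winogrande|0"):
    entry = results[task]
    print(f'{entry["alias"]:<15} acc={entry["acc,none"]:.4f} +/- {entry["acc_stderr,none"]:.4f}')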