Xuehao committed on
Commit
63ffc0a
1 Parent(s): cd88ae0

Commit from Azure DevOps update Results

TheBloke/neural-chat-7B-v3-3-AWQ/results_2024-05-24-01-04-23.json ADDED
@@ -0,0 +1,583 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-24-01-04-23",
+        "total_evaluation_time_secondes": "",
+        "model_name": "TheBloke/neural-chat-7B-v3-3-AWQ",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 4.15,
+        "model_params": 7.04,
+        "quant_type": "AWQ",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|piqa|0": {
+            "acc,none": 0.8046789989118607,
+            "acc_stderr,none": 0.009249776222397593,
+            "acc_norm,none": 0.8193688792165397,
+            "acc_norm_stderr,none": 0.00897597103733803,
+            "alias": "piqa"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.388,
+            "acc_stderr,none": 0.021814300984787635,
+            "acc_norm,none": 0.462,
+            "acc_norm_stderr,none": 0.022318338119870523,
+            "alias": "openbookqa"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.655148376817367,
+            "acc_stderr,none": 0.004743484528346666,
+            "acc_norm,none": 0.8331009759012149,
+            "acc_norm_stderr,none": 0.0037212361965025054,
+            "alias": "hellaswag"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.5622866894197952,
+            "acc_stderr,none": 0.01449757388110828,
+            "acc_norm,none": 0.5750853242320819,
+            "acc_norm_stderr,none": 0.014445698968520767,
+            "alias": "arc_challenge"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.8164983164983165,
+            "acc_stderr,none": 0.007942658063731468,
+            "acc_norm,none": 0.7588383838383839,
+            "acc_norm_stderr,none": 0.00877802737825802,
+            "alias": "arc_easy"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.5893747329440251,
+            "acc_stderr,none": 0.0039019551919624797,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5330499468650371,
+            "acc_stderr,none": 0.0067882222033944966
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.4365079365079365,
+            "acc_stderr,none": 0.04435932892851466
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.7515151515151515,
+            "acc_stderr,none": 0.03374402644139404
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.7745098039215687,
+            "acc_stderr,none": 0.029331162294251728
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.8059071729957806,
+            "acc_stderr,none": 0.025744902532290927
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.71900826446281,
+            "acc_stderr,none": 0.04103203830514512
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.7314814814814815,
+            "acc_stderr,none": 0.042844679680521934
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.7239263803680982,
+            "acc_stderr,none": 0.035123852837050475
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.630057803468208,
+            "acc_stderr,none": 0.025992472029306386
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.28268156424581004,
+            "acc_stderr,none": 0.015060381730018089
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.6688102893890675,
+            "acc_stderr,none": 0.02673062072800492
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.6944444444444444,
+            "acc_stderr,none": 0.025630824975621334
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.42894393741851367,
+            "acc_stderr,none": 0.012640625443067358
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.783625730994152,
+            "acc_stderr,none": 0.031581495393387324
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.6623752816221435,
+            "acc_stderr,none": 0.008140809701151079
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.67,
+            "acc_stderr,none": 0.047258156262526094
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.6377358490566037,
+            "acc_stderr,none": 0.0295822451283843
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.5549132947976878,
+            "acc_stderr,none": 0.03789401760283647
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.36,
+            "acc_stderr,none": 0.048241815132442176
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6591928251121076,
+            "acc_stderr,none": 0.0318114974705536
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.7864077669902912,
+            "acc_stderr,none": 0.04058042015646033
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8504273504273504,
+            "acc_stderr,none": 0.02336505149175371
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.66,
+            "acc_stderr,none": 0.04760952285695237
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.7943805874840357,
+            "acc_stderr,none": 0.01445250045678583
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.6895424836601307,
+            "acc_stderr,none": 0.026493033225145894
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.425531914893617,
+            "acc_stderr,none": 0.02949482760014436
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.6102941176470589,
+            "acc_stderr,none": 0.029624663581159696
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.46987951807228917,
+            "acc_stderr,none": 0.03885425420866767
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.6971075723106922,
+            "acc_stderr,none": 0.008081348268523797
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.4473684210526316,
+            "acc_stderr,none": 0.04677473004491199
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.6818181818181818,
+            "acc_stderr,none": 0.033184773338453315
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.8290155440414507,
+            "acc_stderr,none": 0.027171213683164545
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.5897435897435898,
+            "acc_stderr,none": 0.02493931390694079
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.6638655462184874,
+            "acc_stderr,none": 0.030684737115135363
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.818348623853211,
+            "acc_stderr,none": 0.016530617409266878
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.7251908396946565,
+            "acc_stderr,none": 0.039153454088478354
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.6111111111111112,
+            "acc_stderr,none": 0.019722058939618065
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6727272727272727,
+            "acc_stderr,none": 0.0449429086625209
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.6979591836734694,
+            "acc_stderr,none": 0.0293936093198798
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.8507462686567164,
+            "acc_stderr,none": 0.025196929874827054
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.8,
+            "acc_stderr,none": 0.04020151261036844
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.4963526799873137,
+            "acc_stderr,none": 0.008533338700097036
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.22,
+            "acc_stderr,none": 0.041633319989322695
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.562962962962963,
+            "acc_stderr,none": 0.042849586397534
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.6710526315789473,
+            "acc_stderr,none": 0.03823428969926603
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.6736111111111112,
+            "acc_stderr,none": 0.03921067198982266
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.4,
+            "acc_stderr,none": 0.049236596391733084
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.55,
+            "acc_stderr,none": 0.049999999999999996
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.36,
+            "acc_stderr,none": 0.04824181513244218
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.43137254901960786,
+            "acc_stderr,none": 0.04928099597287534
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.67,
+            "acc_stderr,none": 0.04725815626252609
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.5063829787234042,
+            "acc_stderr,none": 0.032683358999363366
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.5655172413793104,
+            "acc_stderr,none": 0.04130740879555497
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.373015873015873,
+            "acc_stderr,none": 0.02490699045899257
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7516129032258064,
+            "acc_stderr,none": 0.024580028921481
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.43349753694581283,
+            "acc_stderr,none": 0.034867317274198714
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.67,
+            "acc_stderr,none": 0.04725815626252609
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.3037037037037037,
+            "acc_stderr,none": 0.028037929969114986
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.33774834437086093,
+            "acc_stderr,none": 0.038615575462551684
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.5231481481481481,
+            "acc_stderr,none": 0.03406315360711507
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.44642857142857145,
+            "acc_stderr,none": 0.04718471485219588
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.7616416732438832,
+            "acc_stderr,none": 0.011974948667702302,
+            "alias": "winogrande"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8660550458715597,
+            "acc_stderr,none": 0.005957011888260804,
+            "alias": "boolq"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.47613219094247244,
+            "acc_stderr,none": 0.01748354715696157,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.4398137369791546,
+            "perplexity_stderr,none": 0.07782914675635923,
+            "acc,none": 0.7201630118377644,
+            "acc_stderr,none": 0.006254319132119307,
+            "alias": "lambada_openai"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.6336761508460098,
+            "acc_stderr,none": 0.015232719212668116,
+            "alias": "truthfulqa_mc2"
+        }
+    },
+    "task_info": {
+        "model": "TheBloke/neural-chat-7B-v3-3-AWQ",
+        "revision": "main",
+        "private": false,
+        "params": 7.04,
+        "architectures": "MistralForCausalLM",
+        "quant_type": "AWQ",
+        "precision": "4bit",
+        "model_params": 7.04,
+        "model_size": 4.15,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-05-22T05:47:33Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "bits": 4,
+        "group_size": 128,
+        "modules_to_not_convert": [],
+        "quant_method": "awq",
+        "version": "gemm",
+        "zero_point": true
+    },
+    "versions": {
+        "harness|piqa|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|winogrande|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1716460286.6962833,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=TheBloke/neural-chat-7B-v3-3-AWQ,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 2,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
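
For reference, a minimal Python sketch for reading the file added by this commit. It assumes a local copy of results_2024-05-24-01-04-23.json (the filename from this diff) and prints accuracy with standard error for each top-level task, skipping the per-subject MMLU rows whose aliases start with " - ":

import json

# Path is an assumption: adjust to wherever the results JSON is saved locally.
with open("results_2024-05-24-01-04-23.json") as f:
    data = json.load(f)

for task, metrics in data["results"].items():
    alias = metrics.get("alias", task)
    if alias.startswith(" - "):
        continue  # MMLU sub-category; the aggregate "mmlu" entry covers these
    acc = metrics.get("acc,none")
    err = metrics.get("acc_stderr,none")
    if acc is not None:
        print(f"{alias:20s} acc={acc:.4f} +/- {err:.4f}")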