Xuehao committed
Commit 81c28b8
1 Parent(s): 495f5be

Commit from Azure DevOps update Results

Ramikan-BR/tinyllama-coder-py-v12/results_2024-05-29-07-30-46.json ADDED
@@ -0,0 +1,579 @@
+ {
+   "config_general": {
+     "lighteval_sha": "1.4",
+     "num_few_shot_default": null,
+     "num_fewshot_seeds": null,
+     "override_batch_size": null,
+     "max_samples": null,
+     "job_id": -1,
+     "start_time": null,
+     "end_time": "2024-05-29-07-30-46",
+     "total_evaluation_time_secondes": "",
+     "model_name": "Ramikan-BR/tinyllama-coder-py-v12",
+     "model_sha": "",
+     "model_dtype": "16bit",
+     "model_size": 2.2,
+     "model_params": 1.1,
+     "quant_type": null,
+     "precision": "16bit"
+   },
+   "results": {
+     "harness|piqa|0": {
+       "acc,none": 0.7170837867247007,
+       "acc_stderr,none": 0.010508949177489683,
+       "acc_norm,none": 0.7002176278563657,
+       "acc_norm_stderr,none": 0.01068968696713809,
+       "alias": "piqa"
+     },
+     "harness|openbookqa|0": {
+       "acc,none": 0.222,
+       "acc_stderr,none": 0.01860441475825008,
+       "acc_norm,none": 0.336,
+       "acc_norm_stderr,none": 0.02114479142504886,
+       "alias": "openbookqa"
+     },
+     "harness|hellaswag|0": {
+       "acc,none": 0.42013543118900615,
+       "acc_stderr,none": 0.0049257170080997115,
+       "acc_norm,none": 0.5390360485958973,
+       "acc_norm_stderr,none": 0.0049745511794839475,
+       "alias": "hellaswag"
+     },
+     "harness|truthfulqa:mc2|0": {
+       "acc,none": 0.4081831444027335,
+       "acc_stderr,none": 0.014357220831164353,
+       "alias": "truthfulqa_mc2"
+     },
+     "harness|boolq|0": {
+       "acc,none": 0.6027522935779817,
+       "acc_stderr,none": 0.008558401855851156,
+       "alias": "boolq"
+     },
+     "harness|arc:challenge|0": {
+       "acc,none": 0.2687713310580205,
+       "acc_stderr,none": 0.0129550659637107,
+       "acc_norm,none": 0.28924914675767915,
+       "acc_norm_stderr,none": 0.013250012579393443,
+       "alias": "arc_challenge"
+     },
+     "harness|lambada:openai|0": {
+       "perplexity,none": 7.234604905029736,
+       "perplexity_stderr,none": 0.20590635823186307,
+       "acc,none": 0.5722879875800505,
+       "acc_stderr,none": 0.006892791023967238,
+       "alias": "lambada_openai"
+     },
+     "harness|mmlu|0": {
+       "acc,none": 0.23999430280586811,
+       "acc_stderr,none": 0.0036006300406121335,
+       "alias": "mmlu"
+     },
+     "harness|mmlu_humanities|0": {
+       "alias": " - humanities",
+       "acc,none": 0.24739638682252924,
+       "acc_stderr,none": 0.006292615190785722
+     },
+     "harness|mmlu_formal_logic|0": {
+       "alias": " - formal_logic",
+       "acc,none": 0.30158730158730157,
+       "acc_stderr,none": 0.04104947269903394
+     },
+     "harness|mmlu_high_school_european_history|0": {
+       "alias": " - high_school_european_history",
+       "acc,none": 0.2727272727272727,
+       "acc_stderr,none": 0.03477691162163659
+     },
+     "harness|mmlu_high_school_us_history|0": {
+       "alias": " - high_school_us_history",
+       "acc,none": 0.25,
+       "acc_stderr,none": 0.03039153369274154
+     },
+     "harness|mmlu_high_school_world_history|0": {
+       "alias": " - high_school_world_history",
+       "acc,none": 0.25316455696202533,
+       "acc_stderr,none": 0.0283046579430353
+     },
+     "harness|mmlu_international_law|0": {
+       "alias": " - international_law",
+       "acc,none": 0.2231404958677686,
+       "acc_stderr,none": 0.03800754475228732
+     },
+     "harness|mmlu_jurisprudence|0": {
+       "alias": " - jurisprudence",
+       "acc,none": 0.28703703703703703,
+       "acc_stderr,none": 0.04373313040914761
+     },
+     "harness|mmlu_logical_fallacies|0": {
+       "alias": " - logical_fallacies",
+       "acc,none": 0.22699386503067484,
+       "acc_stderr,none": 0.03291099578615768
+     },
+     "harness|mmlu_moral_disputes|0": {
+       "alias": " - moral_disputes",
+       "acc,none": 0.2543352601156069,
+       "acc_stderr,none": 0.023445826276545543
+     },
+     "harness|mmlu_moral_scenarios|0": {
+       "alias": " - moral_scenarios",
+       "acc,none": 0.23687150837988827,
+       "acc_stderr,none": 0.014219570788103984
+     },
+     "harness|mmlu_philosophy|0": {
+       "alias": " - philosophy",
+       "acc,none": 0.2057877813504823,
+       "acc_stderr,none": 0.022961339906764237
+     },
+     "harness|mmlu_prehistory|0": {
+       "alias": " - prehistory",
+       "acc,none": 0.25,
+       "acc_stderr,none": 0.02409347123262133
+     },
+     "harness|mmlu_professional_law|0": {
+       "alias": " - professional_law",
+       "acc,none": 0.2470664928292047,
+       "acc_stderr,none": 0.011015752255279338
+     },
+     "harness|mmlu_world_religions|0": {
+       "alias": " - world_religions",
+       "acc,none": 0.2982456140350877,
+       "acc_stderr,none": 0.03508771929824565
+     },
+     "harness|mmlu_other|0": {
+       "alias": " - other",
+       "acc,none": 0.246218216929514,
+       "acc_stderr,none": 0.0077130736571925
+     },
+     "harness|mmlu_business_ethics|0": {
+       "alias": " - business_ethics",
+       "acc,none": 0.26,
+       "acc_stderr,none": 0.04408440022768077
+     },
+     "harness|mmlu_clinical_knowledge|0": {
+       "alias": " - clinical_knowledge",
+       "acc,none": 0.21132075471698114,
+       "acc_stderr,none": 0.025125766484827856
+     },
+     "harness|mmlu_college_medicine|0": {
+       "alias": " - college_medicine",
+       "acc,none": 0.23121387283236994,
+       "acc_stderr,none": 0.032147373020294696
+     },
+     "harness|mmlu_global_facts|0": {
+       "alias": " - global_facts",
+       "acc,none": 0.35,
+       "acc_stderr,none": 0.047937248544110196
+     },
+     "harness|mmlu_human_aging|0": {
+       "alias": " - human_aging",
+       "acc,none": 0.27802690582959644,
+       "acc_stderr,none": 0.030069584874494043
+     },
+     "harness|mmlu_management|0": {
+       "alias": " - management",
+       "acc,none": 0.1941747572815534,
+       "acc_stderr,none": 0.03916667762822584
+     },
+     "harness|mmlu_marketing|0": {
+       "alias": " - marketing",
+       "acc,none": 0.2905982905982906,
+       "acc_stderr,none": 0.029745048572674054
+     },
+     "harness|mmlu_medical_genetics|0": {
+       "alias": " - medical_genetics",
+       "acc,none": 0.32,
+       "acc_stderr,none": 0.04688261722621503
+     },
+     "harness|mmlu_miscellaneous|0": {
+       "alias": " - miscellaneous",
+       "acc,none": 0.23627075351213284,
+       "acc_stderr,none": 0.015190473717037512
+     },
+     "harness|mmlu_nutrition|0": {
+       "alias": " - nutrition",
+       "acc,none": 0.25163398692810457,
+       "acc_stderr,none": 0.0248480182638752
+     },
+     "harness|mmlu_professional_accounting|0": {
+       "alias": " - professional_accounting",
+       "acc,none": 0.24822695035460993,
+       "acc_stderr,none": 0.025770015644290396
+     },
+     "harness|mmlu_professional_medicine|0": {
+       "alias": " - professional_medicine",
+       "acc,none": 0.17279411764705882,
+       "acc_stderr,none": 0.022966067585581777
+     },
+     "harness|mmlu_virology|0": {
+       "alias": " - virology",
+       "acc,none": 0.28313253012048195,
+       "acc_stderr,none": 0.03507295431370518
+     },
+     "harness|mmlu_social_sciences|0": {
+       "alias": " - social_sciences",
+       "acc,none": 0.22261943451413715,
+       "acc_stderr,none": 0.007496107836856349
+     },
+     "harness|mmlu_econometrics|0": {
+       "alias": " - econometrics",
+       "acc,none": 0.2719298245614035,
+       "acc_stderr,none": 0.04185774424022057
+     },
+     "harness|mmlu_high_school_geography|0": {
+       "alias": " - high_school_geography",
+       "acc,none": 0.16666666666666666,
+       "acc_stderr,none": 0.026552207828215293
+     },
+     "harness|mmlu_high_school_government_and_politics|0": {
+       "alias": " - high_school_government_and_politics",
+       "acc,none": 0.19689119170984457,
+       "acc_stderr,none": 0.028697873971860688
+     },
+     "harness|mmlu_high_school_macroeconomics|0": {
+       "alias": " - high_school_macroeconomics",
+       "acc,none": 0.2230769230769231,
+       "acc_stderr,none": 0.02110773012724399
+     },
+     "harness|mmlu_high_school_microeconomics|0": {
+       "alias": " - high_school_microeconomics",
+       "acc,none": 0.21428571428571427,
+       "acc_stderr,none": 0.026653531596715487
+     },
+     "harness|mmlu_high_school_psychology|0": {
+       "alias": " - high_school_psychology",
+       "acc,none": 0.20550458715596331,
+       "acc_stderr,none": 0.017324352325016022
+     },
+     "harness|mmlu_human_sexuality|0": {
+       "alias": " - human_sexuality",
+       "acc,none": 0.2366412213740458,
+       "acc_stderr,none": 0.03727673575596919
+     },
+     "harness|mmlu_professional_psychology|0": {
+       "alias": " - professional_psychology",
+       "acc,none": 0.2565359477124183,
+       "acc_stderr,none": 0.01766784161237898
+     },
+     "harness|mmlu_public_relations|0": {
+       "alias": " - public_relations",
+       "acc,none": 0.2545454545454545,
+       "acc_stderr,none": 0.041723430387053825
+     },
+     "harness|mmlu_security_studies|0": {
+       "alias": " - security_studies",
+       "acc,none": 0.1836734693877551,
+       "acc_stderr,none": 0.02478907133200765
+     },
+     "harness|mmlu_sociology|0": {
+       "alias": " - sociology",
+       "acc,none": 0.22885572139303484,
+       "acc_stderr,none": 0.02970528405677241
+     },
+     "harness|mmlu_us_foreign_policy|0": {
+       "alias": " - us_foreign_policy",
+       "acc,none": 0.26,
+       "acc_stderr,none": 0.0440844002276808
+     },
+     "harness|mmlu_stem|0": {
+       "alias": " - stem",
+       "acc,none": 0.23977164605137963,
+       "acc_stderr,none": 0.007594861491941249
+     },
+     "harness|mmlu_abstract_algebra|0": {
+       "alias": " - abstract_algebra",
+       "acc,none": 0.2,
+       "acc_stderr,none": 0.04020151261036843
+     },
+     "harness|mmlu_anatomy|0": {
+       "alias": " - anatomy",
+       "acc,none": 0.2222222222222222,
+       "acc_stderr,none": 0.035914440841969694
+     },
+     "harness|mmlu_astronomy|0": {
+       "alias": " - astronomy",
+       "acc,none": 0.20394736842105263,
+       "acc_stderr,none": 0.032790004063100515
+     },
+     "harness|mmlu_college_biology|0": {
+       "alias": " - college_biology",
+       "acc,none": 0.2847222222222222,
+       "acc_stderr,none": 0.03773809990686935
+     },
+     "harness|mmlu_college_chemistry|0": {
+       "alias": " - college_chemistry",
+       "acc,none": 0.25,
+       "acc_stderr,none": 0.04351941398892446
+     },
+     "harness|mmlu_college_computer_science|0": {
+       "alias": " - college_computer_science",
+       "acc,none": 0.2,
+       "acc_stderr,none": 0.04020151261036845
+     },
+     "harness|mmlu_college_mathematics|0": {
+       "alias": " - college_mathematics",
+       "acc,none": 0.27,
+       "acc_stderr,none": 0.0446196043338474
+     },
+     "harness|mmlu_college_physics|0": {
+       "alias": " - college_physics",
+       "acc,none": 0.2647058823529412,
+       "acc_stderr,none": 0.04389869956808777
+     },
+     "harness|mmlu_computer_security|0": {
+       "alias": " - computer_security",
+       "acc,none": 0.3,
+       "acc_stderr,none": 0.046056618647183814
+     },
+     "harness|mmlu_conceptual_physics|0": {
+       "alias": " - conceptual_physics",
+       "acc,none": 0.28085106382978725,
+       "acc_stderr,none": 0.029379170464124818
+     },
+     "harness|mmlu_electrical_engineering|0": {
+       "alias": " - electrical_engineering",
+       "acc,none": 0.2482758620689655,
+       "acc_stderr,none": 0.0360010569272777
+     },
+     "harness|mmlu_elementary_mathematics|0": {
+       "alias": " - elementary_mathematics",
+       "acc,none": 0.24074074074074073,
+       "acc_stderr,none": 0.02201908001221789
+     },
+     "harness|mmlu_high_school_biology|0": {
+       "alias": " - high_school_biology",
+       "acc,none": 0.18387096774193548,
+       "acc_stderr,none": 0.022037217340267833
+     },
+     "harness|mmlu_high_school_chemistry|0": {
+       "alias": " - high_school_chemistry",
+       "acc,none": 0.1921182266009852,
+       "acc_stderr,none": 0.027719315709614785
+     },
+     "harness|mmlu_high_school_computer_science|0": {
+       "alias": " - high_school_computer_science",
+       "acc,none": 0.27,
+       "acc_stderr,none": 0.044619604333847394
+     },
+     "harness|mmlu_high_school_mathematics|0": {
+       "alias": " - high_school_mathematics",
+       "acc,none": 0.28888888888888886,
+       "acc_stderr,none": 0.027634907264178544
+     },
+     "harness|mmlu_high_school_physics|0": {
+       "alias": " - high_school_physics",
+       "acc,none": 0.25165562913907286,
+       "acc_stderr,none": 0.035433042343899844
+     },
+     "harness|mmlu_high_school_statistics|0": {
+       "alias": " - high_school_statistics",
+       "acc,none": 0.18981481481481483,
+       "acc_stderr,none": 0.026744714834691936
+     },
+     "harness|mmlu_machine_learning|0": {
+       "alias": " - machine_learning",
+       "acc,none": 0.2857142857142857,
+       "acc_stderr,none": 0.04287858751340455
+     },
+     "harness|arc:easy|0": {
+       "acc,none": 0.5505050505050505,
+       "acc_stderr,none": 0.010207308833916044,
+       "acc_norm,none": 0.4764309764309764,
+       "acc_norm_stderr,none": 0.01024837858555403,
+       "alias": "arc_easy"
+     },
+     "harness|truthfulqa:mc1|0": {
+       "acc,none": 0.25458996328029376,
+       "acc_stderr,none": 0.015250117079156503,
+       "alias": "truthfulqa_mc1"
+     },
+     "harness|winogrande|0": {
+       "acc,none": 0.5769534333070244,
+       "acc_stderr,none": 0.013885055359056481,
+       "alias": "winogrande"
+     }
+   },
+   "task_info": {
+     "model": "Ramikan-BR/tinyllama-coder-py-v12",
+     "revision": "abd0469",
+     "private": false,
+     "params": 2.2,
+     "architectures": "LlamaForCausalLM",
+     "quant_type": null,
+     "precision": "16bit",
+     "model_params": 1.1,
+     "model_size": 2.2,
+     "weight_dtype": "float16",
+     "compute_dtype": "float16",
+     "gguf_ftype": "*Q4_0.gguf",
+     "hardware": "gpu",
+     "status": "Pending",
+     "submitted_time": "2024-05-28T21:46:32Z",
+     "model_type": "original",
+     "job_id": -1,
+     "job_start_time": null,
+     "scripts": "ITREX"
+   },
+   "quantization_config": {
+     "quant_method": null,
+     "ftype": "*Q4_0.gguf"
+   },
+   "versions": {
+     "harness|piqa|0": 1.0,
+     "harness|openbookqa|0": 1.0,
+     "harness|hellaswag|0": 1.0,
+     "harness|truthfulqa:mc2|0": 2.0,
+     "harness|boolq|0": 2.0,
+     "harness|arc:challenge|0": 1.0,
+     "harness|lambada:openai|0": 1.0,
+     "harness|mmlu|0": null,
+     "harness|mmlu_humanities|0": null,
+     "harness|mmlu_formal_logic|0": 0.0,
+     "harness|mmlu_high_school_european_history|0": 0.0,
+     "harness|mmlu_high_school_us_history|0": 0.0,
+     "harness|mmlu_high_school_world_history|0": 0.0,
+     "harness|mmlu_international_law|0": 0.0,
+     "harness|mmlu_jurisprudence|0": 0.0,
+     "harness|mmlu_logical_fallacies|0": 0.0,
+     "harness|mmlu_moral_disputes|0": 0.0,
+     "harness|mmlu_moral_scenarios|0": 0.0,
+     "harness|mmlu_philosophy|0": 0.0,
+     "harness|mmlu_prehistory|0": 0.0,
+     "harness|mmlu_professional_law|0": 0.0,
+     "harness|mmlu_world_religions|0": 0.0,
+     "harness|mmlu_other|0": null,
+     "harness|mmlu_business_ethics|0": 0.0,
+     "harness|mmlu_clinical_knowledge|0": 0.0,
+     "harness|mmlu_college_medicine|0": 0.0,
+     "harness|mmlu_global_facts|0": 0.0,
+     "harness|mmlu_human_aging|0": 0.0,
+     "harness|mmlu_management|0": 0.0,
+     "harness|mmlu_marketing|0": 0.0,
+     "harness|mmlu_medical_genetics|0": 0.0,
+     "harness|mmlu_miscellaneous|0": 0.0,
+     "harness|mmlu_nutrition|0": 0.0,
+     "harness|mmlu_professional_accounting|0": 0.0,
+     "harness|mmlu_professional_medicine|0": 0.0,
+     "harness|mmlu_virology|0": 0.0,
+     "harness|mmlu_social_sciences|0": null,
+     "harness|mmlu_econometrics|0": 0.0,
+     "harness|mmlu_high_school_geography|0": 0.0,
+     "harness|mmlu_high_school_government_and_politics|0": 0.0,
+     "harness|mmlu_high_school_macroeconomics|0": 0.0,
+     "harness|mmlu_high_school_microeconomics|0": 0.0,
+     "harness|mmlu_high_school_psychology|0": 0.0,
+     "harness|mmlu_human_sexuality|0": 0.0,
+     "harness|mmlu_professional_psychology|0": 0.0,
+     "harness|mmlu_public_relations|0": 0.0,
+     "harness|mmlu_security_studies|0": 0.0,
+     "harness|mmlu_sociology|0": 0.0,
+     "harness|mmlu_us_foreign_policy|0": 0.0,
+     "harness|mmlu_stem|0": null,
+     "harness|mmlu_abstract_algebra|0": 0.0,
+     "harness|mmlu_anatomy|0": 0.0,
+     "harness|mmlu_astronomy|0": 0.0,
+     "harness|mmlu_college_biology|0": 0.0,
+     "harness|mmlu_college_chemistry|0": 0.0,
+     "harness|mmlu_college_computer_science|0": 0.0,
+     "harness|mmlu_college_mathematics|0": 0.0,
+     "harness|mmlu_college_physics|0": 0.0,
+     "harness|mmlu_computer_security|0": 0.0,
+     "harness|mmlu_conceptual_physics|0": 0.0,
+     "harness|mmlu_electrical_engineering|0": 0.0,
+     "harness|mmlu_elementary_mathematics|0": 0.0,
+     "harness|mmlu_high_school_biology|0": 0.0,
+     "harness|mmlu_high_school_chemistry|0": 0.0,
+     "harness|mmlu_high_school_computer_science|0": 0.0,
+     "harness|mmlu_high_school_mathematics|0": 0.0,
+     "harness|mmlu_high_school_physics|0": 0.0,
+     "harness|mmlu_high_school_statistics|0": 0.0,
+     "harness|mmlu_machine_learning|0": 0.0,
+     "harness|arc:easy|0": 1.0,
+     "harness|truthfulqa:mc1|0": 2.0,
+     "harness|winogrande|0": 1.0
+   },
+   "n-shot": {
+     "arc_challenge": 0,
+     "arc_easy": 0,
+     "boolq": 0,
+     "hellaswag": 0,
+     "lambada_openai": 0,
+     "mmlu": 0,
+     "mmlu_abstract_algebra": 0,
+     "mmlu_anatomy": 0,
+     "mmlu_astronomy": 0,
+     "mmlu_business_ethics": 0,
+     "mmlu_clinical_knowledge": 0,
+     "mmlu_college_biology": 0,
+     "mmlu_college_chemistry": 0,
+     "mmlu_college_computer_science": 0,
+     "mmlu_college_mathematics": 0,
+     "mmlu_college_medicine": 0,
+     "mmlu_college_physics": 0,
+     "mmlu_computer_security": 0,
+     "mmlu_conceptual_physics": 0,
+     "mmlu_econometrics": 0,
+     "mmlu_electrical_engineering": 0,
+     "mmlu_elementary_mathematics": 0,
+     "mmlu_formal_logic": 0,
+     "mmlu_global_facts": 0,
+     "mmlu_high_school_biology": 0,
+     "mmlu_high_school_chemistry": 0,
+     "mmlu_high_school_computer_science": 0,
+     "mmlu_high_school_european_history": 0,
+     "mmlu_high_school_geography": 0,
+     "mmlu_high_school_government_and_politics": 0,
+     "mmlu_high_school_macroeconomics": 0,
+     "mmlu_high_school_mathematics": 0,
+     "mmlu_high_school_microeconomics": 0,
+     "mmlu_high_school_physics": 0,
+     "mmlu_high_school_psychology": 0,
+     "mmlu_high_school_statistics": 0,
+     "mmlu_high_school_us_history": 0,
+     "mmlu_high_school_world_history": 0,
+     "mmlu_human_aging": 0,
+     "mmlu_human_sexuality": 0,
+     "mmlu_humanities": 0,
+     "mmlu_international_law": 0,
+     "mmlu_jurisprudence": 0,
+     "mmlu_logical_fallacies": 0,
+     "mmlu_machine_learning": 0,
+     "mmlu_management": 0,
+     "mmlu_marketing": 0,
+     "mmlu_medical_genetics": 0,
+     "mmlu_miscellaneous": 0,
+     "mmlu_moral_disputes": 0,
+     "mmlu_moral_scenarios": 0,
+     "mmlu_nutrition": 0,
+     "mmlu_other": 0,
+     "mmlu_philosophy": 0,
+     "mmlu_prehistory": 0,
+     "mmlu_professional_accounting": 0,
+     "mmlu_professional_law": 0,
+     "mmlu_professional_medicine": 0,
+     "mmlu_professional_psychology": 0,
+     "mmlu_public_relations": 0,
+     "mmlu_security_studies": 0,
+     "mmlu_social_sciences": 0,
+     "mmlu_sociology": 0,
+     "mmlu_stem": 0,
+     "mmlu_us_foreign_policy": 0,
+     "mmlu_virology": 0,
+     "mmlu_world_religions": 0,
+     "openbookqa": 0,
+     "piqa": 0,
+     "truthfulqa_mc1": 0,
+     "truthfulqa_mc2": 0,
+     "winogrande": 0
+   },
+   "date": 1716937238.399211,
+   "config": {
+     "model": "hf",
+     "model_args": "pretrained=Ramikan-BR/tinyllama-coder-py-v12,trust_remote_code=True,dtype=float16,_commit_hash=abd0469",
+     "batch_size": 4,
+     "batch_sizes": [],
+     "device": "cuda",
+     "use_cache": null,
+     "limit": null,
+     "bootstrap_iters": 100000,
+     "gen_kwargs": null
+   }
+ }
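
For reference, the added file follows the harness results layout shown above: each entry under "results" is keyed "harness|<task>|<n_shot>" and carries metric fields such as "acc,none" and "acc_stderr,none". A minimal Python sketch for loading the file and listing per-task accuracy follows; the local path is an assumption (point it at wherever you saved the committed JSON).

    import json

    # Assumed local copy of the file added in this commit.
    path = "results_2024-05-29-07-30-46.json"
    with open(path) as f:
        report = json.load(f)

    # Print accuracy (with stderr) for every task that reports one;
    # lambada additionally reports "perplexity,none", skipped here.
    for task, metrics in report["results"].items():
        acc = metrics.get("acc,none")
        if acc is not None:
            stderr = metrics.get("acc_stderr,none", 0.0)
            print(f"{task:60s} acc={acc:.4f} +/- {stderr:.4f}")

    # The MMLU aggregate is stored directly alongside its per-subject entries.
    print("MMLU overall:", report["results"]["harness|mmlu|0"]["acc,none"])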