{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 145,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Defaulting to user installation because normal site-packages is not writeable\n",
      "Requirement already satisfied: pandas in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (2.2.0)\n",
      "Requirement already satisfied: numpy<2,>=1.22.4 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (1.26.1)\n",
      "Requirement already satisfied: python-dateutil>=2.8.2 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2.8.2)\n",
      "Requirement already satisfied: pytz>=2020.1 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2024.1)\n",
      "Requirement already satisfied: tzdata>=2022.7 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2024.1)\n",
      "Requirement already satisfied: six>=1.5 in /Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/site-packages (from python-dateutil>=2.8.2->pandas) (1.15.0)\n",
      "\u001b[33mWARNING: You are using pip version 21.2.4; however, version 24.0 is available.\n",
      "You should consider upgrading via the '/Library/Developer/CommandLineTools/usr/bin/python3 -m pip install --upgrade pip' command.\u001b[0m\n"
     ]
    }
   ],
   "source": [
    "!pip3 install pandas"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Get the filelist\n",
    "\n",
    "For the full results.json"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 146,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Found 5267 results.json files\n"
     ]
    }
   ],
   "source": [
    "import glob\n",
    "\n",
    "# Specify the path to the folder containing the results.json files\n",
    "folder_path = \"lm-eval-output\"\n",
    "\n",
    "# Use glob to find all the results.json files\n",
    "results_json_files = glob.glob(f\"{folder_path}/**/results.json\", recursive=True)\n",
    "\n",
    "# Show total number of results.json files found\n",
    "print(f\"Found {len(results_json_files)} results.json files\")\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Process all the results.json\n",
    "\n",
    "One file at a time"
   ]
  },
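  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A minimal sketch of the per-file parsing step is below. Assumptions: the helper name `parse_results_json` is hypothetical, and the path layout `lm-eval-output/<org>/<model>/<confStr>/.../results.json` is inferred from the printed example record, not confirmed by the source."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import json\n",
    "import os\n",
    "\n",
    "def parse_results_json(file_path):\n",
    "    # Hypothetical sketch: assumes the path layout is\n",
    "    # lm-eval-output/<org>/<model>/<confStr>/.../results.json\n",
    "    parts = file_path.split(os.sep)\n",
    "    model_name = f\"{parts[1]}/{parts[2]}\"\n",
    "    conf_str = parts[3]\n",
    "    # Parse e.g. \"dtype=bfloat16,trust_remote_code=True\" into a dict\n",
    "    conf_obj = dict(kv.split(\"=\", 1) for kv in conf_str.split(\",\"))\n",
    "    with open(file_path) as f:\n",
    "        results = json.load(f).get(\"results\", {})\n",
    "    # Mirror the record shape shown in the processed example below\n",
    "    return {\n",
    "        \"name\": model_name,\n",
    "        \"config\": {\n",
    "            conf_str: {\"confStr\": conf_str, \"confObj\": conf_obj, \"results\": results}\n",
    "        },\n",
    "    }\n",
    "\n",
    "# e.g. parsed = parse_results_json(results_json_files[0])"
   ]
  },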
  {
   "cell_type": "code",
   "execution_count": 147,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Processed example:  {'name': 'mistralai/Mistral-7B-v0.1', 'config': {'dtype=bfloat16,trust_remote_code=True': {'confStr': 'dtype=bfloat16,trust_remote_code=True', 'confObj': {'dtype': 'bfloat16', 'trust_remote_code': 'True'}, 'results': {'cola': {'mcc,none': -0.05103445794224817, 'mcc_stderr,none': 0.03076801823137936, 'alias': 'cola'}, 'anli': {'acc,none': 0.3803125, 'acc_stderr,none': 0.015133650384246593, 'alias': 'anli'}, 'anli_r1': {'acc,none': 0.377, 'acc_stderr,none': 0.01533317012577988, 'alias': ' - anli_r1'}, 'anli_r2': {'acc,none': 0.376, 'acc_stderr,none': 0.015325105508898125, 'alias': ' - anli_r2'}, 'anli_r3': {'acc,none': 0.38666666666666666, 'acc_stderr,none': 0.014063941778353468, 'alias': ' - anli_r3'}, 'wsc': {'acc,none': 0.40384615384615385, 'acc_stderr,none': 0.04834688952654018, 'alias': ' - wsc'}, 'lambada_cloze': {'perplexity,none': 92.9621493847383, 'perplexity_stderr,none': 4.749192287461508, 'acc,none': 0.08354356685425965, 'acc_stderr,none': 0.017359537873426072, 'alias': 'lambada_cloze'}, 'lambada_openai_cloze_yaml': {'perplexity,none': 84.91463890370277, 'perplexity_stderr,none': 2.1929498791439825, 'acc,none': 0.04967979817581991, 'acc_stderr,none': 0.0030271710751734893, 'alias': ' - lambada_openai_cloze_yaml'}, 'lambada_standard_cloze_yaml': {'perplexity,none': 101.0096598657738, 'perplexity_stderr,none': 2.813806288232997, 'acc,none': 0.1174073355326994, 'acc_stderr,none': 0.004484766596365691, 'alias': ' - lambada_standard_cloze_yaml'}, 'glue': {'acc,none': 0.5149773701762745, 'acc_stderr,none': 0.0011586493887209115, 'f1,none': 0.3490504972495486, 'f1_stderr,none': 0.0013384442216884647, 'mcc,none': -0.04847021005996873, 'mcc_stderr,none': 0.030783455837743674, 'alias': 'glue'}, 'mnli': {'acc,none': 0.4542027508914926, 'acc_stderr,none': 0.005025942602094432, 'alias': 'mnli'}, 'mnli_mismatch': {'acc,none': 0.4631814483319772, 'acc_stderr,none': 0.005029102510704409, 'alias': 'mnli_mismatch'}, 'mrpc': {'acc,none': 0.6568627450980392, 'acc_stderr,none': 0.023532824020694156, 'f1,none': 0.7426470588235294, 'f1_stderr,none': 0.02115915180153455, 'alias': 'mrpc'}, 'qnli': {'acc,none': 0.49789492952590153, 'acc_stderr,none': 0.00676535059208955, 'alias': 'qnli'}, 'qqp': {'acc,none': 0.5387336136532278, 'acc_stderr,none': 0.0024792278452134536, 'f1,none': 0.3456720816813445, 'f1_stderr,none': 0.0036333557486569264, 'alias': 'qqp'}, 'rte': {'acc,none': 0.6750902527075813, 'acc_stderr,none': 0.028190822551170353, 'alias': 'rte'}, 'sst2': {'acc,none': 0.6708715596330275, 'acc_stderr,none': 0.01592184233279754, 'alias': 'sst2'}, 'wnli': {'acc,none': 0.5774647887323944, 'acc_stderr,none': 0.05903984205682581, 'alias': 'wnli'}, 'blimp': {'acc,none': 0.8289253731343283, 'acc_stderr,none': 0.1651542013152315, 'alias': ' - blimp'}, 'blimp_adjunct_island': {'acc,none': 0.901, 'acc_stderr,none': 0.009449248027662753, 'alias': '  - blimp_adjunct_island'}, 'blimp_anaphor_gender_agreement': {'acc,none': 0.99, 'acc_stderr,none': 0.003148000938676753, 'alias': '  - blimp_anaphor_gender_agreement'}, 'blimp_anaphor_number_agreement': {'acc,none': 0.996, 'acc_stderr,none': 0.0019969947390987295, 'alias': '  - blimp_anaphor_number_agreement'}, 'blimp_animate_subject_passive': {'acc,none': 0.813, 'acc_stderr,none': 0.012336254828074118, 'alias': '  - blimp_animate_subject_passive'}, 'blimp_animate_subject_trans': {'acc,none': 0.898, 'acc_stderr,none': 0.009575368801653886, 'alias': '  - blimp_animate_subject_trans'}, 'blimp_causative': {'acc,none': 0.758, 'acc_stderr,none': 
0.013550631705555965, 'alias': '  - blimp_causative'}, 'blimp_complex_NP_island': {'acc,none': 0.573, 'acc_stderr,none': 0.01564978964446222, 'alias': '  - blimp_complex_NP_island'}, 'blimp_coordinate_structure_constraint_complex_left_branch': {'acc,none': 0.794, 'acc_stderr,none': 0.012795613612786548, 'alias': '  - blimp_coordinate_structure_constraint_complex_left_branch'}, 'blimp_coordinate_structure_constraint_object_extraction': {'acc,none': 0.878, 'acc_stderr,none': 0.010354864712936701, 'alias': '  - blimp_coordinate_structure_constraint_object_extraction'}, 'blimp_determiner_noun_agreement_1': {'acc,none': 0.997, 'acc_stderr,none': 0.0017303161543469276, 'alias': '  - blimp_determiner_noun_agreement_1'}, 'blimp_determiner_noun_agreement_2': {'acc,none': 0.99, 'acc_stderr,none': 0.00314800093867677, 'alias': '  - blimp_determiner_noun_agreement_2'}, 'blimp_determiner_noun_agreement_irregular_1': {'acc,none': 0.964, 'acc_stderr,none': 0.005893957816165549, 'alias': '  - blimp_determiner_noun_agreement_irregular_1'}, 'blimp_determiner_noun_agreement_irregular_2': {'acc,none': 0.956, 'acc_stderr,none': 0.00648892179842742, 'alias': '  - blimp_determiner_noun_agreement_irregular_2'}, 'blimp_determiner_noun_agreement_with_adj_2': {'acc,none': 0.962, 'acc_stderr,none': 0.006049181150584937, 'alias': '  - blimp_determiner_noun_agreement_with_adj_2'}, 'blimp_determiner_noun_agreement_with_adj_irregular_1': {'acc,none': 0.935, 'acc_stderr,none': 0.007799733061832013, 'alias': '  - blimp_determiner_noun_agreement_with_adj_irregular_1'}, 'blimp_determiner_noun_agreement_with_adj_irregular_2': {'acc,none': 0.939, 'acc_stderr,none': 0.007572076091557415, 'alias': '  - blimp_determiner_noun_agreement_with_adj_irregular_2'}, 'blimp_determiner_noun_agreement_with_adjective_1': {'acc,none': 0.982, 'acc_stderr,none': 0.004206387249611458, 'alias': '  - blimp_determiner_noun_agreement_with_adjective_1'}, 'blimp_distractor_agreement_relational_noun': {'acc,none': 0.923, 'acc_stderr,none': 0.00843458014024064, 'alias': '  - blimp_distractor_agreement_relational_noun'}, 'blimp_distractor_agreement_relative_clause': {'acc,none': 0.79, 'acc_stderr,none': 0.012886662332274534, 'alias': '  - blimp_distractor_agreement_relative_clause'}, 'blimp_drop_argument': {'acc,none': 0.753, 'acc_stderr,none': 0.013644675781314121, 'alias': '  - blimp_drop_argument'}, 'blimp_ellipsis_n_bar_1': {'acc,none': 0.811, 'acc_stderr,none': 0.01238678458811771, 'alias': '  - blimp_ellipsis_n_bar_1'}, 'blimp_ellipsis_n_bar_2': {'acc,none': 0.945, 'acc_stderr,none': 0.007212976294639237, 'alias': '  - blimp_ellipsis_n_bar_2'}, 'blimp_existential_there_object_raising': {'acc,none': 0.87, 'acc_stderr,none': 0.010640169792499368, 'alias': '  - blimp_existential_there_object_raising'}, 'blimp_existential_there_quantifiers_1': {'acc,none': 0.987, 'acc_stderr,none': 0.0035838308894036337, 'alias': '  - blimp_existential_there_quantifiers_1'}, 'blimp_existential_there_quantifiers_2': {'acc,none': 0.178, 'acc_stderr,none': 0.012102167676183587, 'alias': '  - blimp_existential_there_quantifiers_2'}, 'blimp_existential_there_subject_raising': {'acc,none': 0.906, 'acc_stderr,none': 0.009233052000787733, 'alias': '  - blimp_existential_there_subject_raising'}, 'blimp_expletive_it_object_raising': {'acc,none': 0.815, 'acc_stderr,none': 0.012285191326386675, 'alias': '  - blimp_expletive_it_object_raising'}, 'blimp_inchoative': {'acc,none': 0.649, 'acc_stderr,none': 0.015100563798316405, 'alias': '  - blimp_inchoative'}, 'blimp_intransitive': 
{'acc,none': 0.794, 'acc_stderr,none': 0.012795613612786555, 'alias': '  - blimp_intransitive'}, 'blimp_irregular_past_participle_adjectives': {'acc,none': 0.99, 'acc_stderr,none': 0.0031480009386767676, 'alias': '  - blimp_irregular_past_participle_adjectives'}, 'blimp_irregular_past_participle_verbs': {'acc,none': 0.928, 'acc_stderr,none': 0.008178195576218681, 'alias': '  - blimp_irregular_past_participle_verbs'}, 'blimp_irregular_plural_subject_verb_agreement_1': {'acc,none': 0.923, 'acc_stderr,none': 0.00843458014024065, 'alias': '  - blimp_irregular_plural_subject_verb_agreement_1'}, 'blimp_irregular_plural_subject_verb_agreement_2': {'acc,none': 0.921, 'acc_stderr,none': 0.008534156773333452, 'alias': '  - blimp_irregular_plural_subject_verb_agreement_2'}, 'blimp_left_branch_island_echo_question': {'acc,none': 0.549, 'acc_stderr,none': 0.01574315237958554, 'alias': '  - blimp_left_branch_island_echo_question'}, 'blimp_left_branch_island_simple_question': {'acc,none': 0.93, 'acc_stderr,none': 0.008072494358323508, 'alias': '  - blimp_left_branch_island_simple_question'}, 'blimp_matrix_question_npi_licensor_present': {'acc,none': 0.546, 'acc_stderr,none': 0.015752210388771847, 'alias': '  - blimp_matrix_question_npi_licensor_present'}, 'blimp_npi_present_1': {'acc,none': 0.638, 'acc_stderr,none': 0.0152048409129195, 'alias': '  - blimp_npi_present_1'}, 'blimp_npi_present_2': {'acc,none': 0.629, 'acc_stderr,none': 0.015283736211823188, 'alias': '  - blimp_npi_present_2'}, 'blimp_only_npi_licensor_present': {'acc,none': 0.991, 'acc_stderr,none': 0.002987963843142655, 'alias': '  - blimp_only_npi_licensor_present'}, 'blimp_only_npi_scope': {'acc,none': 0.754, 'acc_stderr,none': 0.013626065817750641, 'alias': '  - blimp_only_npi_scope'}, 'blimp_passive_1': {'acc,none': 0.901, 'acc_stderr,none': 0.009449248027662742, 'alias': '  - blimp_passive_1'}, 'blimp_passive_2': {'acc,none': 0.904, 'acc_stderr,none': 0.00932045443478322, 'alias': '  - blimp_passive_2'}, 'blimp_principle_A_c_command': {'acc,none': 0.812, 'acc_stderr,none': 0.01236158601510377, 'alias': '  - blimp_principle_A_c_command'}, 'blimp_principle_A_case_1': {'acc,none': 1.0, 'acc_stderr,none': 0.0, 'alias': '  - blimp_principle_A_case_1'}, 'blimp_principle_A_case_2': {'acc,none': 0.94, 'acc_stderr,none': 0.007513751157474927, 'alias': '  - blimp_principle_A_case_2'}, 'blimp_principle_A_domain_1': {'acc,none': 0.999, 'acc_stderr,none': 0.0010000000000000132, 'alias': '  - blimp_principle_A_domain_1'}, 'blimp_principle_A_domain_2': {'acc,none': 0.869, 'acc_stderr,none': 0.01067487484483796, 'alias': '  - blimp_principle_A_domain_2'}, 'blimp_principle_A_domain_3': {'acc,none': 0.634, 'acc_stderr,none': 0.015240612726405756, 'alias': '  - blimp_principle_A_domain_3'}, 'blimp_principle_A_reconstruction': {'acc,none': 0.48, 'acc_stderr,none': 0.01580663942303517, 'alias': '  - blimp_principle_A_reconstruction'}, 'blimp_regular_plural_subject_verb_agreement_1': {'acc,none': 0.964, 'acc_stderr,none': 0.005893957816165545, 'alias': '  - blimp_regular_plural_subject_verb_agreement_1'}, 'blimp_regular_plural_subject_verb_agreement_2': {'acc,none': 0.879, 'acc_stderr,none': 0.010318210380946088, 'alias': '  - blimp_regular_plural_subject_verb_agreement_2'}, 'blimp_sentential_negation_npi_licensor_present': {'acc,none': 0.996, 'acc_stderr,none': 0.0019969947390987295, 'alias': '  - blimp_sentential_negation_npi_licensor_present'}, 'blimp_sentential_negation_npi_scope': {'acc,none': 0.751, 'acc_stderr,none': 0.01368160027870233, 'alias': '  
- blimp_sentential_negation_npi_scope'}, 'blimp_sentential_subject_island': {'acc,none': 0.502, 'acc_stderr,none': 0.015819173374302702, 'alias': '  - blimp_sentential_subject_island'}, 'blimp_superlative_quantifiers_1': {'acc,none': 0.955, 'acc_stderr,none': 0.0065588122414061215, 'alias': '  - blimp_superlative_quantifiers_1'}, 'blimp_superlative_quantifiers_2': {'acc,none': 0.963, 'acc_stderr,none': 0.005972157622389623, 'alias': '  - blimp_superlative_quantifiers_2'}, 'blimp_tough_vs_raising_1': {'acc,none': 0.594, 'acc_stderr,none': 0.015537226438634595, 'alias': '  - blimp_tough_vs_raising_1'}, 'blimp_tough_vs_raising_2': {'acc,none': 0.885, 'acc_stderr,none': 0.01009340759490462, 'alias': '  - blimp_tough_vs_raising_2'}, 'blimp_transitive': {'acc,none': 0.886, 'acc_stderr,none': 0.010055103435823335, 'alias': '  - blimp_transitive'}, 'blimp_wh_island': {'acc,none': 0.744, 'acc_stderr,none': 0.013807775152234195, 'alias': '  - blimp_wh_island'}, 'blimp_wh_questions_object_gap': {'acc,none': 0.852, 'acc_stderr,none': 0.011234866364235247, 'alias': '  - blimp_wh_questions_object_gap'}, 'blimp_wh_questions_subject_gap': {'acc,none': 0.922, 'acc_stderr,none': 0.008484573530118581, 'alias': '  - blimp_wh_questions_subject_gap'}, 'blimp_wh_questions_subject_gap_long_distance': {'acc,none': 0.92, 'acc_stderr,none': 0.008583336977753653, 'alias': '  - blimp_wh_questions_subject_gap_long_distance'}, 'blimp_wh_vs_that_no_gap': {'acc,none': 0.977, 'acc_stderr,none': 0.004742730594656796, 'alias': '  - blimp_wh_vs_that_no_gap'}, 'blimp_wh_vs_that_no_gap_long_distance': {'acc,none': 0.97, 'acc_stderr,none': 0.005397140829099193, 'alias': '  - blimp_wh_vs_that_no_gap_long_distance'}, 'blimp_wh_vs_that_with_gap': {'acc,none': 0.364, 'acc_stderr,none': 0.01522286884052202, 'alias': '  - blimp_wh_vs_that_with_gap'}, 'blimp_wh_vs_that_with_gap_long_distance': {'acc,none': 0.329, 'acc_stderr,none': 0.014865395385928355, 'alias': '  - blimp_wh_vs_that_with_gap_long_distance'}, 'sciq': {'acc,none': 0.959, 'acc_stderr,none': 0.006273624021118743, 'acc_norm,none': 0.939, 'acc_norm_stderr,none': 0.007572076091557429, 'alias': ' - sciq'}, 'xcopa': {'acc,none': 0.5594545454545454, 'acc_stderr,none': 0.05387910421610255, 'alias': 'xcopa'}, 'xcopa_et': {'acc,none': 0.472, 'acc_stderr,none': 0.022347949832668093, 'alias': ' - xcopa_et'}, 'xcopa_ht': {'acc,none': 0.516, 'acc_stderr,none': 0.022371610982580396, 'alias': ' - xcopa_ht'}, 'xcopa_id': {'acc,none': 0.584, 'acc_stderr,none': 0.022064943313928866, 'alias': ' - xcopa_id'}, 'xcopa_it': {'acc,none': 0.66, 'acc_stderr,none': 0.021206117013673066, 'alias': ' - xcopa_it'}, 'xcopa_qu': {'acc,none': 0.484, 'acc_stderr,none': 0.022371610982580396, 'alias': ' - xcopa_qu'}, 'xcopa_sw': {'acc,none': 0.518, 'acc_stderr,none': 0.02236856511738799, 'alias': ' - xcopa_sw'}, 'xcopa_ta': {'acc,none': 0.546, 'acc_stderr,none': 0.022288147591176945, 'alias': ' - xcopa_ta'}, 'xcopa_th': {'acc,none': 0.566, 'acc_stderr,none': 0.02218721580302901, 'alias': ' - xcopa_th'}, 'xcopa_tr': {'acc,none': 0.57, 'acc_stderr,none': 0.022162634426652835, 'alias': ' - xcopa_tr'}, 'xcopa_vi': {'acc,none': 0.588, 'acc_stderr,none': 0.022033677993740862, 'alias': ' - xcopa_vi'}, 'xcopa_zh': {'acc,none': 0.65, 'acc_stderr,none': 0.021352091786223104, 'alias': ' - xcopa_zh'}, 'crows_pairs': {'likelihood_diff,none': 4.039303237651172, 'likelihood_diff_stderr,none': 0.4554088930609172, 'pct_stereotype,none': 0.5945140131186643, 'pct_stereotype_stderr,none': 0.08619149455497793, 'alias': 
'crows_pairs'}, 'crows_pairs_english': {'likelihood_diff,none': 4.172200931419414, 'likelihood_diff_stderr,none': 0.09444066856881643, 'pct_stereotype,none': 0.6583184257602862, 'pct_stereotype_stderr,none': 0.0115848863578411, 'alias': ' - crows_pairs_english'}, 'crows_pairs_english_age': {'likelihood_diff,none': 3.81760980794718, 'likelihood_diff_stderr,none': 0.39572701594612664, 'pct_stereotype,none': 0.7032967032967034, 'pct_stereotype_stderr,none': 0.04815143362682777, 'alias': ' - crows_pairs_english_age'}, 'crows_pairs_english_autre': {'likelihood_diff,none': 8.978192589499734, 'likelihood_diff_stderr,none': 2.383266706466439, 'pct_stereotype,none': 0.8181818181818182, 'pct_stereotype_stderr,none': 0.12196734422726126, 'alias': ' - crows_pairs_english_autre'}, 'crows_pairs_english_disability': {'likelihood_diff,none': 6.123607283372145, 'likelihood_diff_stderr,none': 0.6269717764216254, 'pct_stereotype,none': 0.7538461538461538, 'pct_stereotype_stderr,none': 0.05384615384615383, 'alias': ' - crows_pairs_english_disability'}, 'crows_pairs_english_gender': {'likelihood_diff,none': 3.593177890777588, 'likelihood_diff_stderr,none': 0.1891835945318628, 'pct_stereotype,none': 0.565625, 'pct_stereotype_stderr,none': 0.02775245248136475, 'alias': ' - crows_pairs_english_gender'}, 'crows_pairs_english_nationality': {'likelihood_diff,none': 3.503406215597082, 'likelihood_diff_stderr,none': 0.2223140250179109, 'pct_stereotype,none': 0.5648148148148148, 'pct_stereotype_stderr,none': 0.03381200005643525, 'alias': ' - crows_pairs_english_nationality'}, 'crows_pairs_english_physical_appearance': {'likelihood_diff,none': 4.352519141303168, 'likelihood_diff_stderr,none': 0.429502683647509, 'pct_stereotype,none': 0.7777777777777778, 'pct_stereotype_stderr,none': 0.04933922619854289, 'alias': ' - crows_pairs_english_physical_appearance'}, 'crows_pairs_english_race_color': {'likelihood_diff,none': 4.192574343343419, 'likelihood_diff_stderr,none': 0.18220038540379538, 'pct_stereotype,none': 0.6220472440944882, 'pct_stereotype_stderr,none': 0.02153408701954117, 'alias': ' - crows_pairs_english_race_color'}, 'crows_pairs_english_religion': {'likelihood_diff,none': 4.5383266930107595, 'likelihood_diff_stderr,none': 0.34676935247597424, 'pct_stereotype,none': 0.8198198198198198, 'pct_stereotype_stderr,none': 0.03664513893725976, 'alias': ' - crows_pairs_english_religion'}, 'crows_pairs_english_sexual_orientation': {'likelihood_diff,none': 4.885875209685294, 'likelihood_diff_stderr,none': 0.40901674280141453, 'pct_stereotype,none': 0.8817204301075269, 'pct_stereotype_stderr,none': 0.033668704543479824, 'alias': ' - crows_pairs_english_sexual_orientation'}, 'crows_pairs_english_socioeconomic': {'likelihood_diff,none': 4.445691620676141, 'likelihood_diff_stderr,none': 0.25789243822181585, 'pct_stereotype,none': 0.7052631578947368, 'pct_stereotype_stderr,none': 0.03316361842984287, 'alias': ' - crows_pairs_english_socioeconomic'}, 'crows_pairs_french': {'likelihood_diff,none': 3.9064055438829306, 'likelihood_diff_stderr,none': 0.09471848282380547, 'pct_stereotype,none': 0.5307096004770423, 'pct_stereotype_stderr,none': 0.012190241226841262, 'alias': ' - crows_pairs_french'}, 'crows_pairs_french_age': {'likelihood_diff,none': 3.0736910078260635, 'likelihood_diff_stderr,none': 0.3236843030438557, 'pct_stereotype,none': 0.5222222222222223, 'pct_stereotype_stderr,none': 0.05294752255076824, 'alias': ' - crows_pairs_french_age'}, 'crows_pairs_french_autre': {'likelihood_diff,none': 3.385772411639874, 
'likelihood_diff_stderr,none': 0.9269473104034913, 'pct_stereotype,none': 0.6923076923076923, 'pct_stereotype_stderr,none': 0.13323467750529824, 'alias': ' - crows_pairs_french_autre'}, 'crows_pairs_french_disability': {'likelihood_diff,none': 4.82528576706395, 'likelihood_diff_stderr,none': 0.432435922942141, 'pct_stereotype,none': 0.6363636363636364, 'pct_stereotype_stderr,none': 0.05966637484671758, 'alias': ' - crows_pairs_french_disability'}, 'crows_pairs_french_gender': {'likelihood_diff,none': 3.4997918078461168, 'likelihood_diff_stderr,none': 0.1914324136821373, 'pct_stereotype,none': 0.5171339563862928, 'pct_stereotype_stderr,none': 0.027934433698537306, 'alias': ' - crows_pairs_french_gender'}, 'crows_pairs_french_nationality': {'likelihood_diff,none': 4.111823522997468, 'likelihood_diff_stderr,none': 0.2296054767953655, 'pct_stereotype,none': 0.3675889328063241, 'pct_stereotype_stderr,none': 0.030372509322709233, 'alias': ' - crows_pairs_french_nationality'}, 'crows_pairs_french_physical_appearance': {'likelihood_diff,none': 3.6255602306789823, 'likelihood_diff_stderr,none': 0.43425524662184634, 'pct_stereotype,none': 0.5972222222222222, 'pct_stereotype_stderr,none': 0.05820650942569532, 'alias': ' - crows_pairs_french_physical_appearance'}, 'crows_pairs_french_race_color': {'likelihood_diff,none': 4.240377683224885, 'likelihood_diff_stderr,none': 0.2098809816569495, 'pct_stereotype,none': 0.4652173913043478, 'pct_stereotype_stderr,none': 0.023281462893244318, 'alias': ' - crows_pairs_french_race_color'}, 'crows_pairs_french_religion': {'likelihood_diff,none': 3.6430626578952956, 'likelihood_diff_stderr,none': 0.32613230548605726, 'pct_stereotype,none': 0.7130434782608696, 'pct_stereotype_stderr,none': 0.042365626207479204, 'alias': ' - crows_pairs_french_religion'}, 'crows_pairs_french_sexual_orientation': {'likelihood_diff,none': 3.6517183492471883, 'likelihood_diff_stderr,none': 0.3279010024708823, 'pct_stereotype,none': 0.7252747252747253, 'pct_stereotype_stderr,none': 0.047052133987784385, 'alias': ' - crows_pairs_french_sexual_orientation'}, 'crows_pairs_french_socioeconomic': {'likelihood_diff,none': 4.006779777760408, 'likelihood_diff_stderr,none': 0.30335267862395343, 'pct_stereotype,none': 0.6530612244897959, 'pct_stereotype_stderr,none': 0.03408678678944596, 'alias': ' - crows_pairs_french_socioeconomic'}, 'logieval': {'exact_match,get-answer': 0.494910941475827, 'exact_match_stderr,get-answer': 0.012614191372690004, 'alias': 'logieval'}, 'xnli': {'acc,none': 0.43052208835341366, 'acc_stderr,none': 0.05866526647573086, 'alias': 'xnli'}, 'xnli_ar': {'acc,none': 0.3337349397590361, 'acc_stderr,none': 0.009451743112667055, 'alias': ' - xnli_ar'}, 'xnli_bg': {'acc,none': 0.4570281124497992, 'acc_stderr,none': 0.009984991084561275, 'alias': ' - xnli_bg'}, 'xnli_de': {'acc,none': 0.4971887550200803, 'acc_stderr,none': 0.010021914455122174, 'alias': ' - xnli_de'}, 'xnli_el': {'acc,none': 0.41124497991967873, 'acc_stderr,none': 0.00986291222354463, 'alias': ' - xnli_el'}, 'xnli_en': {'acc,none': 0.5662650602409639, 'acc_stderr,none': 0.009933667945702098, 'alias': ' - xnli_en'}, 'xnli_es': {'acc,none': 0.457429718875502, 'acc_stderr,none': 0.00998568222022746, 'alias': ' - xnli_es'}, 'xnli_fr': {'acc,none': 0.5088353413654618, 'acc_stderr,none': 0.010020508033762627, 'alias': ' - xnli_fr'}, 'xnli_hi': {'acc,none': 0.43012048192771085, 'acc_stderr,none': 0.00992371167540806, 'alias': ' - xnli_hi'}, 'xnli_ru': {'acc,none': 0.4975903614457831, 'acc_stderr,none': 
0.010021956483068082, 'alias': ' - xnli_ru'}, 'xnli_sw': {'acc,none': 0.35542168674698793, 'acc_stderr,none': 0.009593947957927139, 'alias': ' - xnli_sw'}, 'xnli_th': {'acc,none': 0.3887550200803213, 'acc_stderr,none': 0.009770869423441493, 'alias': ' - xnli_th'}, 'xnli_tr': {'acc,none': 0.43092369477911646, 'acc_stderr,none': 0.009925970741520641, 'alias': ' - xnli_tr'}, 'xnli_ur': {'acc,none': 0.3385542168674699, 'acc_stderr,none': 0.00948525020851688, 'alias': ' - xnli_ur'}, 'xnli_vi': {'acc,none': 0.40923694779116465, 'acc_stderr,none': 0.009855567414480236, 'alias': ' - xnli_vi'}, 'xnli_zh': {'acc,none': 0.3755020080321285, 'acc_stderr,none': 0.009706422844379824, 'alias': ' - xnli_zh'}, 'truthfulqa': {'acc,none': 0.35372429452780935, 'acc_stderr,none': 0.0015289998228574559, 'bleu_max,none': 30.506553066296135, 'bleu_max_stderr,none': 0.8545048499620589, 'bleu_acc,none': 0.41370869033047736, 'bleu_acc_stderr,none': 0.0172408618120998, 'bleu_diff,none': -1.4792658300937298, 'bleu_diff_stderr,none': 1.0365996246291096, 'rouge1_max,none': 55.752927992652076, 'rouge1_max_stderr,none': 0.9304517781051922, 'rouge1_acc,none': 0.397796817625459, 'rouge1_acc_stderr,none': 0.017133934248559652, 'rouge1_diff,none': -1.916659079286129, 'rouge1_diff_stderr,none': 1.2644440329641677, 'rouge2_max,none': 40.96291082037781, 'rouge2_max_stderr,none': 1.1007195429525476, 'rouge2_acc,none': 0.3537331701346389, 'rouge2_acc_stderr,none': 0.016737814358846147, 'rouge2_diff,none': -2.594294632926603, 'rouge2_diff_stderr,none': 1.4182491490828897, 'rougeL_max,none': 53.07319351290984, 'rougeL_max_stderr,none': 0.9465416042598798, 'rougeL_acc,none': 0.40269277845777235, 'rougeL_acc_stderr,none': 0.017168830935187215, 'rougeL_diff,none': -2.232349573503825, 'rougeL_diff_stderr,none': 1.2801381361084667, 'alias': 'truthfulqa'}, 'truthfulqa_gen': {'bleu_max,none': 30.506553066296135, 'bleu_max_stderr,none': 0.8545048499620589, 'bleu_acc,none': 0.41370869033047736, 'bleu_acc_stderr,none': 0.0172408618120998, 'bleu_diff,none': -1.4792658300937298, 'bleu_diff_stderr,none': 1.0365996246291096, 'rouge1_max,none': 55.752927992652076, 'rouge1_max_stderr,none': 0.9304517781051922, 'rouge1_acc,none': 0.397796817625459, 'rouge1_acc_stderr,none': 0.017133934248559652, 'rouge1_diff,none': -1.916659079286129, 'rouge1_diff_stderr,none': 1.2644440329641677, 'rouge2_max,none': 40.96291082037781, 'rouge2_max_stderr,none': 1.1007195429525476, 'rouge2_acc,none': 0.3537331701346389, 'rouge2_acc_stderr,none': 0.016737814358846147, 'rouge2_diff,none': -2.594294632926603, 'rouge2_diff_stderr,none': 1.4182491490828897, 'rougeL_max,none': 53.07319351290984, 'rougeL_max_stderr,none': 0.9465416042598798, 'rougeL_acc,none': 0.40269277845777235, 'rougeL_acc_stderr,none': 0.017168830935187215, 'rougeL_diff,none': -2.232349573503825, 'rougeL_diff_stderr,none': 1.2801381361084667, 'alias': ' - truthfulqa_gen'}, 'truthfulqa_mc1': {'acc,none': 0.28151774785801714, 'acc_stderr,none': 0.01574402724825605, 'alias': ' - truthfulqa_mc1'}, 'truthfulqa_mc2': {'acc,none': 0.4259308411976015, 'acc_stderr,none': 0.01420956064029871, 'alias': ' - truthfulqa_mc2'}, 'multirc': {'acc,none': 0.5691006600660066, 'acc_stderr,none': 0.007112887654223405, 'alias': 'multirc'}, 'ceval-valid': {'acc,none': 0.40713224368499257, 'acc_stderr,none': 0.14694388399894986, 'acc_norm,none': 0.40713224368499257, 'acc_norm_stderr,none': 0.14694388399894986, 'alias': 'ceval-valid'}, 'ceval-valid_accountant': {'acc,none': 0.30612244897959184, 'acc_stderr,none': 
0.066522473522476, 'acc_norm,none': 0.30612244897959184, 'acc_norm_stderr,none': 0.066522473522476, 'alias': ' - ceval-valid_accountant'}, 'ceval-valid_advanced_mathematics': {'acc,none': 0.42105263157894735, 'acc_stderr,none': 0.11637279966159299, 'acc_norm,none': 0.42105263157894735, 'acc_norm_stderr,none': 0.11637279966159299, 'alias': ' - ceval-valid_advanced_mathematics'}, 'ceval-valid_art_studies': {'acc,none': 0.30303030303030304, 'acc_stderr,none': 0.08124094920275461, 'acc_norm,none': 0.30303030303030304, 'acc_norm_stderr,none': 0.08124094920275461, 'alias': ' - ceval-valid_art_studies'}, 'ceval-valid_basic_medicine': {'acc,none': 0.5263157894736842, 'acc_stderr,none': 0.11768778828946262, 'acc_norm,none': 0.5263157894736842, 'acc_norm_stderr,none': 0.11768778828946262, 'alias': ' - ceval-valid_basic_medicine'}, 'ceval-valid_business_administration': {'acc,none': 0.36363636363636365, 'acc_stderr,none': 0.08503766788122594, 'acc_norm,none': 0.36363636363636365, 'acc_norm_stderr,none': 0.08503766788122594, 'alias': ' - ceval-valid_business_administration'}, 'ceval-valid_chinese_language_and_literature': {'acc,none': 0.2608695652173913, 'acc_stderr,none': 0.09361833424764437, 'acc_norm,none': 0.2608695652173913, 'acc_norm_stderr,none': 0.09361833424764437, 'alias': ' - ceval-valid_chinese_language_and_literature'}, 'ceval-valid_civil_servant': {'acc,none': 0.3404255319148936, 'acc_stderr,none': 0.06986570800554745, 'acc_norm,none': 0.3404255319148936, 'acc_norm_stderr,none': 0.06986570800554745, 'alias': ' - ceval-valid_civil_servant'}, 'ceval-valid_clinical_medicine': {'acc,none': 0.3181818181818182, 'acc_stderr,none': 0.10163945352271771, 'acc_norm,none': 0.3181818181818182, 'acc_norm_stderr,none': 0.10163945352271771, 'alias': ' - ceval-valid_clinical_medicine'}, 'ceval-valid_college_chemistry': {'acc,none': 0.375, 'acc_stderr,none': 0.10094660663590604, 'acc_norm,none': 0.375, 'acc_norm_stderr,none': 0.10094660663590604, 'alias': ' - ceval-valid_college_chemistry'}, 'ceval-valid_college_economics': {'acc,none': 0.36363636363636365, 'acc_stderr,none': 0.06546202725664503, 'acc_norm,none': 0.36363636363636365, 'acc_norm_stderr,none': 0.06546202725664503, 'alias': ' - ceval-valid_college_economics'}, 'ceval-valid_college_physics': {'acc,none': 0.3684210526315789, 'acc_stderr,none': 0.1136972052352256, 'acc_norm,none': 0.3684210526315789, 'acc_norm_stderr,none': 0.1136972052352256, 'alias': ' - ceval-valid_college_physics'}, 'ceval-valid_college_programming': {'acc,none': 0.5675675675675675, 'acc_stderr,none': 0.08256893144064577, 'acc_norm,none': 0.5675675675675675, 'acc_norm_stderr,none': 0.08256893144064577, 'alias': ' - ceval-valid_college_programming'}, 'ceval-valid_computer_architecture': {'acc,none': 0.5238095238095238, 'acc_stderr,none': 0.11167656571008164, 'acc_norm,none': 0.5238095238095238, 'acc_norm_stderr,none': 0.11167656571008164, 'alias': ' - ceval-valid_computer_architecture'}, 'ceval-valid_computer_network': {'acc,none': 0.5263157894736842, 'acc_stderr,none': 0.1176877882894626, 'acc_norm,none': 0.5263157894736842, 'acc_norm_stderr,none': 0.1176877882894626, 'alias': ' - ceval-valid_computer_network'}, 'ceval-valid_discrete_mathematics': {'acc,none': 0.125, 'acc_stderr,none': 0.08539125638299665, 'acc_norm,none': 0.125, 'acc_norm_stderr,none': 0.08539125638299665, 'alias': ' - ceval-valid_discrete_mathematics'}, 'ceval-valid_education_science': {'acc,none': 0.4827586206896552, 'acc_stderr,none': 0.09443492370778725, 'acc_norm,none': 0.4827586206896552, 
'acc_norm_stderr,none': 0.09443492370778725, 'alias': ' - ceval-valid_education_science'}, 'ceval-valid_electrical_engineer': {'acc,none': 0.3783783783783784, 'acc_stderr,none': 0.08083044344561426, 'acc_norm,none': 0.3783783783783784, 'acc_norm_stderr,none': 0.08083044344561426, 'alias': ' - ceval-valid_electrical_engineer'}, 'ceval-valid_environmental_impact_assessment_engineer': {'acc,none': 0.41935483870967744, 'acc_stderr,none': 0.0900918712501222, 'acc_norm,none': 0.41935483870967744, 'acc_norm_stderr,none': 0.0900918712501222, 'alias': ' - ceval-valid_environmental_impact_assessment_engineer'}, 'ceval-valid_fire_engineer': {'acc,none': 0.5161290322580645, 'acc_stderr,none': 0.09123958466923197, 'acc_norm,none': 0.5161290322580645, 'acc_norm_stderr,none': 0.09123958466923197, 'alias': ' - ceval-valid_fire_engineer'}, 'ceval-valid_high_school_biology': {'acc,none': 0.3684210526315789, 'acc_stderr,none': 0.11369720523522558, 'acc_norm,none': 0.3684210526315789, 'acc_norm_stderr,none': 0.11369720523522558, 'alias': ' - ceval-valid_high_school_biology'}, 'ceval-valid_high_school_chemistry': {'acc,none': 0.47368421052631576, 'acc_stderr,none': 0.1176877882894626, 'acc_norm,none': 0.47368421052631576, 'acc_norm_stderr,none': 0.1176877882894626, 'alias': ' - ceval-valid_high_school_chemistry'}, 'ceval-valid_high_school_chinese': {'acc,none': 0.21052631578947367, 'acc_stderr,none': 0.09609167675529229, 'acc_norm,none': 0.21052631578947367, 'acc_norm_stderr,none': 0.09609167675529229, 'alias': ' - ceval-valid_high_school_chinese'}, 'ceval-valid_high_school_geography': {'acc,none': 0.42105263157894735, 'acc_stderr,none': 0.11637279966159299, 'acc_norm,none': 0.42105263157894735, 'acc_norm_stderr,none': 0.11637279966159299, 'alias': ' - ceval-valid_high_school_geography'}, 'ceval-valid_high_school_history': {'acc,none': 0.7, 'acc_stderr,none': 0.10513149660756935, 'acc_norm,none': 0.7, 'acc_norm_stderr,none': 0.10513149660756935, 'alias': ' - ceval-valid_high_school_history'}, 'ceval-valid_high_school_mathematics': {'acc,none': 0.05555555555555555, 'acc_stderr,none': 0.05555555555555556, 'acc_norm,none': 0.05555555555555555, 'acc_norm_stderr,none': 0.05555555555555556, 'alias': ' - ceval-valid_high_school_mathematics'}, 'ceval-valid_high_school_physics': {'acc,none': 0.2631578947368421, 'acc_stderr,none': 0.10379087338771256, 'acc_norm,none': 0.2631578947368421, 'acc_norm_stderr,none': 0.10379087338771256, 'alias': ' - ceval-valid_high_school_physics'}, 'ceval-valid_high_school_politics': {'acc,none': 0.631578947368421, 'acc_stderr,none': 0.11369720523522563, 'acc_norm,none': 0.631578947368421, 'acc_norm_stderr,none': 0.11369720523522563, 'alias': ' - ceval-valid_high_school_politics'}, 'ceval-valid_ideological_and_moral_cultivation': {'acc,none': 0.5789473684210527, 'acc_stderr,none': 0.11637279966159299, 'acc_norm,none': 0.5789473684210527, 'acc_norm_stderr,none': 0.11637279966159299, 'alias': ' - ceval-valid_ideological_and_moral_cultivation'}, 'ceval-valid_law': {'acc,none': 0.2916666666666667, 'acc_stderr,none': 0.09477598811252413, 'acc_norm,none': 0.2916666666666667, 'acc_norm_stderr,none': 0.09477598811252413, 'alias': ' - ceval-valid_law'}, 'ceval-valid_legal_professional': {'acc,none': 0.391304347826087, 'acc_stderr,none': 0.10405096111532161, 'acc_norm,none': 0.391304347826087, 'acc_norm_stderr,none': 0.10405096111532161, 'alias': ' - ceval-valid_legal_professional'}, 'ceval-valid_logic': {'acc,none': 0.45454545454545453, 'acc_stderr,none': 0.10865714630312667, 'acc_norm,none': 
0.45454545454545453, 'acc_norm_stderr,none': 0.10865714630312667, 'alias': ' - ceval-valid_logic'}, 'ceval-valid_mao_zedong_thought': {'acc,none': 0.4583333333333333, 'acc_stderr,none': 0.10389457216622949, 'acc_norm,none': 0.4583333333333333, 'acc_norm_stderr,none': 0.10389457216622949, 'alias': ' - ceval-valid_mao_zedong_thought'}, 'ceval-valid_marxism': {'acc,none': 0.3684210526315789, 'acc_stderr,none': 0.11369720523522558, 'acc_norm,none': 0.3684210526315789, 'acc_norm_stderr,none': 0.11369720523522558, 'alias': ' - ceval-valid_marxism'}, 'ceval-valid_metrology_engineer': {'acc,none': 0.5416666666666666, 'acc_stderr,none': 0.10389457216622949, 'acc_norm,none': 0.5416666666666666, 'acc_norm_stderr,none': 0.10389457216622949, 'alias': ' - ceval-valid_metrology_engineer'}, 'ceval-valid_middle_school_biology': {'acc,none': 0.6666666666666666, 'acc_stderr,none': 0.10540925533894598, 'acc_norm,none': 0.6666666666666666, 'acc_norm_stderr,none': 0.10540925533894598, 'alias': ' - ceval-valid_middle_school_biology'}, 'ceval-valid_middle_school_chemistry': {'acc,none': 0.25, 'acc_stderr,none': 0.09933992677987828, 'acc_norm,none': 0.25, 'acc_norm_stderr,none': 0.09933992677987828, 'alias': ' - ceval-valid_middle_school_chemistry'}, 'ceval-valid_middle_school_geography': {'acc,none': 0.3333333333333333, 'acc_stderr,none': 0.14213381090374033, 'acc_norm,none': 0.3333333333333333, 'acc_norm_stderr,none': 0.14213381090374033, 'alias': ' - ceval-valid_middle_school_geography'}, 'ceval-valid_middle_school_history': {'acc,none': 0.36363636363636365, 'acc_stderr,none': 0.10497277621629558, 'acc_norm,none': 0.36363636363636365, 'acc_norm_stderr,none': 0.10497277621629558, 'alias': ' - ceval-valid_middle_school_history'}, 'ceval-valid_middle_school_mathematics': {'acc,none': 0.21052631578947367, 'acc_stderr,none': 0.0960916767552923, 'acc_norm,none': 0.21052631578947367, 'acc_norm_stderr,none': 0.0960916767552923, 'alias': ' - ceval-valid_middle_school_mathematics'}, 'ceval-valid_middle_school_physics': {'acc,none': 0.5263157894736842, 'acc_stderr,none': 0.1176877882894626, 'acc_norm,none': 0.5263157894736842, 'acc_norm_stderr,none': 0.1176877882894626, 'alias': ' - ceval-valid_middle_school_physics'}, 'ceval-valid_middle_school_politics': {'acc,none': 0.47619047619047616, 'acc_stderr,none': 0.11167656571008164, 'acc_norm,none': 0.47619047619047616, 'acc_norm_stderr,none': 0.11167656571008164, 'alias': ' - ceval-valid_middle_school_politics'}, 'ceval-valid_modern_chinese_history': {'acc,none': 0.43478260869565216, 'acc_stderr,none': 0.10568965974008647, 'acc_norm,none': 0.43478260869565216, 'acc_norm_stderr,none': 0.10568965974008647, 'alias': ' - ceval-valid_modern_chinese_history'}, 'ceval-valid_operating_system': {'acc,none': 0.3684210526315789, 'acc_stderr,none': 0.11369720523522558, 'acc_norm,none': 0.3684210526315789, 'acc_norm_stderr,none': 0.11369720523522558, 'alias': ' - ceval-valid_operating_system'}, 'ceval-valid_physician': {'acc,none': 0.40816326530612246, 'acc_stderr,none': 0.07094099868916398, 'acc_norm,none': 0.40816326530612246, 'acc_norm_stderr,none': 0.07094099868916398, 'alias': ' - ceval-valid_physician'}, 'ceval-valid_plant_protection': {'acc,none': 0.45454545454545453, 'acc_stderr,none': 0.10865714630312667, 'acc_norm,none': 0.45454545454545453, 'acc_norm_stderr,none': 0.10865714630312667, 'alias': ' - ceval-valid_plant_protection'}, 'ceval-valid_probability_and_statistics': {'acc,none': 0.2777777777777778, 'acc_stderr,none': 0.1086324845659782, 'acc_norm,none': 0.2777777777777778, 
'acc_norm_stderr,none': 0.1086324845659782, 'alias': ' - ceval-valid_probability_and_statistics'}, 'ceval-valid_professional_tour_guide': {'acc,none': 0.3793103448275862, 'acc_stderr,none': 0.09169709590633637, 'acc_norm,none': 0.3793103448275862, 'acc_norm_stderr,none': 0.09169709590633637, 'alias': ' - ceval-valid_professional_tour_guide'}, 'ceval-valid_sports_science': {'acc,none': 0.2631578947368421, 'acc_stderr,none': 0.10379087338771256, 'acc_norm,none': 0.2631578947368421, 'acc_norm_stderr,none': 0.10379087338771256, 'alias': ' - ceval-valid_sports_science'}, 'ceval-valid_tax_accountant': {'acc,none': 0.30612244897959184, 'acc_stderr,none': 0.06652247352247599, 'acc_norm,none': 0.30612244897959184, 'acc_norm_stderr,none': 0.06652247352247599, 'alias': ' - ceval-valid_tax_accountant'}, 'ceval-valid_teacher_qualification': {'acc,none': 0.5681818181818182, 'acc_stderr,none': 0.07553702921752882, 'acc_norm,none': 0.5681818181818182, 'acc_norm_stderr,none': 0.07553702921752882, 'alias': ' - ceval-valid_teacher_qualification'}, 'ceval-valid_urban_and_rural_planner': {'acc,none': 0.5217391304347826, 'acc_stderr,none': 0.07446511639805872, 'acc_norm,none': 0.5217391304347826, 'acc_norm_stderr,none': 0.07446511639805872, 'alias': ' - ceval-valid_urban_and_rural_planner'}, 'ceval-valid_veterinary_medicine': {'acc,none': 0.5217391304347826, 'acc_stderr,none': 0.10649955403405124, 'acc_norm,none': 0.5217391304347826, 'acc_norm_stderr,none': 0.10649955403405124, 'alias': ' - ceval-valid_veterinary_medicine'}, 'copa': {'acc,none': 0.93, 'acc_stderr,none': 0.0256432399976243, 'alias': 'copa'}, 'freebase': {'exact_match,none': 0.15403543307086615, 'exact_match_stderr,none': 0.008009980186286517, 'alias': 'freebase'}, 'webqs': {'exact_match,none': 0.15403543307086615, 'exact_match_stderr,none': 0.008009980186286517, 'alias': 'webqs'}, 'cmmlu': {'acc,none': 0.3979450872042825, 'acc_stderr,none': 0.09095017592357733, 'acc_norm,none': 0.3979450872042825, 'acc_norm_stderr,none': 0.09095017592357733, 'alias': 'cmmlu'}, 'cmmlu_agronomy': {'acc,none': 0.35502958579881655, 'acc_stderr,none': 0.036918795945769134, 'acc_norm,none': 0.35502958579881655, 'acc_norm_stderr,none': 0.036918795945769134, 'alias': ' - cmmlu_agronomy'}, 'cmmlu_anatomy': {'acc,none': 0.2702702702702703, 'acc_stderr,none': 0.03662869876642905, 'acc_norm,none': 0.2702702702702703, 'acc_norm_stderr,none': 0.03662869876642905, 'alias': ' - cmmlu_anatomy'}, 'cmmlu_ancient_chinese': {'acc,none': 0.25609756097560976, 'acc_stderr,none': 0.03418746588364998, 'acc_norm,none': 0.25609756097560976, 'acc_norm_stderr,none': 0.03418746588364998, 'alias': ' - cmmlu_ancient_chinese'}, 'cmmlu_arts': {'acc,none': 0.39375, 'acc_stderr,none': 0.03874695666685831, 'acc_norm,none': 0.39375, 'acc_norm_stderr,none': 0.03874695666685831, 'alias': ' - cmmlu_arts'}, 'cmmlu_astronomy': {'acc,none': 0.3151515151515151, 'acc_stderr,none': 0.0362773057502241, 'acc_norm,none': 0.3151515151515151, 'acc_norm_stderr,none': 0.0362773057502241, 'alias': ' - cmmlu_astronomy'}, 'cmmlu_business_ethics': {'acc,none': 0.4354066985645933, 'acc_stderr,none': 0.03437824847655481, 'acc_norm,none': 0.4354066985645933, 'acc_norm_stderr,none': 0.03437824847655481, 'alias': ' - cmmlu_business_ethics'}, 'cmmlu_chinese_civil_service_exam': {'acc,none': 0.3625, 'acc_stderr,none': 0.038123743406448904, 'acc_norm,none': 0.3625, 'acc_norm_stderr,none': 0.038123743406448904, 'alias': ' - cmmlu_chinese_civil_service_exam'}, 'cmmlu_chinese_driving_rule': {'acc,none': 0.5114503816793893, 
'acc_stderr,none': 0.04384140024078016, 'acc_norm,none': 0.5114503816793893, 'acc_norm_stderr,none': 0.04384140024078016, 'alias': ' - cmmlu_chinese_driving_rule'}, 'cmmlu_chinese_food_culture': {'acc,none': 0.35294117647058826, 'acc_stderr,none': 0.041129758751770655, 'acc_norm,none': 0.35294117647058826, 'acc_norm_stderr,none': 0.041129758751770655, 'alias': ' - cmmlu_chinese_food_culture'}, 'cmmlu_chinese_foreign_policy': {'acc,none': 0.45794392523364486, 'acc_stderr,none': 0.04839219555189162, 'acc_norm,none': 0.45794392523364486, 'acc_norm_stderr,none': 0.04839219555189162, 'alias': ' - cmmlu_chinese_foreign_policy'}, 'cmmlu_chinese_history': {'acc,none': 0.4086687306501548, 'acc_stderr,none': 0.027395118985328946, 'acc_norm,none': 0.4086687306501548, 'acc_norm_stderr,none': 0.027395118985328946, 'alias': ' - cmmlu_chinese_history'}, 'cmmlu_chinese_literature': {'acc,none': 0.35294117647058826, 'acc_stderr,none': 0.03354092437591519, 'acc_norm,none': 0.35294117647058826, 'acc_norm_stderr,none': 0.03354092437591519, 'alias': ' - cmmlu_chinese_literature'}, 'cmmlu_chinese_teacher_qualification': {'acc,none': 0.46368715083798884, 'acc_stderr,none': 0.03737761880538031, 'acc_norm,none': 0.46368715083798884, 'acc_norm_stderr,none': 0.03737761880538031, 'alias': ' - cmmlu_chinese_teacher_qualification'}, 'cmmlu_clinical_knowledge': {'acc,none': 0.31223628691983124, 'acc_stderr,none': 0.030165137867847008, 'acc_norm,none': 0.31223628691983124, 'acc_norm_stderr,none': 0.030165137867847008, 'alias': ' - cmmlu_clinical_knowledge'}, 'cmmlu_college_actuarial_science': {'acc,none': 0.25471698113207547, 'acc_stderr,none': 0.0425201622376331, 'acc_norm,none': 0.25471698113207547, 'acc_norm_stderr,none': 0.0425201622376331, 'alias': ' - cmmlu_college_actuarial_science'}, 'cmmlu_college_education': {'acc,none': 0.4672897196261682, 'acc_stderr,none': 0.04846025774523467, 'acc_norm,none': 0.4672897196261682, 'acc_norm_stderr,none': 0.04846025774523467, 'alias': ' - cmmlu_college_education'}, 'cmmlu_college_engineering_hydrology': {'acc,none': 0.37735849056603776, 'acc_stderr,none': 0.04730439022852895, 'acc_norm,none': 0.37735849056603776, 'acc_norm_stderr,none': 0.04730439022852895, 'alias': ' - cmmlu_college_engineering_hydrology'}, 'cmmlu_college_law': {'acc,none': 0.37037037037037035, 'acc_stderr,none': 0.04668408033024931, 'acc_norm,none': 0.37037037037037035, 'acc_norm_stderr,none': 0.04668408033024931, 'alias': ' - cmmlu_college_law'}, 'cmmlu_college_mathematics': {'acc,none': 0.26666666666666666, 'acc_stderr,none': 0.04336290903919941, 'acc_norm,none': 0.26666666666666666, 'acc_norm_stderr,none': 0.04336290903919941, 'alias': ' - cmmlu_college_mathematics'}, 'cmmlu_college_medical_statistics': {'acc,none': 0.44339622641509435, 'acc_stderr,none': 0.0484813182297548, 'acc_norm,none': 0.44339622641509435, 'acc_norm_stderr,none': 0.0484813182297548, 'alias': ' - cmmlu_college_medical_statistics'}, 'cmmlu_college_medicine': {'acc,none': 0.3076923076923077, 'acc_stderr,none': 0.027984879811884515, 'acc_norm,none': 0.3076923076923077, 'acc_norm_stderr,none': 0.027984879811884515, 'alias': ' - cmmlu_college_medicine'}, 'cmmlu_computer_science': {'acc,none': 0.47058823529411764, 'acc_stderr,none': 0.03503235296367994, 'acc_norm,none': 0.47058823529411764, 'acc_norm_stderr,none': 0.03503235296367994, 'alias': ' - cmmlu_computer_science'}, 'cmmlu_computer_security': {'acc,none': 0.4853801169590643, 'acc_stderr,none': 0.038331852752130205, 'acc_norm,none': 0.4853801169590643, 'acc_norm_stderr,none': 
0.038331852752130205, 'alias': ' - cmmlu_computer_security'}, 'cmmlu_conceptual_physics': {'acc,none': 0.3945578231292517, 'acc_stderr,none': 0.040449693713112876, 'acc_norm,none': 0.3945578231292517, 'acc_norm_stderr,none': 0.040449693713112876, 'alias': ' - cmmlu_conceptual_physics'}, 'cmmlu_construction_project_management': {'acc,none': 0.41007194244604317, 'acc_stderr,none': 0.04186875148834218, 'acc_norm,none': 0.41007194244604317, 'acc_norm_stderr,none': 0.04186875148834218, 'alias': ' - cmmlu_construction_project_management'}, 'cmmlu_economics': {'acc,none': 0.5220125786163522, 'acc_stderr,none': 0.03973929649561243, 'acc_norm,none': 0.5220125786163522, 'acc_norm_stderr,none': 0.03973929649561243, 'alias': ' - cmmlu_economics'}, 'cmmlu_education': {'acc,none': 0.4723926380368098, 'acc_stderr,none': 0.03922378290610991, 'acc_norm,none': 0.4723926380368098, 'acc_norm_stderr,none': 0.03922378290610991, 'alias': ' - cmmlu_education'}, 'cmmlu_electrical_engineering': {'acc,none': 0.37790697674418605, 'acc_stderr,none': 0.03707849218723281, 'acc_norm,none': 0.37790697674418605, 'acc_norm_stderr,none': 0.03707849218723281, 'alias': ' - cmmlu_electrical_engineering'}, 'cmmlu_elementary_chinese': {'acc,none': 0.25396825396825395, 'acc_stderr,none': 0.02747460833869741, 'acc_norm,none': 0.25396825396825395, 'acc_norm_stderr,none': 0.02747460833869741, 'alias': ' - cmmlu_elementary_chinese'}, 'cmmlu_elementary_commonsense': {'acc,none': 0.3838383838383838, 'acc_stderr,none': 0.03464881675016339, 'acc_norm,none': 0.3838383838383838, 'acc_norm_stderr,none': 0.03464881675016339, 'alias': ' - cmmlu_elementary_commonsense'}, 'cmmlu_elementary_information_and_technology': {'acc,none': 0.6260504201680672, 'acc_stderr,none': 0.031429466378837076, 'acc_norm,none': 0.6260504201680672, 'acc_norm_stderr,none': 0.031429466378837076, 'alias': ' - cmmlu_elementary_information_and_technology'}, 'cmmlu_elementary_mathematics': {'acc,none': 0.2782608695652174, 'acc_stderr,none': 0.029614094221633722, 'acc_norm,none': 0.2782608695652174, 'acc_norm_stderr,none': 0.029614094221633722, 'alias': ' - cmmlu_elementary_mathematics'}, 'cmmlu_ethnology': {'acc,none': 0.3037037037037037, 'acc_stderr,none': 0.039725528847851375, 'acc_norm,none': 0.3037037037037037, 'acc_norm_stderr,none': 0.039725528847851375, 'alias': ' - cmmlu_ethnology'}, 'cmmlu_food_science': {'acc,none': 0.46853146853146854, 'acc_stderr,none': 0.041875883974458995, 'acc_norm,none': 0.46853146853146854, 'acc_norm_stderr,none': 0.041875883974458995, 'alias': ' - cmmlu_food_science'}, 'cmmlu_genetics': {'acc,none': 0.32954545454545453, 'acc_stderr,none': 0.035532299023675745, 'acc_norm,none': 0.32954545454545453, 'acc_norm_stderr,none': 0.035532299023675745, 'alias': ' - cmmlu_genetics'}, 'cmmlu_global_facts': {'acc,none': 0.40268456375838924, 'acc_stderr,none': 0.04031377823191209, 'acc_norm,none': 0.40268456375838924, 'acc_norm_stderr,none': 0.04031377823191209, 'alias': ' - cmmlu_global_facts'}, 'cmmlu_high_school_biology': {'acc,none': 0.3727810650887574, 'acc_stderr,none': 0.03730627281928549, 'acc_norm,none': 0.3727810650887574, 'acc_norm_stderr,none': 0.03730627281928549, 'alias': ' - cmmlu_high_school_biology'}, 'cmmlu_high_school_chemistry': {'acc,none': 0.29545454545454547, 'acc_stderr,none': 0.03986246938961656, 'acc_norm,none': 0.29545454545454547, 'acc_norm_stderr,none': 0.03986246938961656, 'alias': ' - cmmlu_high_school_chemistry'}, 'cmmlu_high_school_geography': {'acc,none': 0.4067796610169492, 'acc_stderr,none': 0.045414517088615894, 
'acc_norm,none': 0.4067796610169492, 'acc_norm_stderr,none': 0.045414517088615894, 'alias': ' - cmmlu_high_school_geography'}, 'cmmlu_high_school_mathematics': {'acc,none': 0.2621951219512195, 'acc_stderr,none': 0.0344500028917346, 'acc_norm,none': 0.2621951219512195, 'acc_norm_stderr,none': 0.0344500028917346, 'alias': ' - cmmlu_high_school_mathematics'}, 'cmmlu_high_school_physics': {'acc,none': 0.3181818181818182, 'acc_stderr,none': 0.04461272175910508, 'acc_norm,none': 0.3181818181818182, 'acc_norm_stderr,none': 0.04461272175910508, 'alias': ' - cmmlu_high_school_physics'}, 'cmmlu_high_school_politics': {'acc,none': 0.3776223776223776, 'acc_stderr,none': 0.040682878492098076, 'acc_norm,none': 0.3776223776223776, 'acc_norm_stderr,none': 0.040682878492098076, 'alias': ' - cmmlu_high_school_politics'}, 'cmmlu_human_sexuality': {'acc,none': 0.4444444444444444, 'acc_stderr,none': 0.044444444444444495, 'acc_norm,none': 0.4444444444444444, 'acc_norm_stderr,none': 0.044444444444444495, 'alias': ' - cmmlu_human_sexuality'}, 'cmmlu_international_law': {'acc,none': 0.32972972972972975, 'acc_stderr,none': 0.03465733148032954, 'acc_norm,none': 0.32972972972972975, 'acc_norm_stderr,none': 0.03465733148032954, 'alias': ' - cmmlu_international_law'}, 'cmmlu_journalism': {'acc,none': 0.42441860465116277, 'acc_stderr,none': 0.037796581784641, 'acc_norm,none': 0.42441860465116277, 'acc_norm_stderr,none': 0.037796581784641, 'alias': ' - cmmlu_journalism'}, 'cmmlu_jurisprudence': {'acc,none': 0.39172749391727496, 'acc_stderr,none': 0.024107334397898715, 'acc_norm,none': 0.39172749391727496, 'acc_norm_stderr,none': 0.024107334397898715, 'alias': ' - cmmlu_jurisprudence'}, 'cmmlu_legal_and_moral_basis': {'acc,none': 0.6588785046728972, 'acc_stderr,none': 0.03248384363697549, 'acc_norm,none': 0.6588785046728972, 'acc_norm_stderr,none': 0.03248384363697549, 'alias': ' - cmmlu_legal_and_moral_basis'}, 'cmmlu_logical': {'acc,none': 0.3902439024390244, 'acc_stderr,none': 0.04416377855732609, 'acc_norm,none': 0.3902439024390244, 'acc_norm_stderr,none': 0.04416377855732609, 'alias': ' - cmmlu_logical'}, 'cmmlu_machine_learning': {'acc,none': 0.4262295081967213, 'acc_stderr,none': 0.04495708831296081, 'acc_norm,none': 0.4262295081967213, 'acc_norm_stderr,none': 0.04495708831296081, 'alias': ' - cmmlu_machine_learning'}, 'cmmlu_management': {'acc,none': 0.4857142857142857, 'acc_stderr,none': 0.034571603689472506, 'acc_norm,none': 0.4857142857142857, 'acc_norm_stderr,none': 0.034571603689472506, 'alias': ' - cmmlu_management'}, 'cmmlu_marketing': {'acc,none': 0.5277777777777778, 'acc_stderr,none': 0.037314037607574575, 'acc_norm,none': 0.5277777777777778, 'acc_norm_stderr,none': 0.037314037607574575, 'alias': ' - cmmlu_marketing'}, 'cmmlu_marxist_theory': {'acc,none': 0.5291005291005291, 'acc_stderr,none': 0.036404433270336836, 'acc_norm,none': 0.5291005291005291, 'acc_norm_stderr,none': 0.036404433270336836, 'alias': ' - cmmlu_marxist_theory'}, 'cmmlu_modern_chinese': {'acc,none': 0.3275862068965517, 'acc_stderr,none': 0.04376552980994349, 'acc_norm,none': 0.3275862068965517, 'acc_norm_stderr,none': 0.04376552980994349, 'alias': ' - cmmlu_modern_chinese'}, 'cmmlu_nutrition': {'acc,none': 0.45517241379310347, 'acc_stderr,none': 0.04149886942192118, 'acc_norm,none': 0.45517241379310347, 'acc_norm_stderr,none': 0.04149886942192118, 'alias': ' - cmmlu_nutrition'}, 'cmmlu_philosophy': {'acc,none': 0.41904761904761906, 'acc_stderr,none': 0.0483821637528253, 'acc_norm,none': 0.41904761904761906, 'acc_norm_stderr,none': 
0.0483821637528253, 'alias': ' - cmmlu_philosophy'}, 'cmmlu_professional_accounting': {'acc,none': 0.42857142857142855, 'acc_stderr,none': 0.03751612367420645, 'acc_norm,none': 0.42857142857142855, 'acc_norm_stderr,none': 0.03751612367420645, 'alias': ' - cmmlu_professional_accounting'}, 'cmmlu_professional_law': {'acc,none': 0.32701421800947866, 'acc_stderr,none': 0.03237252797910212, 'acc_norm,none': 0.32701421800947866, 'acc_norm_stderr,none': 0.03237252797910212, 'alias': ' - cmmlu_professional_law'}, 'cmmlu_professional_medicine': {'acc,none': 0.3271276595744681, 'acc_stderr,none': 0.024227541017929646, 'acc_norm,none': 0.3271276595744681, 'acc_norm_stderr,none': 0.024227541017929646, 'alias': ' - cmmlu_professional_medicine'}, 'cmmlu_professional_psychology': {'acc,none': 0.4396551724137931, 'acc_stderr,none': 0.03265711286547217, 'acc_norm,none': 0.4396551724137931, 'acc_norm_stderr,none': 0.03265711286547217, 'alias': ' - cmmlu_professional_psychology'}, 'cmmlu_public_relations': {'acc,none': 0.4885057471264368, 'acc_stderr,none': 0.03800425000198232, 'acc_norm,none': 0.4885057471264368, 'acc_norm_stderr,none': 0.03800425000198232, 'alias': ' - cmmlu_public_relations'}, 'cmmlu_security_study': {'acc,none': 0.4444444444444444, 'acc_stderr,none': 0.04292596718256981, 'acc_norm,none': 0.4444444444444444, 'acc_norm_stderr,none': 0.04292596718256981, 'alias': ' - cmmlu_security_study'}, 'cmmlu_sociology': {'acc,none': 0.40707964601769914, 'acc_stderr,none': 0.03275266284786317, 'acc_norm,none': 0.40707964601769914, 'acc_norm_stderr,none': 0.03275266284786317, 'alias': ' - cmmlu_sociology'}, 'cmmlu_sports_science': {'acc,none': 0.3878787878787879, 'acc_stderr,none': 0.038049136539710114, 'acc_norm,none': 0.3878787878787879, 'acc_norm_stderr,none': 0.038049136539710114, 'alias': ' - cmmlu_sports_science'}, 'cmmlu_traditional_chinese_medicine': {'acc,none': 0.2864864864864865, 'acc_stderr,none': 0.03333068663336699, 'acc_norm,none': 0.2864864864864865, 'acc_norm_stderr,none': 0.03333068663336699, 'alias': ' - cmmlu_traditional_chinese_medicine'}, 'cmmlu_virology': {'acc,none': 0.47928994082840237, 'acc_stderr,none': 0.03854273242663734, 'acc_norm,none': 0.47928994082840237, 'acc_norm_stderr,none': 0.03854273242663734, 'alias': ' - cmmlu_virology'}, 'cmmlu_world_history': {'acc,none': 0.453416149068323, 'acc_stderr,none': 0.03935653891289664, 'acc_norm,none': 0.453416149068323, 'acc_norm_stderr,none': 0.03935653891289664, 'alias': ' - cmmlu_world_history'}, 'cmmlu_world_religions': {'acc,none': 0.425, 'acc_stderr,none': 0.0392039498715957, 'acc_norm,none': 0.425, 'acc_norm_stderr,none': 0.0392039498715957, 'alias': ' - cmmlu_world_religions'}, 'lambada': {'perplexity,none': 3.477999261370286, 'perplexity_stderr,none': 0.16329169211004324, 'acc,none': 0.7254997089074325, 'acc_stderr,none': 0.01676361499326025, 'alias': 'lambada'}, 'lambada_openai': {'perplexity,none': 3.180029723439131, 'perplexity_stderr,none': 0.05830830767398106, 'acc,none': 0.755288181641762, 'acc_stderr,none': 0.005989573373070082, 'alias': ' - lambada_openai'}, 'lambada_standard': {'perplexity,none': 3.776606949709117, 'perplexity_stderr,none': 0.07308622187542482, 'acc,none': 0.694352804191733, 'acc_stderr,none': 0.006418187162765869, 'alias': ' - lambada_standard'}, 'piqa': {'acc,none': 0.8052230685527747, 'acc_stderr,none': 0.009240006693317723, 'acc_norm,none': 0.8204570184983678, 'acc_norm_stderr,none': 0.00895483432920114, 'alias': ' - piqa'}, 'mutual': {'r@1,none': 0.22573363431151242, 'r@1_stderr,none': 
0.014053085820407435, 'r@2,none': 0.42099322799097066, 'r@2_stderr,none': 0.016596164895518038, 'mrr,none': 0.7204665161775772, 'mrr_stderr,none': 0.010218811328814581, 'alias': 'mutual'}, 'prost': {'acc,none': 0.3476729291204099, 'acc_stderr,none': 0.0034792952996372042, 'acc_norm,none': 0.3074295473953886, 'acc_norm_stderr,none': 0.0033711488878894512, 'alias': 'prost'}, 'wsc273': {'acc,none': 0.8901098901098901, 'acc_stderr,none': 0.018963420053918545, 'alias': 'wsc273'}, 'sycophancy': {'acc,none': 0.803766929553093, 'acc_stderr,none': 0.08597393979979331, 'alias': 'sycophancy'}, 'sycophancy_on_nlp_survey': {'acc,none': 0.9355969551282052, 'acc_stderr,none': 0.0024567845065233285, 'alias': ' - sycophancy_on_nlp_survey'}, 'sycophancy_on_philpapers2020': {'acc,none': 0.8834498834498834, 'acc_stderr,none': 0.0032305521742775297, 'alias': ' - sycophancy_on_philpapers2020'}, 'sycophancy_on_political_typology_quiz': {'acc,none': 0.5976470588235294, 'acc_stderr,none': 0.0048556479063216655, 'alias': ' - sycophancy_on_political_typology_quiz'}, 'qa4mre': {'acc,none': 0.4875886524822695, 'acc_stderr,none': 0.04415178467483081, 'acc_norm,none': 0.5372340425531915, 'acc_norm_stderr,none': 0.07588716246805031, 'alias': 'qa4mre'}, 'qa4mre_2011': {'acc,none': 0.5416666666666666, 'acc_stderr,none': 0.04567549854280212, 'acc_norm,none': 0.6833333333333333, 'acc_norm_stderr,none': 0.04264263153554635, 'alias': ' - qa4mre_2011'}, 'qa4mre_2012': {'acc,none': 0.5, 'acc_stderr,none': 0.03965257928590721, 'acc_norm,none': 0.56875, 'acc_norm_stderr,none': 0.03927594984018919, 'alias': ' - qa4mre_2012'}, 'qa4mre_2013': {'acc,none': 0.45774647887323944, 'acc_stderr,none': 0.029615596117597787, 'acc_norm,none': 0.45774647887323944, 'acc_norm_stderr,none': 0.02961559611759778, 'alias': ' - qa4mre_2013'}, 'ai2_arc': {'acc,none': 0.7065952649379932, 'acc_stderr,none': 0.09736868212773775, 'acc_norm,none': 0.7130777903043969, 'acc_norm_stderr,none': 0.08088060213614173, 'alias': ' - ai2_arc'}, 'arc_challenge': {'acc,none': 0.5008532423208191, 'acc_stderr,none': 0.014611369529813262, 'acc_norm,none': 0.5426621160409556, 'acc_norm_stderr,none': 0.014558106543924068, 'alias': '  - arc_challenge'}, 'arc_easy': {'acc,none': 0.8080808080808081, 'acc_stderr,none': 0.008080808080807977, 'acc_norm,none': 0.7971380471380471, 'acc_norm_stderr,none': 0.008251544823606903, 'alias': '  - arc_easy'}, 'logiqa': {'acc,none': 0.24270353302611367, 'acc_stderr,none': 0.01681567620647953, 'acc_norm,none': 0.30261136712749614, 'acc_norm_stderr,none': 0.018018696598158846, 'alias': ' - logiqa'}, 'winogrande': {'acc,none': 0.7363851617995264, 'acc_stderr,none': 0.012382849299658464, 'alias': ' - winogrande'}, 'boolq': {'acc,none': 0.8363914373088684, 'acc_stderr,none': 0.006469941343840766, 'alias': 'boolq'}, 'logiqa2': {'acc,none': 0.30216284987277353, 'acc_stderr,none': 0.011585358690310618, 'acc_norm,none': 0.30916030534351147, 'acc_norm_stderr,none': 0.011659835223676902, 'alias': 'logiqa2'}, 'openbookqa': {'acc,none': 0.33, 'acc_stderr,none': 0.021049612166134803, 'acc_norm,none': 0.442, 'acc_norm_stderr,none': 0.02223197069632112, 'alias': 'openbookqa'}, 'pawsx': {'acc,none': 0.4147857142857143, 'acc_stderr,none': 0.057291271279150895, 'alias': 'pawsx'}, 'paws_de': {'acc,none': 0.3825, 'acc_stderr,none': 0.01086995643857379, 'alias': ' - paws_de'}, 'paws_en': {'acc,none': 0.3145, 'acc_stderr,none': 0.010385027655220813, 'alias': ' - paws_en'}, 'paws_es': {'acc,none': 0.346, 'acc_stderr,none': 0.010639483037236658, 'alias': ' - 
paws_es'}, 'paws_fr': {'acc,none': 0.4905, 'acc_stderr,none': 0.011181117282805228, 'alias': ' - paws_fr'}, 'paws_ja': {'acc,none': 0.536, 'acc_stderr,none': 0.011154111668060216, 'alias': ' - paws_ja'}, 'paws_ko': {'acc,none': 0.418, 'acc_stderr,none': 0.01103172014804208, 'alias': ' - paws_ko'}, 'paws_zh': {'acc,none': 0.416, 'acc_stderr,none': 0.011024190055654281, 'alias': ' - paws_zh'}, 'multimedqa': {'alias': 'stem', 'acc,none': 0.5325762952448545, 'acc_stderr,none': 0.06628005425577563, 'acc_norm,none': 0.4918553467234212, 'acc_norm_stderr,none': 0.0001327699679693839}, 'medmcqa': {'acc,none': 0.4819507530480516, 'acc_stderr,none': 0.007726714059604551, 'acc_norm,none': 0.4819507530480516, 'acc_norm_stderr,none': 0.007726714059604551, 'alias': 'medmcqa'}, 'medqa_4options': {'acc,none': 0.5106048703849175, 'acc_stderr,none': 0.014016150183915747, 'acc_norm,none': 0.5106048703849175, 'acc_norm_stderr,none': 0.014016150183915747, 'alias': 'medqa_4options'}, 'mmlu_anatomy': {'alias': '  - anatomy', 'acc,none': 0.5555555555555556, 'acc_stderr,none': 0.042925967182569816}, 'mmlu_clinical_knowledge': {'alias': '  - clinical_knowledge', 'acc,none': 0.6830188679245283, 'acc_stderr,none': 0.0286372356398009}, 'mmlu_college_biology': {'alias': '  - college_biology', 'acc,none': 0.6805555555555556, 'acc_stderr,none': 0.03899073687357336}, 'mmlu_college_medicine': {'alias': '  - college_medicine', 'acc,none': 0.5895953757225434, 'acc_stderr,none': 0.03750757044895537}, 'mmlu_medical_genetics': {'alias': '  - medical_genetics', 'acc,none': 0.7, 'acc_stderr,none': 0.046056618647183814}, 'mmlu_professional_medicine': {'alias': '  - professional_medicine', 'acc,none': 0.6801470588235294, 'acc_stderr,none': 0.028332959514031232}, 'pubmedqa': {'acc,none': 0.754, 'acc_stderr,none': 0.019279819056352475, 'alias': 'pubmedqa'}, 'mc_taco': {'acc,none': 0.6929675916119467, 'acc_stderr,none': 0.004747222342042236, 'f1,none': 0.5734883036633809, 'f1_stderr,none': 0.007155483444682045, 'alias': 'mc_taco'}, 'kmmlu': {'acc,none': 0.33245740687265374, 'acc_stderr,none': 0.047791173115529544, 'acc_norm,none': 0.33245740687265374, 'acc_norm_stderr,none': 0.047791173115529544, 'alias': 'kmmlu'}, 'kmmlu_accounting': {'acc,none': 0.31, 'acc_stderr,none': 0.04648231987117316, 'acc_norm,none': 0.31, 'acc_norm_stderr,none': 0.04648231987117316, 'alias': ' - kmmlu_accounting'}, 'kmmlu_agricultural_sciences': {'acc,none': 0.286, 'acc_stderr,none': 0.014297146862517908, 'acc_norm,none': 0.286, 'acc_norm_stderr,none': 0.014297146862517908, 'alias': ' - kmmlu_agricultural_sciences'}, 'kmmlu_aviation_engineering_and_maintenance': {'acc,none': 0.34, 'acc_stderr,none': 0.014987482264363935, 'acc_norm,none': 0.34, 'acc_norm_stderr,none': 0.014987482264363935, 'alias': ' - kmmlu_aviation_engineering_and_maintenance'}, 'kmmlu_biology': {'acc,none': 0.259, 'acc_stderr,none': 0.013860415257527911, 'acc_norm,none': 0.259, 'acc_norm_stderr,none': 0.013860415257527911, 'alias': ' - kmmlu_biology'}, 'kmmlu_chemical_engineering': {'acc,none': 0.311, 'acc_stderr,none': 0.0146455963857227, 'acc_norm,none': 0.311, 'acc_norm_stderr,none': 0.0146455963857227, 'alias': ' - kmmlu_chemical_engineering'}, 'kmmlu_chemistry': {'acc,none': 0.32, 'acc_stderr,none': 0.019059698848626565, 'acc_norm,none': 0.32, 'acc_norm_stderr,none': 0.019059698848626565, 'alias': ' - kmmlu_chemistry'}, 'kmmlu_civil_engineering': {'acc,none': 0.348, 'acc_stderr,none': 0.01507060460376841, 'acc_norm,none': 0.348, 'acc_norm_stderr,none': 0.01507060460376841, 'alias': ' - 
kmmlu_civil_engineering'}, 'kmmlu_computer_science': {'acc,none': 0.5, 'acc_stderr,none': 0.015819299929208316, 'acc_norm,none': 0.5, 'acc_norm_stderr,none': 0.015819299929208316, 'alias': ' - kmmlu_computer_science'}, 'kmmlu_construction': {'acc,none': 0.329, 'acc_stderr,none': 0.014865395385928362, 'acc_norm,none': 0.329, 'acc_norm_stderr,none': 0.014865395385928362, 'alias': ' - kmmlu_construction'}, 'kmmlu_criminal_law': {'acc,none': 0.285, 'acc_stderr,none': 0.03199992148231577, 'acc_norm,none': 0.285, 'acc_norm_stderr,none': 0.03199992148231577, 'alias': ' - kmmlu_criminal_law'}, 'kmmlu_ecology': {'acc,none': 0.338, 'acc_stderr,none': 0.014965960710224487, 'acc_norm,none': 0.338, 'acc_norm_stderr,none': 0.014965960710224487, 'alias': ' - kmmlu_ecology'}, 'kmmlu_economics': {'acc,none': 0.27692307692307694, 'acc_stderr,none': 0.039398253452664685, 'acc_norm,none': 0.27692307692307694, 'acc_norm_stderr,none': 0.039398253452664685, 'alias': ' - kmmlu_economics'}, 'kmmlu_education': {'acc,none': 0.32, 'acc_stderr,none': 0.04688261722621504, 'acc_norm,none': 0.32, 'acc_norm_stderr,none': 0.04688261722621504, 'alias': ' - kmmlu_education'}, 'kmmlu_electrical_engineering': {'acc,none': 0.348, 'acc_stderr,none': 0.015070604603768408, 'acc_norm,none': 0.348, 'acc_norm_stderr,none': 0.015070604603768408, 'alias': ' - kmmlu_electrical_engineering'}, 'kmmlu_electronics_engineering': {'acc,none': 0.391, 'acc_stderr,none': 0.015438826294681782, 'acc_norm,none': 0.391, 'acc_norm_stderr,none': 0.015438826294681782, 'alias': ' - kmmlu_electronics_engineering'}, 'kmmlu_energy_management': {'acc,none': 0.3, 'acc_stderr,none': 0.014498627873361425, 'acc_norm,none': 0.3, 'acc_norm_stderr,none': 0.014498627873361425, 'alias': ' - kmmlu_energy_management'}, 'kmmlu_environmental_science': {'acc,none': 0.324, 'acc_stderr,none': 0.01480686473373886, 'acc_norm,none': 0.324, 'acc_norm_stderr,none': 0.01480686473373886, 'alias': ' - kmmlu_environmental_science'}, 'kmmlu_fashion': {'acc,none': 0.329, 'acc_stderr,none': 0.014865395385928364, 'acc_norm,none': 0.329, 'acc_norm_stderr,none': 0.014865395385928364, 'alias': ' - kmmlu_fashion'}, 'kmmlu_food_processing': {'acc,none': 0.311, 'acc_stderr,none': 0.014645596385722694, 'acc_norm,none': 0.311, 'acc_norm_stderr,none': 0.014645596385722694, 'alias': ' - kmmlu_food_processing'}, 'kmmlu_gas_technology_and_engineering': {'acc,none': 0.335, 'acc_stderr,none': 0.014933117490932568, 'acc_norm,none': 0.335, 'acc_norm_stderr,none': 0.014933117490932568, 'alias': ' - kmmlu_gas_technology_and_engineering'}, 'kmmlu_geomatics': {'acc,none': 0.359, 'acc_stderr,none': 0.015177264224798594, 'acc_norm,none': 0.359, 'acc_norm_stderr,none': 0.015177264224798594, 'alias': ' - kmmlu_geomatics'}, 'kmmlu_health': {'acc,none': 0.24, 'acc_stderr,none': 0.042923469599092816, 'acc_norm,none': 0.24, 'acc_norm_stderr,none': 0.042923469599092816, 'alias': ' - kmmlu_health'}, 'kmmlu_industrial_engineer': {'acc,none': 0.345, 'acc_stderr,none': 0.015039986742055235, 'acc_norm,none': 0.345, 'acc_norm_stderr,none': 0.015039986742055235, 'alias': ' - kmmlu_industrial_engineer'}, 'kmmlu_information_technology': {'acc,none': 0.437, 'acc_stderr,none': 0.015693223928730377, 'acc_norm,none': 0.437, 'acc_norm_stderr,none': 0.015693223928730377, 'alias': ' - kmmlu_information_technology'}, 'kmmlu_interior_architecture_and_design': {'acc,none': 0.386, 'acc_stderr,none': 0.01540263747678438, 'acc_norm,none': 0.386, 'acc_norm_stderr,none': 0.01540263747678438, 'alias': ' - 
kmmlu_interior_architecture_and_design'}, 'kmmlu_law': {'acc,none': 0.294, 'acc_stderr,none': 0.014414290540008218, 'acc_norm,none': 0.294, 'acc_norm_stderr,none': 0.014414290540008218, 'alias': ' - kmmlu_law'}, 'kmmlu_machine_design_and_manufacturing': {'acc,none': 0.351, 'acc_stderr,none': 0.015100563798316403, 'acc_norm,none': 0.351, 'acc_norm_stderr,none': 0.015100563798316403, 'alias': ' - kmmlu_machine_design_and_manufacturing'}, 'kmmlu_management': {'acc,none': 0.305, 'acc_stderr,none': 0.014566646394664385, 'acc_norm,none': 0.305, 'acc_norm_stderr,none': 0.014566646394664385, 'alias': ' - kmmlu_management'}, 'kmmlu_maritime_engineering': {'acc,none': 0.32166666666666666, 'acc_stderr,none': 0.019085836431523086, 'acc_norm,none': 0.32166666666666666, 'acc_norm_stderr,none': 0.019085836431523086, 'alias': ' - kmmlu_maritime_engineering'}, 'kmmlu_marketing': {'acc,none': 0.346, 'acc_stderr,none': 0.015050266127564455, 'acc_norm,none': 0.346, 'acc_norm_stderr,none': 0.015050266127564455, 'alias': ' - kmmlu_marketing'}, 'kmmlu_materials_engineering': {'acc,none': 0.304, 'acc_stderr,none': 0.014553205687950432, 'acc_norm,none': 0.304, 'acc_norm_stderr,none': 0.014553205687950432, 'alias': ' - kmmlu_materials_engineering'}, 'kmmlu_mechanical_engineering': {'acc,none': 0.321, 'acc_stderr,none': 0.01477082181793464, 'acc_norm,none': 0.321, 'acc_norm_stderr,none': 0.01477082181793464, 'alias': ' - kmmlu_mechanical_engineering'}, 'kmmlu_nondestructive_testing': {'acc,none': 0.291, 'acc_stderr,none': 0.014370995982377946, 'acc_norm,none': 0.291, 'acc_norm_stderr,none': 0.014370995982377946, 'alias': ' - kmmlu_nondestructive_testing'}, 'kmmlu_patent': {'acc,none': 0.26, 'acc_stderr,none': 0.04408440022768077, 'acc_norm,none': 0.26, 'acc_norm_stderr,none': 0.04408440022768077, 'alias': ' - kmmlu_patent'}, 'kmmlu_political_science_and_sociology': {'acc,none': 0.30666666666666664, 'acc_stderr,none': 0.026666666666666658, 'acc_norm,none': 0.30666666666666664, 'acc_norm_stderr,none': 0.026666666666666658, 'alias': ' - kmmlu_political_science_and_sociology'}, 'kmmlu_psychology': {'acc,none': 0.243, 'acc_stderr,none': 0.013569640199177462, 'acc_norm,none': 0.243, 'acc_norm_stderr,none': 0.013569640199177462, 'alias': ' - kmmlu_psychology'}, 'kmmlu_public_safety': {'acc,none': 0.335, 'acc_stderr,none': 0.014933117490932573, 'acc_norm,none': 0.335, 'acc_norm_stderr,none': 0.014933117490932573, 'alias': ' - kmmlu_public_safety'}, 'kmmlu_railway_and_automotive_engineering': {'acc,none': 0.303, 'acc_stderr,none': 0.014539683710535253, 'acc_norm,none': 0.303, 'acc_norm_stderr,none': 0.014539683710535253, 'alias': ' - kmmlu_railway_and_automotive_engineering'}, 'kmmlu_real_estate': {'acc,none': 0.305, 'acc_stderr,none': 0.03263741725420571, 'acc_norm,none': 0.305, 'acc_norm_stderr,none': 0.03263741725420571, 'alias': ' - kmmlu_real_estate'}, 'kmmlu_refrigerating_machinery': {'acc,none': 0.298, 'acc_stderr,none': 0.01447084674113472, 'acc_norm,none': 0.298, 'acc_norm_stderr,none': 0.01447084674113472, 'alias': ' - kmmlu_refrigerating_machinery'}, 'kmmlu_social_welfare': {'acc,none': 0.328, 'acc_stderr,none': 0.01485384248727033, 'acc_norm,none': 0.328, 'acc_norm_stderr,none': 0.01485384248727033, 'alias': ' - kmmlu_social_welfare'}, 'kmmlu_taxation': {'acc,none': 0.29, 'acc_stderr,none': 0.03216633903375033, 'acc_norm,none': 0.29, 'acc_norm_stderr,none': 0.03216633903375033, 'alias': ' - kmmlu_taxation'}, 'kmmlu_telecommunications_and_wireless_technology': {'acc,none': 0.416, 'acc_stderr,none': 
0.015594460144140601, 'acc_norm,none': 0.416, 'acc_norm_stderr,none': 0.015594460144140601, 'alias': ' - kmmlu_telecommunications_and_wireless_technology'}, 'gsm8k': {'exact_match,get-answer': 0.38817285822592873, 'exact_match_stderr,get-answer': 0.013423607564002757, 'alias': 'gsm8k'}, 'mutual_plus': {'r@1,none': 0.2595936794582393, 'r@1_stderr,none': 0.01473704740275095, 'r@2,none': 0.44808126410835214, 'r@2_stderr,none': 0.01671646047143711, 'mrr,none': 0.6669488337095556, 'mrr_stderr,none': 0.010463015830979078, 'alias': 'mutual_plus'}, 'kobest': {'acc,none': 0.5507564130673098, 'acc_stderr,none': 0.05115523112210208, 'f1,none': 0.4791948665252917, 'f1_stderr,none': 'N/A', 'acc_norm,none': 0.518, 'acc_norm_stderr,none': 0.0005003527054108269, 'alias': 'kobest'}, 'kobest_boolq': {'acc,none': 0.6274928774928775, 'acc_stderr,none': 0.012907521446784632, 'f1,none': 0.5779579332760488, 'f1_stderr,none': 'N/A', 'alias': ' - kobest_boolq'}, 'kobest_copa': {'acc,none': 0.588, 'acc_stderr,none': 0.015572363292015104, 'f1,none': 0.5873397435897436, 'f1_stderr,none': 'N/A', 'alias': ' - kobest_copa'}, 'kobest_hellaswag': {'acc,none': 0.43, 'acc_stderr,none': 0.022162634426652835, 'f1,none': 0.42600001945351906, 'f1_stderr,none': 'N/A', 'acc_norm,none': 0.518, 'acc_norm_stderr,none': 0.02236856511738799, 'alias': ' - kobest_hellaswag'}, 'kobest_sentineg': {'acc,none': 0.5365239294710328, 'acc_stderr,none': 0.02505881982355679, 'f1,none': 0.4043705153294194, 'f1_stderr,none': 'N/A', 'alias': ' - kobest_sentineg'}, 'kobest_wic': {'acc,none': 0.4880952380952381, 'acc_stderr,none': 0.014087502464604038, 'f1,none': 0.328, 'f1_stderr,none': 'N/A', 'alias': ' - kobest_wic'}, 'race': {'acc,none': 0.40861244019138754, 'acc_stderr,none': 0.015213937761630927, 'alias': 'race'}, 'arithmetic': {'acc,none': 0.90035, 'acc_stderr,none': 0.10511691499262178, 'alias': 'arithmetic'}, 'arithmetic_1dc': {'acc,none': 0.644, 'acc_stderr,none': 0.01070931112034454, 'alias': 'arithmetic_1dc'}, 'arithmetic_2da': {'acc,none': 0.9985, 'acc_stderr,none': 0.0008655920660521429, 'alias': 'arithmetic_2da'}, 'arithmetic_2dm': {'acc,none': 0.709, 'acc_stderr,none': 0.010159286665547608, 'alias': 'arithmetic_2dm'}, 'arithmetic_2ds': {'acc,none': 0.9985, 'acc_stderr,none': 0.000865592066052145, 'alias': 'arithmetic_2ds'}, 'arithmetic_3da': {'acc,none': 0.983, 'acc_stderr,none': 0.002891311093590575, 'alias': 'arithmetic_3da'}, 'arithmetic_3ds': {'acc,none': 0.9885, 'acc_stderr,none': 0.0023846841214675827, 'alias': 'arithmetic_3ds'}, 'arithmetic_4da': {'acc,none': 0.954, 'acc_stderr,none': 0.0046854003551718435, 'alias': 'arithmetic_4da'}, 'arithmetic_4ds': {'acc,none': 0.9455, 'acc_stderr,none': 0.005077180702116209, 'alias': 'arithmetic_4ds'}, 'arithmetic_5da': {'acc,none': 0.911, 'acc_stderr,none': 0.0063686560505294655, 'alias': 'arithmetic_5da'}, 'arithmetic_5ds': {'acc,none': 0.8715, 'acc_stderr,none': 0.0074847769467748975, 'alias': 'arithmetic_5ds'}, 'lambada_multilingual': {'perplexity,none': 27.056756454439657, 'perplexity_stderr,none': 10.55570565115782, 'acc,none': 0.5186493304870949, 'acc_stderr,none': 0.09575650539776685, 'alias': 'lambada_multilingual'}, 'lambada_openai_mt_de': {'perplexity,none': 43.33029030872575, 'perplexity_stderr,none': 2.393759617994182, 'acc,none': 0.4001552493692994, 'acc_stderr,none': 0.006825677476606509, 'alias': ' - lambada_openai_mt_de'}, 'lambada_openai_mt_en': {'perplexity,none': 3.1795871820811916, 'perplexity_stderr,none': 0.05830848485872709, 'acc,none': 0.7562584901998836, 
'acc_stderr,none': 0.005981525423321768, 'alias': ' - lambada_openai_mt_en'}, 'lambada_openai_mt_es': {'perplexity,none': 36.28812043227467, 'perplexity_stderr,none': 1.791789103370077, 'acc,none': 0.4267417038618281, 'acc_stderr,none': 0.006890802308382406, 'alias': ' - lambada_openai_mt_es'}, 'lambada_openai_mt_fr': {'perplexity,none': 22.21756518405148, 'perplexity_stderr,none': 1.1041376692704696, 'acc,none': 0.5208616339996118, 'acc_stderr,none': 0.00695991172085145, 'alias': ' - lambada_openai_mt_fr'}, 'lambada_openai_mt_it': {'perplexity,none': 30.268219165065197, 'perplexity_stderr,none': 1.6670875958452993, 'acc,none': 0.48922957500485154, 'acc_stderr,none': 0.006964361334232535, 'alias': ' - lambada_openai_mt_it'}, 'pythia': {'acc,none': 0.7809920153879631, 'acc_stderr,none': 0.15963097565490927, 'acc_norm,none': 0.7148213731718109, 'acc_norm_stderr,none': 0.0082806592848003, 'word_perplexity,none': 17.952971816297133, 'word_perplexity_stderr,none': 'N/A', 'byte_perplexity,none': 1.7160493234446637, 'byte_perplexity_stderr,none': 'N/A', 'bits_per_byte,none': 0.7790910200090022, 'bits_per_byte_stderr,none': 'N/A', 'perplexity,none': 3.180029723439131, 'perplexity_stderr,none': 0.05830830767398106, 'alias': 'pythia'}, 'mmlu': {'acc,none': 0.596425010682239, 'acc_stderr,none': 0.13844007438448744, 'alias': 'mmlu'}, 'mmlu_humanities': {'alias': ' - humanities', 'acc,none': 0.5343251859723698, 'acc_stderr,none': 0.16186401202777495}, 'mmlu_formal_logic': {'alias': '  - formal_logic', 'acc,none': 0.35714285714285715, 'acc_stderr,none': 0.04285714285714281}, 'mmlu_high_school_european_history': {'alias': '  - high_school_european_history', 'acc,none': 0.7515151515151515, 'acc_stderr,none': 0.033744026441394036}, 'mmlu_high_school_us_history': {'alias': '  - high_school_us_history', 'acc,none': 0.7598039215686274, 'acc_stderr,none': 0.02998373305591361}, 'mmlu_high_school_world_history': {'alias': '  - high_school_world_history', 'acc,none': 0.7805907172995781, 'acc_stderr,none': 0.026939106581553945}, 'mmlu_international_law': {'alias': '  - international_law', 'acc,none': 0.7520661157024794, 'acc_stderr,none': 0.039418975265163025}, 'mmlu_jurisprudence': {'alias': '  - jurisprudence', 'acc,none': 0.7407407407407407, 'acc_stderr,none': 0.04236511258094631}, 'mmlu_logical_fallacies': {'alias': '  - logical_fallacies', 'acc,none': 0.7607361963190185, 'acc_stderr,none': 0.0335195387952127}, 'mmlu_moral_disputes': {'alias': '  - moral_disputes', 'acc,none': 0.6791907514450867, 'acc_stderr,none': 0.0251310002336479}, 'mmlu_moral_scenarios': {'alias': '  - moral_scenarios', 'acc,none': 0.24134078212290502, 'acc_stderr,none': 0.014310999547961464}, 'mmlu_philosophy': {'alias': '  - philosophy', 'acc,none': 0.6881028938906752, 'acc_stderr,none': 0.02631185807185416}, 'mmlu_prehistory': {'alias': '  - prehistory', 'acc,none': 0.7129629629629629, 'acc_stderr,none': 0.025171041915309684}, 'mmlu_professional_law': {'alias': '  - professional_law', 'acc,none': 0.44002607561929596, 'acc_stderr,none': 0.012678037478574513}, 'mmlu_world_religions': {'alias': '  - world_religions', 'acc,none': 0.8128654970760234, 'acc_stderr,none': 0.029913127232368032}, 'mmlu_other': {'alias': ' - other', 'acc,none': 0.6816865143224976, 'acc_stderr,none': 0.09811125388055714}, 'mmlu_business_ethics': {'alias': '  - business_ethics', 'acc,none': 0.58, 'acc_stderr,none': 0.049604496374885836}, 'mmlu_global_facts': {'alias': '  - global_facts', 'acc,none': 0.39, 'acc_stderr,none': 0.04902071300001975}, 'mmlu_human_aging': 
{'alias': '  - human_aging', 'acc,none': 0.6591928251121076, 'acc_stderr,none': 0.0318114974705536}, 'mmlu_management': {'alias': '  - management', 'acc,none': 0.7864077669902912, 'acc_stderr,none': 0.04058042015646035}, 'mmlu_marketing': {'alias': '  - marketing', 'acc,none': 0.8589743589743589, 'acc_stderr,none': 0.02280138253459753}, 'mmlu_miscellaneous': {'alias': '  - miscellaneous', 'acc,none': 0.7956577266922095, 'acc_stderr,none': 0.014419123980931904}, 'mmlu_nutrition': {'alias': '  - nutrition', 'acc,none': 0.7091503267973857, 'acc_stderr,none': 0.02600480036395213}, 'mmlu_professional_accounting': {'alias': '  - professional_accounting', 'acc,none': 0.46099290780141844, 'acc_stderr,none': 0.02973659252642444}, 'mmlu_virology': {'alias': '  - virology', 'acc,none': 0.5060240963855421, 'acc_stderr,none': 0.03892212195333045}, 'mmlu_social_sciences': {'alias': ' - social_sciences', 'acc,none': 0.6958076048098798, 'acc_stderr,none': 0.08956129050566648}, 'mmlu_econometrics': {'alias': '  - econometrics', 'acc,none': 0.42105263157894735, 'acc_stderr,none': 0.046446020912223177}, 'mmlu_high_school_geography': {'alias': '  - high_school_geography', 'acc,none': 0.7373737373737373, 'acc_stderr,none': 0.03135305009533084}, 'mmlu_high_school_government_and_politics': {'alias': '  - high_school_government_and_politics', 'acc,none': 0.844559585492228, 'acc_stderr,none': 0.02614848346915332}, 'mmlu_high_school_macroeconomics': {'alias': '  - high_school_macroeconomics', 'acc,none': 0.5897435897435898, 'acc_stderr,none': 0.02493931390694079}, 'mmlu_high_school_microeconomics': {'alias': '  - high_school_microeconomics', 'acc,none': 0.634453781512605, 'acc_stderr,none': 0.0312821770636846}, 'mmlu_high_school_psychology': {'alias': '  - high_school_psychology', 'acc,none': 0.7798165137614679, 'acc_stderr,none': 0.01776597865232756}, 'mmlu_human_sexuality': {'alias': '  - human_sexuality', 'acc,none': 0.7557251908396947, 'acc_stderr,none': 0.03768335959728745}, 'mmlu_professional_psychology': {'alias': '  - professional_psychology', 'acc,none': 0.6160130718954249, 'acc_stderr,none': 0.01967580813528152}, 'mmlu_public_relations': {'alias': '  - public_relations', 'acc,none': 0.6636363636363637, 'acc_stderr,none': 0.04525393596302506}, 'mmlu_security_studies': {'alias': '  - security_studies', 'acc,none': 0.7020408163265306, 'acc_stderr,none': 0.029279567411065667}, 'mmlu_sociology': {'alias': '  - sociology', 'acc,none': 0.8656716417910447, 'acc_stderr,none': 0.024112678240900836}, 'mmlu_us_foreign_policy': {'alias': '  - us_foreign_policy', 'acc,none': 0.83, 'acc_stderr,none': 0.0377525168068637}, 'mmlu_stem': {'alias': ' - stem', 'acc,none': 0.5080875356803045, 'acc_stderr,none': 0.1215476495130753}, 'mmlu_abstract_algebra': {'alias': '  - abstract_algebra', 'acc,none': 0.3, 'acc_stderr,none': 0.046056618647183814}, 'mmlu_astronomy': {'alias': '  - astronomy', 'acc,none': 0.618421052631579, 'acc_stderr,none': 0.03953173377749194}, 'mmlu_college_chemistry': {'alias': '  - college_chemistry', 'acc,none': 0.48, 'acc_stderr,none': 0.050211673156867795}, 'mmlu_college_computer_science': {'alias': '  - college_computer_science', 'acc,none': 0.56, 'acc_stderr,none': 0.04988876515698589}, 'mmlu_college_mathematics': {'alias': '  - college_mathematics', 'acc,none': 0.34, 'acc_stderr,none': 0.04760952285695235}, 'mmlu_college_physics': {'alias': '  - college_physics', 'acc,none': 0.45098039215686275, 'acc_stderr,none': 0.04951218252396262}, 'mmlu_computer_security': {'alias': '  - computer_security', 
'acc,none': 0.74, 'acc_stderr,none': 0.0440844002276808}, 'mmlu_conceptual_physics': {'alias': '  - conceptual_physics', 'acc,none': 0.5319148936170213, 'acc_stderr,none': 0.03261936918467382}, 'mmlu_electrical_engineering': {'alias': '  - electrical_engineering', 'acc,none': 0.5724137931034483, 'acc_stderr,none': 0.04122737111370332}, 'mmlu_elementary_mathematics': {'alias': '  - elementary_mathematics', 'acc,none': 0.3915343915343915, 'acc_stderr,none': 0.025138091388851116}, 'mmlu_high_school_biology': {'alias': '  - high_school_biology', 'acc,none': 0.7387096774193549, 'acc_stderr,none': 0.024993053397764822}, 'mmlu_high_school_chemistry': {'alias': '  - high_school_chemistry', 'acc,none': 0.49261083743842365, 'acc_stderr,none': 0.035176035403610084}, 'mmlu_high_school_computer_science': {'alias': '  - high_school_computer_science', 'acc,none': 0.66, 'acc_stderr,none': 0.04760952285695237}, 'mmlu_high_school_mathematics': {'alias': '  - high_school_mathematics', 'acc,none': 0.34074074074074073, 'acc_stderr,none': 0.028897748741131133}, 'mmlu_high_school_physics': {'alias': '  - high_school_physics', 'acc,none': 0.304635761589404, 'acc_stderr,none': 0.037579499229433426}, 'mmlu_high_school_statistics': {'alias': '  - high_school_statistics', 'acc,none': 0.48148148148148145, 'acc_stderr,none': 0.034076320938540516}, 'mmlu_machine_learning': {'alias': '  - machine_learning', 'acc,none': 0.48214285714285715, 'acc_stderr,none': 0.047427623612430116}, 'wikitext': {'word_perplexity,none': 17.952971816297133, 'word_perplexity_stderr,none': 'N/A', 'byte_perplexity,none': 1.7160493234446637, 'byte_perplexity_stderr,none': 'N/A', 'bits_per_byte,none': 0.7790910200090022, 'bits_per_byte_stderr,none': 'N/A', 'alias': 'wikitext'}, 'asdiv': {'acc,none': 0.01735357917570499, 'acc_stderr,none': 0.002720520054825065, 'alias': 'asdiv'}, 'wic': {'acc,none': 0.5783699059561128, 'acc_stderr,none': 0.019565859392130985, 'alias': 'wic'}, 'swag': {'acc,none': 0.5755773268019594, 'acc_stderr,none': 0.0034944742875050363, 'acc_norm,none': 0.7741677496750975, 'acc_norm_stderr,none': 0.0029562505640686877, 'alias': 'swag'}, 'hellaswag': {'acc,none': 0.6131248755228043, 'acc_stderr,none': 0.004860393011974709, 'acc_norm,none': 0.8103963353913562, 'acc_norm_stderr,none': 0.003911862797736132, 'alias': 'hellaswag'}, 'cb': {'acc,none': 0.48214285714285715, 'acc_stderr,none': 0.06737697508644648, 'f1,none': 0.28777777777777774, 'f1_stderr,none': 'N/A', 'alias': 'cb'}, 'sglue_rte': {'acc,none': 0.6750902527075813, 'acc_stderr,none': 0.028190822551170353, 'alias': 'sglue_rte'}, 'mathqa': {'acc,none': 0.35544388609715244, 'acc_stderr,none': 0.008762266964873266, 'acc_norm,none': 0.35845896147403683, 'acc_norm_stderr,none': 0.008778747002389665, 'alias': 'mathqa'}, 'xwinograd': {'acc,none': 0.8120926050797932, 'acc_stderr,none': 0.04148122639462828, 'alias': 'xwinograd'}, 'xwinograd_en': {'acc,none': 0.8851612903225806, 'acc_stderr,none': 0.006613590439489932, 'alias': ' - xwinograd_en'}, 'xwinograd_fr': {'acc,none': 0.7469879518072289, 'acc_stderr,none': 0.04800875830437279, 'alias': ' - xwinograd_fr'}, 'xwinograd_jp': {'acc,none': 0.7194994786235662, 'acc_stderr,none': 0.014514407890552966, 'alias': ' - xwinograd_jp'}, 'xwinograd_pt': {'acc,none': 0.7642585551330798, 'acc_stderr,none': 0.026223308206222554, 'alias': ' - xwinograd_pt'}, 'xwinograd_ru': {'acc,none': 0.6857142857142857, 'acc_stderr,none': 0.026198057744026396, 'alias': ' - xwinograd_ru'}, 'xwinograd_zh': {'acc,none': 0.7658730158730159, 'acc_stderr,none': 
0.018880788485078296, 'alias': ' - xwinograd_zh'}, 'xstorycloze': {'acc,none': 0.5916611515552614, 'acc_stderr,none': 0.07661315123253779, 'alias': 'xstorycloze'}, 'xstorycloze_ar': {'acc,none': 0.5307743216412971, 'acc_stderr,none': 0.012842730340585787, 'alias': ' - xstorycloze_ar'}, 'xstorycloze_en': {'acc,none': 0.7882197220383852, 'acc_stderr,none': 0.010514241109625348, 'alias': ' - xstorycloze_en'}, 'xstorycloze_es': {'acc,none': 0.6902713434811383, 'acc_stderr,none': 0.011899045981288764, 'alias': ' - xstorycloze_es'}, 'xstorycloze_eu': {'acc,none': 0.5115817339510258, 'acc_stderr,none': 0.012863672949335879, 'alias': ' - xstorycloze_eu'}, 'xstorycloze_hi': {'acc,none': 0.5526141628060887, 'acc_stderr,none': 0.012795688167385287, 'alias': ' - xstorycloze_hi'}, 'xstorycloze_id': {'acc,none': 0.5956320317670417, 'acc_stderr,none': 0.012629580396570946, 'alias': ' - xstorycloze_id'}, 'xstorycloze_my': {'acc,none': 0.4870946393117141, 'acc_stderr,none': 0.012862838605728477, 'alias': ' - xstorycloze_my'}, 'xstorycloze_ru': {'acc,none': 0.6657842488418266, 'acc_stderr,none': 0.012139246810918228, 'alias': ' - xstorycloze_ru'}, 'xstorycloze_sw': {'acc,none': 0.514228987425546, 'acc_stderr,none': 0.01286191399959613, 'alias': ' - xstorycloze_sw'}, 'xstorycloze_te': {'acc,none': 0.5387160820648577, 'acc_stderr,none': 0.012828493353271539, 'alias': ' - xstorycloze_te'}, 'xstorycloze_zh': {'acc,none': 0.6333553937789543, 'acc_stderr,none': 0.012401034429990705, 'alias': ' - xstorycloze_zh'}}, 'groups': {'anli': {'acc,none': 0.3803125, 'acc_stderr,none': 0.015133650384246593, 'alias': 'anli'}, 'lambada_cloze': {'perplexity,none': 92.9621493847383, 'perplexity_stderr,none': 4.749192287461508, 'acc,none': 0.08354356685425965, 'acc_stderr,none': 0.017359537873426072, 'alias': 'lambada_cloze'}, 'glue': {'acc,none': 0.5149773701762745, 'acc_stderr,none': 0.0011586493887209115, 'f1,none': 0.3490504972495486, 'f1_stderr,none': 0.0013384442216884647, 'mcc,none': -0.04847021005996873, 'mcc_stderr,none': 0.030783455837743674, 'alias': 'glue'}, 'blimp': {'acc,none': 0.8289253731343283, 'acc_stderr,none': 0.1651542013152315, 'alias': ' - blimp'}, 'xcopa': {'acc,none': 0.5594545454545454, 'acc_stderr,none': 0.05387910421610255, 'alias': 'xcopa'}, 'crows_pairs': {'likelihood_diff,none': 4.039303237651172, 'likelihood_diff_stderr,none': 0.4554088930609172, 'pct_stereotype,none': 0.5945140131186643, 'pct_stereotype_stderr,none': 0.08619149455497793, 'alias': 'crows_pairs'}, 'xnli': {'acc,none': 0.43052208835341366, 'acc_stderr,none': 0.05866526647573086, 'alias': 'xnli'}, 'truthfulqa': {'acc,none': 0.35372429452780935, 'acc_stderr,none': 0.0015289998228574559, 'bleu_max,none': 30.506553066296135, 'bleu_max_stderr,none': 0.8545048499620589, 'bleu_acc,none': 0.41370869033047736, 'bleu_acc_stderr,none': 0.0172408618120998, 'bleu_diff,none': -1.4792658300937298, 'bleu_diff_stderr,none': 1.0365996246291096, 'rouge1_max,none': 55.752927992652076, 'rouge1_max_stderr,none': 0.9304517781051922, 'rouge1_acc,none': 0.397796817625459, 'rouge1_acc_stderr,none': 0.017133934248559652, 'rouge1_diff,none': -1.916659079286129, 'rouge1_diff_stderr,none': 1.2644440329641677, 'rouge2_max,none': 40.96291082037781, 'rouge2_max_stderr,none': 1.1007195429525476, 'rouge2_acc,none': 0.3537331701346389, 'rouge2_acc_stderr,none': 0.016737814358846147, 'rouge2_diff,none': -2.594294632926603, 'rouge2_diff_stderr,none': 1.4182491490828897, 'rougeL_max,none': 53.07319351290984, 'rougeL_max_stderr,none': 0.9465416042598798, 
'rougeL_acc,none': 0.40269277845777235, 'rougeL_acc_stderr,none': 0.017168830935187215, 'rougeL_diff,none': -2.232349573503825, 'rougeL_diff_stderr,none': 1.2801381361084667, 'alias': 'truthfulqa'}, 'ceval-valid': {'acc,none': 0.40713224368499257, 'acc_stderr,none': 0.14694388399894986, 'acc_norm,none': 0.40713224368499257, 'acc_norm_stderr,none': 0.14694388399894986, 'alias': 'ceval-valid'}, 'freebase': {'exact_match,none': 0.15403543307086615, 'exact_match_stderr,none': 0.008009980186286517, 'alias': 'freebase'}, 'cmmlu': {'acc,none': 0.3979450872042825, 'acc_stderr,none': 0.09095017592357733, 'acc_norm,none': 0.3979450872042825, 'acc_norm_stderr,none': 0.09095017592357733, 'alias': 'cmmlu'}, 'lambada': {'perplexity,none': 3.477999261370286, 'perplexity_stderr,none': 0.16329169211004324, 'acc,none': 0.7254997089074325, 'acc_stderr,none': 0.01676361499326025, 'alias': 'lambada'}, 'sycophancy': {'acc,none': 0.803766929553093, 'acc_stderr,none': 0.08597393979979331, 'alias': 'sycophancy'}, 'qa4mre': {'acc,none': 0.4875886524822695, 'acc_stderr,none': 0.04415178467483081, 'acc_norm,none': 0.5372340425531915, 'acc_norm_stderr,none': 0.07588716246805031, 'alias': 'qa4mre'}, 'ai2_arc': {'acc,none': 0.7065952649379932, 'acc_stderr,none': 0.09736868212773775, 'acc_norm,none': 0.7130777903043969, 'acc_norm_stderr,none': 0.08088060213614173, 'alias': ' - ai2_arc'}, 'pawsx': {'acc,none': 0.4147857142857143, 'acc_stderr,none': 0.057291271279150895, 'alias': 'pawsx'}, 'multimedqa': {'alias': 'stem', 'acc,none': 0.5325762952448545, 'acc_stderr,none': 0.06628005425577563, 'acc_norm,none': 0.4918553467234212, 'acc_norm_stderr,none': 0.0001327699679693839}, 'kmmlu': {'acc,none': 0.33245740687265374, 'acc_stderr,none': 0.047791173115529544, 'acc_norm,none': 0.33245740687265374, 'acc_norm_stderr,none': 0.047791173115529544, 'alias': 'kmmlu'}, 'kobest': {'acc,none': 0.5507564130673098, 'acc_stderr,none': 0.05115523112210208, 'f1,none': 0.4791948665252917, 'f1_stderr,none': 'N/A', 'acc_norm,none': 0.518, 'acc_norm_stderr,none': 0.0005003527054108269, 'alias': 'kobest'}, 'arithmetic': {'acc,none': 0.90035, 'acc_stderr,none': 0.10511691499262178, 'alias': 'arithmetic'}, 'lambada_multilingual': {'perplexity,none': 27.056756454439657, 'perplexity_stderr,none': 10.55570565115782, 'acc,none': 0.5186493304870949, 'acc_stderr,none': 0.09575650539776685, 'alias': 'lambada_multilingual'}, 'pythia': {'acc,none': 0.7809920153879631, 'acc_stderr,none': 0.15963097565490927, 'acc_norm,none': 0.7148213731718109, 'acc_norm_stderr,none': 0.0082806592848003, 'word_perplexity,none': 17.952971816297133, 'word_perplexity_stderr,none': 'N/A', 'byte_perplexity,none': 1.7160493234446637, 'byte_perplexity_stderr,none': 'N/A', 'bits_per_byte,none': 0.7790910200090022, 'bits_per_byte_stderr,none': 'N/A', 'perplexity,none': 3.180029723439131, 'perplexity_stderr,none': 0.05830830767398106, 'alias': 'pythia'}, 'mmlu': {'acc,none': 0.596425010682239, 'acc_stderr,none': 0.13844007438448744, 'alias': 'mmlu'}, 'mmlu_humanities': {'alias': ' - humanities', 'acc,none': 0.5343251859723698, 'acc_stderr,none': 0.16186401202777495}, 'mmlu_other': {'alias': ' - other', 'acc,none': 0.6816865143224976, 'acc_stderr,none': 0.09811125388055714}, 'mmlu_social_sciences': {'alias': ' - social_sciences', 'acc,none': 0.6958076048098798, 'acc_stderr,none': 0.08956129050566648}, 'mmlu_stem': {'alias': ' - stem', 'acc,none': 0.5080875356803045, 'acc_stderr,none': 0.1215476495130753}, 'xwinograd': {'acc,none': 0.8120926050797932, 'acc_stderr,none': 
0.04148122639462828, 'alias': 'xwinograd'}, 'xstorycloze': {'acc,none': 0.5916611515552614, 'acc_stderr,none': 0.07661315123253779, 'alias': 'xstorycloze'}}}}}\n"
     ]
    }
   ],
   "source": [
    "import json\n",
    "\n",
    "# Global result map if it's not set\n",
    "if 'global_result_map' not in globals():\n",
    "    global_result_map = {}\n",
    "\n",
    "#\n",
    "# Function to process the results.json file\n",
    "#\n",
    "def process_results_json(file_path):\n",
    "    with open(file_path) as f:\n",
    "        data = json.load(f)\n",
    "\n",
    "    # Model args, presplit by ','\n",
    "    model_args = data['config']['model_args'].split(',')\n",
    "\n",
    "    # Extract the pretrained value from config.model_args\n",
    "    modelname = model_args[0].split('=')[1]\n",
    "\n",
    "    # Opt array\n",
    "    confArgsArr = model_args[1:]\n",
    "\n",
    "    # Sort the opt array\n",
    "    confArgsArr.sort()\n",
    "    # Convert it to a string\n",
    "    confStr = ','.join(confArgsArr)\n",
    "\n",
    "    # Convert the option array of key=value strings to a dictionary\n",
    "    confObj = { }\n",
    "    for o in confArgsArr:\n",
    "        k, v = o.split('=')\n",
    "        confObj[k] = v\n",
    "    \n",
    "    # Create a dictionary to store the results, or use the existing one if it exists\n",
    "    if modelname in global_result_map:\n",
    "        modelObj = global_result_map[modelname]\n",
    "    else:\n",
    "        modelObj = {\n",
    "            'name': modelname,\n",
    "            'config': { }\n",
    "        }\n",
    "    \n",
    "    # Get the opt object for the model\n",
    "    if confStr in modelObj['config']:\n",
    "        confSet = modelObj['config'][confStr]\n",
    "    else:\n",
    "        confSet = {\n",
    "            'confStr': confStr,\n",
    "            'confObj': confObj,\n",
    "            'results': {},\n",
    "            'groups': {}\n",
    "        }\n",
    "\n",
    "    # Iterate over the results and extract the result object for each test/group\n",
    "    if 'results' in data:\n",
    "        for test, result in data['results'].items():\n",
    "            confSet['results'][test] = result\n",
    "    if 'groups' in data:\n",
    "        for test, result in data['groups'].items():\n",
    "            confSet['groups'][test] = result\n",
    "    \n",
    "    # Update the global result map object\n",
    "    modelObj['config'][confStr] = confSet\n",
    "    global_result_map[modelname] = modelObj\n",
    "    return modelObj\n",
    "\n",
    "# Lets test the function with the first results.json file\n",
    "first_result = process_results_json(results_json_files[0])\n",
    "print(f\"Processed example: \", first_result)\n"
   ]
  },
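  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "For reference, `process_results_json` groups results first by model name, then by the sorted configuration string. A minimal sketch of the resulting shape (illustrative placeholder values, not real results):\n",
    "\n",
    "```python\n",
    "# Illustrative shape only -- the values below are placeholders\n",
    "global_result_map = {\n",
    "    'mistralai/Mistral-7B-v0.1': {\n",
    "        'name': 'mistralai/Mistral-7B-v0.1',\n",
    "        'config': {\n",
    "            'dtype=bfloat16': {\n",
    "                'confStr': 'dtype=bfloat16',\n",
    "                'confObj': {'dtype': 'bfloat16'},\n",
    "                'results': {'xcopa': {'acc,none': 0.56, 'acc_stderr,none': 0.05}},\n",
    "                'groups': {},\n",
    "            }\n",
    "        },\n",
    "    }\n",
    "}\n",
    "```"
   ]
  },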
  {
   "cell_type": "code",
   "execution_count": 148,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Found 108 models\n",
      "Models: \n",
      "['mistralai/Mistral-7B-v0.1', 'mosaicml/mpt-7b-instruct', 'mosaicml/mpt-7b', 'mosaicml/mpt-7b-chat', 'bigscience/bloom-7b1', 'bigscience/bloomz-7b1-mt', 'bigscience/bloomz-7b1', 'EleutherAI/pythia-2.8b', 'EleutherAI/pythia-1.4b', 'EleutherAI/gpt-j-6b', 'EleutherAI/pythia-6.9b', 'google/flan-t5-base', 'google/gemma-2b', 'google/gemma-2b-it', 'google/gemma-7b', 'google/gemma-7b-it', 'google/flan-t5-large', 'microsoft/phi-1_5', 'microsoft/phi-2', 'microsoft/phi-1', 'allenai/OLMo-7B', 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'TinyLlama/TinyLlama-1.1B-Chat-v1.0', 'RWKV/rwkv-5-world-1b5', 'RWKV/rwkv-5-world-3b', 'RWKV/rwkv-4-world-3b', 'RWKV/rwkv-4-world-1b5', 'RWKV/v5-Eagle-7B-HF', 'RWKV/rwkv-4-world-7b', 'aisingapore/sealion7b', 'aisingapore/sealion3b', './rwkv-x-dev/1_3-C5-rwkv-270_pth', './rwkv-x-dev/1_0_pth', './rwkv-x-dev/chunk4-0_85_pth', './rwkv-x-dev/1_3-C1-rwkv-340_pth', './rwkv-x-dev/chunk1-0_8_pth', './rwkv-x-dev/chunk0-0_8_pth', './rwkv-x-dev/blink4-final_pth', './rwkv-x-dev/chunk2-0_8_pth', './rwkv-x-dev/chunk3-0_8_pth', './rwkv-x-dev/r3-4k-test2-fix3-blink-final_pth', './rwkv-x-dev/R4-7B-15t-With-Mask_pth', './rwkv-x-dev/r3-testchunk-1-8_pth', './rwkv-x-dev/R4-with-shuffle-rwkv-53_pth', './rwkv-x-dev/chunk7-2-0_85_pth', './rwkv-x-dev/EagleX-1_7T_pth', './rwkv-x-dev/r3-testchunk2-blink-fixed_pth', './rwkv-x-dev/r3-testchunk2-blink_pth', './rwkv-x-dev/rwkv-230_pth', './rwkv-x-dev/1_3-C0-rwkv-60_pth', './rwkv-x-dev/chunk5-0_85_pth', './rwkv-x-dev/R4-7B-Base-No-Mask_pth', './rwkv-x-dev/RWKV-5-World-1B5-v2-20231025-ctx4096', './rwkv-x-dev/R4-1B5-No-Mask_pth', './rwkv-x-dev/RWKV-32K-5B-RW_pth', './rwkv-x-dev/R4-7B-15t-32k-No-Mask_pth', './rwkv-x-dev/1_3-C0-PRERUN-rwkv-60_pth', './rwkv-x-dev/EagleX_1-7T_Chat_pth', './rwkv-x-dev/1_3-C1-rwkv-390_pth', './rwkv-x-dev/1_3-C1-rwkv-20_pth', './rwkv-x-dev/chunk8-1-0_85_pth', './rwkv-x-dev/R4-7B-Base-32k-No-Mask_pth', './rwkv-x-dev/R4-no-shuffle-rwkv-53_pth', './rwkv-x-dev/1_3-C2-rwkv-648_pth', './rwkv-x-dev/1_3-C2-rwkv-250_pth', './rwkv-x-dev/r3-testchunk-1-8-no-cuda-with-warmup_pth', './rwkv-x-dev/1_3-C0-rwkv-140_pth', './rwkv-x-dev/Eagle-225-1FT', './rwkv-x-dev/r3-c1-8_pth', './rwkv-x-dev/1_3-C0-PRERUN-rwkv-450_pth', './rwkv-x-dev/RWKV-5-World-3B-v2-20231118-ctx16k', './rwkv-x-dev/1_3-C0-PREPRERUN-rwkv-40_pth', './rwkv-x-dev/RWKV-5-World-7B-v2-20240128-ctx4096', './rwkv-x-dev/R4-7B-15t-No-Mask_pth', './rwkv-x-dev/1_0-c1-290_pth', './rwkv-x-dev/R4-1B5-With-Mask_pth', './rwkv-x-dev/1_3-C0-PREPRERUN-rwkv-30_pth', './rwkv-x-dev/1_3-C0-rwkv-70_pth', './rwkv-x-dev/chunk6-0_85_pth', './rwkv-x-dev/R4-7B-Base-With-Mask_pth', 'rwkv-x-dev/v5-Eagle-7B-1_0T-HF', './rwkv-x-dev/1_3-C0-PRERUN-rwkv-30_pth', './rwkv-x-dev/chunk7-1-0_85_pth', './rwkv-x-dev/1_3-C1-rwkv-190_pth', './rwkv-x-dev/R4-7B-15t-extd-e3_pth', './rwkv-x-dev/r3-testchunk2_pth', './rwkv-x-dev/Hermes-RWKV-v5-7B_pth', './rwkv-x-dev/1_3-C0-rwkv-153_pth', './rwkv-x-dev/R4-7B-15t-extd-e2_pth', './rwkv-x-dev/r3-testchunk-blink_pth', 'SmerkyG/rwkv6-world-1b6', 'SmerkyG/rwkv6-world-3b', 'SmerkyG/rwkv5-world-7b', 'togethercomputer/RedPajama-INCITE-7B-Base', 'togethercomputer/RedPajama-INCITE-7B-Instruct', 'togethercomputer/RedPajama-INCITE-7B-Chat', 'facebook/opt-2.7b', 'facebook/opt-6.7b', 'facebook/opt-1.3b', 'tiiuae/falcon-7b-instruct', 'tiiuae/falcon-rw-1b', 'tiiuae/falcon-rw-7b', 'tiiuae/falcon-7b', 'huggyllama/llama-7b', 'meta-llama/Llama-2-7b-chat-hf', 'meta-llama/Llama-2-7b-hf', 'state-spaces/mamba-2.8b-hf', 'state-spaces/mamba-1.4b-hf']\n",
      "Saved to compiled-lm-eval-results.json\n"
     ]
    }
   ],
   "source": [
    "# Lets reset and reprocess all the results.json files\n",
    "global_result_map = {}\n",
    "\n",
    "# Process all the results.json files\n",
    "for file in results_json_files:\n",
    "    process_results_json(file)\n",
    "\n",
    "# Show high level list of models\n",
    "print(f\"Found {len(global_result_map)} models\")\n",
    "print(f\"Models: \\n{list(global_result_map.keys())}\")\n",
    "\n",
    "# Save the result map to a file\n",
    "with open('summary/compiled-lm-eval-results.json', 'w') as f:\n",
    "    json.dump(global_result_map, f, sort_keys=True, indent='\\t')\n",
    "\n",
    "# Echo that its been saved to json\n",
    "print(f\"Saved to compiled-lm-eval-results.json\")"
   ]
  },
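  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The compiled file can be reloaded in a later session without re-walking the raw `results.json` files. A small usage sketch (assuming the working directory is unchanged):\n",
    "\n",
    "```python\n",
    "import json\n",
    "\n",
    "with open('summary/compiled-lm-eval-results.json') as f:\n",
    "    compiled = json.load(f)\n",
    "\n",
    "# Each model maps to its evaluated configurations and their results\n",
    "for name, model in list(compiled.items())[:3]:\n",
    "    print(name, '->', len(model['config']), 'config(s)')\n",
    "```"
   ]
  },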
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Convert the results into CSV table formats"
   ]
  },
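  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Each selected task contributes `<task> (acc)` / `<task> (acc_stderr)` columns, alongside per-model averages. A minimal sketch of this flattening, assuming the compiled structure above (the actual cell also restricts the columns to a chosen task list, here `xcopa`, and models without matching results fall back to `0.0` averages, as in the `google/gemma-2b` rows below):\n",
    "\n",
    "```python\n",
    "import pandas as pd\n",
    "\n",
    "rows = []\n",
    "for name, model in global_result_map.items():\n",
    "    for confSet in model['config'].values():\n",
    "        row = {'model': name}\n",
    "        accs, errs = [], []\n",
    "        for test, result in confSet['results'].items():\n",
    "            if 'acc,none' in result:\n",
    "                row[f'{test} (acc)'] = result['acc,none']\n",
    "                row[f'{test} (acc_stderr)'] = result.get('acc_stderr,none', 0.0)\n",
    "                accs.append(result['acc,none'])\n",
    "                errs.append(result.get('acc_stderr,none', 0.0))\n",
    "        row['avg_acc'] = sum(accs) / len(accs) if accs else 0.0\n",
    "        row['avg_acc_stderr'] = sum(errs) / len(errs) if errs else 0.0\n",
    "        rows.append(row)\n",
    "\n",
    "df = pd.DataFrame(rows)\n",
    "```"
   ]
  },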
  {
   "cell_type": "code",
   "execution_count": 149,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>model</th>\n",
       "      <th>avg_acc</th>\n",
       "      <th>avg_acc_stderr</th>\n",
       "      <th>xcopa (acc)</th>\n",
       "      <th>xcopa (acc_stderr)</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>mistralai/Mistral-7B-v0.1</td>\n",
       "      <td>0.559455</td>\n",
       "      <td>0.053879</td>\n",
       "      <td>0.559455</td>\n",
       "      <td>0.053879</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>mosaicml/mpt-7b-instruct</td>\n",
       "      <td>0.537091</td>\n",
       "      <td>0.041919</td>\n",
       "      <td>0.537091</td>\n",
       "      <td>0.041919</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>mosaicml/mpt-7b</td>\n",
       "      <td>0.536000</td>\n",
       "      <td>0.042339</td>\n",
       "      <td>0.536000</td>\n",
       "      <td>0.042339</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>mosaicml/mpt-7b-chat</td>\n",
       "      <td>0.538000</td>\n",
       "      <td>0.047059</td>\n",
       "      <td>0.538000</td>\n",
       "      <td>0.047059</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>bigscience/bloom-7b1</td>\n",
       "      <td>0.570909</td>\n",
       "      <td>0.061359</td>\n",
       "      <td>0.570909</td>\n",
       "      <td>0.061359</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>bigscience/bloomz-7b1-mt</td>\n",
       "      <td>0.546000</td>\n",
       "      <td>0.038321</td>\n",
       "      <td>0.546000</td>\n",
       "      <td>0.038321</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>bigscience/bloomz-7b1</td>\n",
       "      <td>0.547818</td>\n",
       "      <td>0.038920</td>\n",
       "      <td>0.547818</td>\n",
       "      <td>0.038920</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>EleutherAI/pythia-2.8b</td>\n",
       "      <td>0.537455</td>\n",
       "      <td>0.026941</td>\n",
       "      <td>0.537455</td>\n",
       "      <td>0.026941</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>EleutherAI/pythia-1.4b</td>\n",
       "      <td>0.526545</td>\n",
       "      <td>0.027441</td>\n",
       "      <td>0.526545</td>\n",
       "      <td>0.027441</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>EleutherAI/gpt-j-6b</td>\n",
       "      <td>0.544182</td>\n",
       "      <td>0.034404</td>\n",
       "      <td>0.544182</td>\n",
       "      <td>0.034404</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>EleutherAI/pythia-6.9b</td>\n",
       "      <td>0.540545</td>\n",
       "      <td>0.029689</td>\n",
       "      <td>0.540545</td>\n",
       "      <td>0.029689</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>google/flan-t5-base</td>\n",
       "      <td>0.510909</td>\n",
       "      <td>0.006743</td>\n",
       "      <td>0.510909</td>\n",
       "      <td>0.006743</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>google/gemma-2b</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>google/gemma-2b-it</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>google/gemma-7b</td>\n",
       "      <td>0.517636</td>\n",
       "      <td>0.006740</td>\n",
       "      <td>0.517636</td>\n",
       "      <td>0.006740</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>google/gemma-7b-it</td>\n",
       "      <td>0.517455</td>\n",
       "      <td>0.006742</td>\n",
       "      <td>0.517455</td>\n",
       "      <td>0.006742</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>google/flan-t5-large</td>\n",
       "      <td>0.510545</td>\n",
       "      <td>0.006743</td>\n",
       "      <td>0.510545</td>\n",
       "      <td>0.006743</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>microsoft/phi-1_5</td>\n",
       "      <td>0.521636</td>\n",
       "      <td>0.026198</td>\n",
       "      <td>0.521636</td>\n",
       "      <td>0.026198</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>microsoft/phi-2</td>\n",
       "      <td>0.512182</td>\n",
       "      <td>0.029742</td>\n",
       "      <td>0.512182</td>\n",
       "      <td>0.029742</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>microsoft/phi-1</td>\n",
       "      <td>0.517636</td>\n",
       "      <td>0.029612</td>\n",
       "      <td>0.517636</td>\n",
       "      <td>0.029612</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>allenai/OLMo-7B</td>\n",
       "      <td>0.537818</td>\n",
       "      <td>0.034147</td>\n",
       "      <td>0.537818</td>\n",
       "      <td>0.034147</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>TinyLlama/TinyLlama-1.1B-intermediate-step-143...</td>\n",
       "      <td>0.529273</td>\n",
       "      <td>0.029316</td>\n",
       "      <td>0.529273</td>\n",
       "      <td>0.029316</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>TinyLlama/TinyLlama-1.1B-Chat-v1.0</td>\n",
       "      <td>0.528909</td>\n",
       "      <td>0.031702</td>\n",
       "      <td>0.528909</td>\n",
       "      <td>0.031702</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>RWKV/rwkv-5-world-1b5</td>\n",
       "      <td>0.578909</td>\n",
       "      <td>0.044635</td>\n",
       "      <td>0.578909</td>\n",
       "      <td>0.044635</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>RWKV/rwkv-5-world-3b</td>\n",
       "      <td>0.590000</td>\n",
       "      <td>0.057252</td>\n",
       "      <td>0.590000</td>\n",
       "      <td>0.057252</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>RWKV/rwkv-4-world-3b</td>\n",
       "      <td>0.575455</td>\n",
       "      <td>0.040977</td>\n",
       "      <td>0.575455</td>\n",
       "      <td>0.040977</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>RWKV/rwkv-4-world-1b5</td>\n",
       "      <td>0.554000</td>\n",
       "      <td>0.039406</td>\n",
       "      <td>0.554000</td>\n",
       "      <td>0.039406</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>RWKV/v5-Eagle-7B-HF</td>\n",
       "      <td>0.622364</td>\n",
       "      <td>0.070563</td>\n",
       "      <td>0.622364</td>\n",
       "      <td>0.070563</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>RWKV/rwkv-4-world-7b</td>\n",
       "      <td>0.601455</td>\n",
       "      <td>0.053116</td>\n",
       "      <td>0.601455</td>\n",
       "      <td>0.053116</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>aisingapore/sealion7b</td>\n",
       "      <td>0.559818</td>\n",
       "      <td>0.060680</td>\n",
       "      <td>0.559818</td>\n",
       "      <td>0.060680</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>30</th>\n",
       "      <td>aisingapore/sealion3b</td>\n",
       "      <td>0.559273</td>\n",
       "      <td>0.054490</td>\n",
       "      <td>0.559273</td>\n",
       "      <td>0.054490</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>31</th>\n",
       "      <td>rwkv-x-dev/v5-Eagle-7B-1_0T-HF</td>\n",
       "      <td>0.622364</td>\n",
       "      <td>0.072168</td>\n",
       "      <td>0.622364</td>\n",
       "      <td>0.072168</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>32</th>\n",
       "      <td>SmerkyG/rwkv6-world-1b6</td>\n",
       "      <td>0.579636</td>\n",
       "      <td>0.052056</td>\n",
       "      <td>0.579636</td>\n",
       "      <td>0.052056</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>33</th>\n",
       "      <td>SmerkyG/rwkv6-world-3b</td>\n",
       "      <td>0.595273</td>\n",
       "      <td>0.061039</td>\n",
       "      <td>0.595273</td>\n",
       "      <td>0.061039</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>34</th>\n",
       "      <td>SmerkyG/rwkv5-world-7b</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>35</th>\n",
       "      <td>togethercomputer/RedPajama-INCITE-7B-Base</td>\n",
       "      <td>0.525455</td>\n",
       "      <td>0.036407</td>\n",
       "      <td>0.525455</td>\n",
       "      <td>0.036407</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>36</th>\n",
       "      <td>togethercomputer/RedPajama-INCITE-7B-Instruct</td>\n",
       "      <td>0.528545</td>\n",
       "      <td>0.036470</td>\n",
       "      <td>0.528545</td>\n",
       "      <td>0.036470</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>37</th>\n",
       "      <td>togethercomputer/RedPajama-INCITE-7B-Chat</td>\n",
       "      <td>0.535455</td>\n",
       "      <td>0.038723</td>\n",
       "      <td>0.535455</td>\n",
       "      <td>0.038723</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>38</th>\n",
       "      <td>facebook/opt-2.7b</td>\n",
       "      <td>0.521818</td>\n",
       "      <td>0.029821</td>\n",
       "      <td>0.521818</td>\n",
       "      <td>0.029821</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>39</th>\n",
       "      <td>facebook/opt-6.7b</td>\n",
       "      <td>0.522909</td>\n",
       "      <td>0.027216</td>\n",
       "      <td>0.522909</td>\n",
       "      <td>0.027216</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>40</th>\n",
       "      <td>facebook/opt-1.3b</td>\n",
       "      <td>0.521818</td>\n",
       "      <td>0.029112</td>\n",
       "      <td>0.521818</td>\n",
       "      <td>0.029112</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>41</th>\n",
       "      <td>tiiuae/falcon-7b-instruct</td>\n",
       "      <td>0.536727</td>\n",
       "      <td>0.053430</td>\n",
       "      <td>0.536727</td>\n",
       "      <td>0.053430</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>42</th>\n",
       "      <td>tiiuae/falcon-rw-1b</td>\n",
       "      <td>0.522545</td>\n",
       "      <td>0.029446</td>\n",
       "      <td>0.522545</td>\n",
       "      <td>0.029446</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>43</th>\n",
       "      <td>tiiuae/falcon-rw-7b</td>\n",
       "      <td>0.535818</td>\n",
       "      <td>0.033185</td>\n",
       "      <td>0.535818</td>\n",
       "      <td>0.033185</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>44</th>\n",
       "      <td>tiiuae/falcon-7b</td>\n",
       "      <td>0.559636</td>\n",
       "      <td>0.071650</td>\n",
       "      <td>0.559636</td>\n",
       "      <td>0.071650</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>45</th>\n",
       "      <td>huggyllama/llama-7b</td>\n",
       "      <td>0.541818</td>\n",
       "      <td>0.040718</td>\n",
       "      <td>0.541818</td>\n",
       "      <td>0.040718</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>46</th>\n",
       "      <td>meta-llama/Llama-2-7b-chat-hf</td>\n",
       "      <td>0.559818</td>\n",
       "      <td>0.054954</td>\n",
       "      <td>0.559818</td>\n",
       "      <td>0.054954</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>47</th>\n",
       "      <td>meta-llama/Llama-2-7b-hf</td>\n",
       "      <td>0.566727</td>\n",
       "      <td>0.052515</td>\n",
       "      <td>0.566727</td>\n",
       "      <td>0.052515</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>48</th>\n",
       "      <td>state-spaces/mamba-2.8b-hf</td>\n",
       "      <td>0.552909</td>\n",
       "      <td>0.035570</td>\n",
       "      <td>0.552909</td>\n",
       "      <td>0.035570</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>49</th>\n",
       "      <td>state-spaces/mamba-1.4b-hf</td>\n",
       "      <td>0.544182</td>\n",
       "      <td>0.031390</td>\n",
       "      <td>0.544182</td>\n",
       "      <td>0.031390</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                                                model   avg_acc  \\\n",
       "0                           mistralai/Mistral-7B-v0.1  0.559455   \n",
       "1                            mosaicml/mpt-7b-instruct  0.537091   \n",
       "2                                     mosaicml/mpt-7b  0.536000   \n",
       "3                                mosaicml/mpt-7b-chat  0.538000   \n",
       "4                                bigscience/bloom-7b1  0.570909   \n",
       "5                            bigscience/bloomz-7b1-mt  0.546000   \n",
       "6                               bigscience/bloomz-7b1  0.547818   \n",
       "7                              EleutherAI/pythia-2.8b  0.537455   \n",
       "8                              EleutherAI/pythia-1.4b  0.526545   \n",
       "9                                 EleutherAI/gpt-j-6b  0.544182   \n",
       "10                             EleutherAI/pythia-6.9b  0.540545   \n",
       "11                                google/flan-t5-base  0.510909   \n",
       "12                                    google/gemma-2b  0.000000   \n",
       "13                                 google/gemma-2b-it  0.000000   \n",
       "14                                    google/gemma-7b  0.517636   \n",
       "15                                 google/gemma-7b-it  0.517455   \n",
       "16                               google/flan-t5-large  0.510545   \n",
       "17                                  microsoft/phi-1_5  0.521636   \n",
       "18                                    microsoft/phi-2  0.512182   \n",
       "19                                    microsoft/phi-1  0.517636   \n",
       "20                                    allenai/OLMo-7B  0.537818   \n",
       "21  TinyLlama/TinyLlama-1.1B-intermediate-step-143...  0.529273   \n",
       "22                 TinyLlama/TinyLlama-1.1B-Chat-v1.0  0.528909   \n",
       "23                              RWKV/rwkv-5-world-1b5  0.578909   \n",
       "24                               RWKV/rwkv-5-world-3b  0.590000   \n",
       "25                               RWKV/rwkv-4-world-3b  0.575455   \n",
       "26                              RWKV/rwkv-4-world-1b5  0.554000   \n",
       "27                                RWKV/v5-Eagle-7B-HF  0.622364   \n",
       "28                               RWKV/rwkv-4-world-7b  0.601455   \n",
       "29                              aisingapore/sealion7b  0.559818   \n",
       "30                              aisingapore/sealion3b  0.559273   \n",
       "31                     rwkv-x-dev/v5-Eagle-7B-1_0T-HF  0.622364   \n",
       "32                            SmerkyG/rwkv6-world-1b6  0.579636   \n",
       "33                             SmerkyG/rwkv6-world-3b  0.595273   \n",
       "34                             SmerkyG/rwkv5-world-7b  0.000000   \n",
       "35          togethercomputer/RedPajama-INCITE-7B-Base  0.525455   \n",
       "36      togethercomputer/RedPajama-INCITE-7B-Instruct  0.528545   \n",
       "37          togethercomputer/RedPajama-INCITE-7B-Chat  0.535455   \n",
       "38                                  facebook/opt-2.7b  0.521818   \n",
       "39                                  facebook/opt-6.7b  0.522909   \n",
       "40                                  facebook/opt-1.3b  0.521818   \n",
       "41                          tiiuae/falcon-7b-instruct  0.536727   \n",
       "42                                tiiuae/falcon-rw-1b  0.522545   \n",
       "43                                tiiuae/falcon-rw-7b  0.535818   \n",
       "44                                   tiiuae/falcon-7b  0.559636   \n",
       "45                                huggyllama/llama-7b  0.541818   \n",
       "46                      meta-llama/Llama-2-7b-chat-hf  0.559818   \n",
       "47                           meta-llama/Llama-2-7b-hf  0.566727   \n",
       "48                         state-spaces/mamba-2.8b-hf  0.552909   \n",
       "49                         state-spaces/mamba-1.4b-hf  0.544182   \n",
       "\n",
       "    avg_acc_stderr  xcopa (acc)  xcopa (acc_stderr)  \n",
       "0         0.053879     0.559455            0.053879  \n",
       "1         0.041919     0.537091            0.041919  \n",
       "2         0.042339     0.536000            0.042339  \n",
       "3         0.047059     0.538000            0.047059  \n",
       "4         0.061359     0.570909            0.061359  \n",
       "5         0.038321     0.546000            0.038321  \n",
       "6         0.038920     0.547818            0.038920  \n",
       "7         0.026941     0.537455            0.026941  \n",
       "8         0.027441     0.526545            0.027441  \n",
       "9         0.034404     0.544182            0.034404  \n",
       "10        0.029689     0.540545            0.029689  \n",
       "11        0.006743     0.510909            0.006743  \n",
       "12        0.000000          NaN                 NaN  \n",
       "13        0.000000          NaN                 NaN  \n",
       "14        0.006740     0.517636            0.006740  \n",
       "15        0.006742     0.517455            0.006742  \n",
       "16        0.006743     0.510545            0.006743  \n",
       "17        0.026198     0.521636            0.026198  \n",
       "18        0.029742     0.512182            0.029742  \n",
       "19        0.029612     0.517636            0.029612  \n",
       "20        0.034147     0.537818            0.034147  \n",
       "21        0.029316     0.529273            0.029316  \n",
       "22        0.031702     0.528909            0.031702  \n",
       "23        0.044635     0.578909            0.044635  \n",
       "24        0.057252     0.590000            0.057252  \n",
       "25        0.040977     0.575455            0.040977  \n",
       "26        0.039406     0.554000            0.039406  \n",
       "27        0.070563     0.622364            0.070563  \n",
       "28        0.053116     0.601455            0.053116  \n",
       "29        0.060680     0.559818            0.060680  \n",
       "30        0.054490     0.559273            0.054490  \n",
       "31        0.072168     0.622364            0.072168  \n",
       "32        0.052056     0.579636            0.052056  \n",
       "33        0.061039     0.595273            0.061039  \n",
       "34        0.000000          NaN                 NaN  \n",
       "35        0.036407     0.525455            0.036407  \n",
       "36        0.036470     0.528545            0.036470  \n",
       "37        0.038723     0.535455            0.038723  \n",
       "38        0.029821     0.521818            0.029821  \n",
       "39        0.027216     0.522909            0.027216  \n",
       "40        0.029112     0.521818            0.029112  \n",
       "41        0.053430     0.536727            0.053430  \n",
       "42        0.029446     0.522545            0.029446  \n",
       "43        0.033185     0.535818            0.033185  \n",
       "44        0.071650     0.559636            0.071650  \n",
       "45        0.040718     0.541818            0.040718  \n",
       "46        0.054954     0.559818            0.054954  \n",
       "47        0.052515     0.566727            0.052515  \n",
       "48        0.035570     0.552909            0.035570  \n",
       "49        0.031390     0.544182            0.031390  "
      ]
     },
     "execution_count": 149,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Lets convert this into a table, which we will display in this notebook, and save as a CSV\n",
    "import pandas as pd\n",
    "\n",
    "##################################################\n",
    "#\n",
    "#  Utility functions\n",
    "#\n",
    "##################################################\n",
    "\n",
    "# Check if the given name string, is within the list, including \"*\" wildcard\n",
    "def is_in_list(name, list):\n",
    "    for n in list:\n",
    "        if n[-1] == '*':\n",
    "            if name.startswith(n[:-1]):\n",
    "                return True\n",
    "        elif n == name:\n",
    "            return True\n",
    "    return False\n",
    "\n",
    "# Is in inclusion exclusion list pair\n",
    "def is_in_list_pair(name, inList, exList):\n",
    "    if not is_in_list(name, inList):\n",
    "        return False\n",
    "    if is_in_list(name, exList):\n",
    "        return False\n",
    "    return True\n",
    "\n",
    "# Prepare a single test/group result object\n",
    "# By applying common filtering and formatting changes\n",
    "def prepare_test_result(result):\n",
    "    # The reutrn object\n",
    "    ret = {}\n",
    "    # Iterate the result key/value\n",
    "    for k, v in result.items():\n",
    "        # Skip if its alias\n",
    "        if k == 'alias':\n",
    "            continue\n",
    "\n",
    "        # If the key ends with \",none\", drop the \",none\"\n",
    "        if k.endswith(',none'):\n",
    "            k = k[:-5]\n",
    "        \n",
    "        # Save the result\n",
    "        ret[k] = v\n",
    "    \n",
    "    # Return the result\n",
    "    return ret\n",
    "\n",
    "##################################################\n",
    "#\n",
    "#  Generate the result\n",
    "#\n",
    "##################################################\n",
    "\n",
    "# Create a list of rows for the table\n",
    "def generate_result_table(\n",
    "    inConfig = { \"dtype\": \"bfloat16\" },\n",
    "\n",
    "    # Models to include/exclude\n",
    "    inModels = [\"*\"],\n",
    "    exModels = [\"./rwkv-x-dev/*\", \"rwkv-x-dev\"],\n",
    "\n",
    "    # Results and groups to include\n",
    "    inResults = [],\n",
    "    inGroups = [\"*\"],\n",
    "\n",
    "    # Exclude results and groups, applied after inResults and inGroups\n",
    "    exResults = [],\n",
    "    exGroups = [],\n",
    "\n",
    "    # Sorted\n",
    "    sort = False,\n",
    "    simplified = False\n",
    "):\n",
    "    table_rows = []\n",
    "\n",
    "    # Iterate over the models\n",
    "    for model, modelObj in global_result_map.items():\n",
    "        # Skip if not in the inModels or exModels\n",
    "        if not is_in_list_pair(model, inModels, exModels):\n",
    "            continue\n",
    "\n",
    "        # Iterate over the configurations\n",
    "        for confStr, confSet in modelObj['config'].items():\n",
    "            # Get the confObj\n",
    "            confObj = confSet['confObj']\n",
    "\n",
    "            # Check if the inConfig, matches the confObj\n",
    "            if inConfig:\n",
    "                skip = False\n",
    "                for k, v in inConfig.items():\n",
    "                    if k not in confObj or confObj[k] != v:\n",
    "                        skip = True\n",
    "                        break\n",
    "                if skip:\n",
    "                    continue\n",
    "\n",
    "            # Create a row object\n",
    "            row = {\n",
    "                'model': model,\n",
    "                # 'config': confStr\n",
    "\n",
    "                \"avg_acc\": 0.0,\n",
    "                \"avg_acc_stderr\": 0.0,\n",
    "            }\n",
    "\n",
    "            # Total acc / acc_stderr\n",
    "            acc_total = 0.0\n",
    "            acc_count = 0\n",
    "            acc_stderr_total = 0.0\n",
    "            acc_stderr_count = 0\n",
    "\n",
    "            # Add the groups\n",
    "            for test, result in confSet['groups'].items():\n",
    "\n",
    "                # Skip if not in the inGroups or exGroups\n",
    "                if not is_in_list_pair(test, inGroups, exGroups):\n",
    "                    continue\n",
    "\n",
    "                # Filter the result obj\n",
    "                cleanResult = prepare_test_result(result)\n",
    "\n",
    "                # Add the result to the row, as seperate columns for each key\n",
    "                for k, v in cleanResult.items():\n",
    "                    if k == 'acc':\n",
    "                        acc_total += v\n",
    "                        acc_count += 1\n",
    "                    elif k == 'acc_stderr':\n",
    "                        acc_stderr_total += v\n",
    "                        acc_stderr_count += 1\n",
    "                    \n",
    "                    # For simplified, we only use acc and perplexity\n",
    "                    if simplified and k not in ['acc', 'perplexity']:\n",
    "                        continue\n",
    "\n",
    "                    # Save the value\n",
    "                    row[f\"{test} ({k})\"] = v\n",
    "\n",
    "            # Add the results\n",
    "            for test, result in confSet['results'].items():\n",
    "\n",
    "                # Skip if not in the inResults or exResults\n",
    "                if not is_in_list_pair(test, inResults, exResults):\n",
    "                    continue\n",
    "\n",
    "                # Filter the result obj\n",
    "                cleanResult = prepare_test_result(result)\n",
    "\n",
    "                # Add the result to the row, as seperate columns for each key\n",
    "                for k, v in cleanResult.items():\n",
    "                    if k == 'acc':\n",
    "                        acc_total += v\n",
    "                        acc_count += 1\n",
    "                    elif k == 'acc_stderr':\n",
    "                        acc_stderr_total += v\n",
    "                        acc_stderr_count += 1\n",
    "\n",
    "                    # For simplified, we only use acc and perplexity\n",
    "                    if simplified and k not in ['acc', 'perplexity']:\n",
    "                        continue\n",
    "\n",
    "                    # Save the value\n",
    "                    row[f\"{test} ({k})\"] = v\n",
    "                \n",
    "            # Add the avg acc and acc_stderr\n",
    "            if acc_count > 0:\n",
    "               row[\"avg_acc\"] = acc_total / acc_count\n",
    "            if acc_stderr_count > 0:\n",
    "                row[\"avg_acc_stderr\"] = acc_stderr_total / acc_stderr_count\n",
    "\n",
    "            # Append the row to the table\n",
    "            table_rows.append(row)\n",
    "\n",
    "    # Create a dataframe from the table rows\n",
    "    df = pd.DataFrame(table_rows)\n",
    "\n",
    "    # Sort by avg_acc\n",
    "    if sort:\n",
    "        df = df.sort_values(by='avg_acc', ascending=False)\n",
    "\n",
    "    # Show the dataframe\n",
    "    return df\n",
    "\n",
    "# Generate the dataframe\n",
    "df = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"xcopa\"], inResults=[] )\n",
    "\n",
    "# # Save the dataframe to a CSV file\n",
    "# df.to_csv('summary/compiled-lm-eval-results.csv', index=False)\n",
    "\n",
    "# Show results\n",
    "df\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 150,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "total 34400\n",
      "-rw-r--r--@ 1 picocreator  staff   1.2M Mar 23 00:42 bf16-all-results-and-groups.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   300K Mar 23 00:42 bf16-all-simplified-results-and-groups.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   300K Mar 23 00:42 bf16-all-sorted-results-and-groups.csv\n",
      "-rw-r--r--@ 1 picocreator  staff    76K Mar 23 00:42 bf16-eng-focus.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   1.1M Mar 23 00:42 bf16-eng-results.csv\n",
      "-rw-r--r--@ 1 picocreator  staff    91K Mar 23 00:42 bf16-eng-summary.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   113K Mar 23 00:42 bf16-multilang-results.csv\n",
      "-rw-r--r--@ 1 picocreator  staff    16K Mar 23 00:42 bf16-multilang-summary.csv\n",
      "-rw-r--r--@ 1 picocreator  staff    76K Mar 23 00:42 bf16-sorted-eng-focus.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   1.1M Mar 23 00:42 bf16-sorted-eng-results.csv\n",
      "-rw-r--r--@ 1 picocreator  staff    91K Mar 23 00:42 bf16-sorted-eng-summary.csv\n",
      "-rw-r--r--@ 1 picocreator  staff    16K Mar 23 00:42 bf16-sorted-multilang-summary.csv\n",
      "-rw-r--r--  1 picocreator  staff   8.7M Mar 23 00:42 compiled-lm-eval-results.json\n",
      "-rw-r--r--@ 1 picocreator  staff   347K Mar 23 00:42 rwkv-x-dev-bf16-sorted-eng-all.csv\n",
      "-rw-r--r--@ 1 picocreator  staff    25K Mar 23 00:42 rwkv-x-dev-bf16-sorted-eng-focus.csv\n",
      "-rw-r--r--@ 1 picocreator  staff    22K Mar 23 00:42 rwkv-x-dev-bf16-sorted-multilang-summary.csv\n"
     ]
    }
   ],
   "source": [
    "##################################################\n",
    "#\n",
    "#  Build the various subsets\n",
    "#\n",
    "##################################################\n",
    "\n",
    "# Overall results\n",
    "all_results = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"] )\n",
    "all_results.to_csv('summary/bf16-all-results-and-groups.csv', index=False)\n",
    "\n",
    "all_results = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"], simplified=True )\n",
    "all_results.to_csv('summary/bf16-all-simplified-results-and-groups.csv', index=False)\n",
    "\n",
    "all_results = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"], simplified=True, sort=True )\n",
    "all_results.to_csv('summary/bf16-all-sorted-results-and-groups.csv', index=False)\n",
    "\n",
    "# Multilang results\n",
    "multiLang_tTest = [\"xcopa_*\", \"xnli_*\", \"xstorycloze_*\", \"xwinograd_*\", \"lambada_openai_*\",     \"pawsx_*\"]\n",
    "multiLang_tGrps = [\"xcopa\",   \"xnli\",   \"xstorycloze\",   \"xwinograd\",   \"lambada_multilingual\", \"pawsx\"]\n",
    "# Both test and groups, merged into a single list\n",
    "multiLang_joint = multiLang_tTest + multiLang_tGrps\n",
    "\n",
    "multilang_grp = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=multiLang_tGrps, inResults=[] )\n",
    "multilang_test = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=multiLang_tGrps, inResults=multiLang_tTest )\n",
    "multilang_grp.to_csv('summary/bf16-multilang-summary.csv', index=False)\n",
    "multilang_test.to_csv('summary/bf16-multilang-results.csv', index=False)\n",
    "\n",
    "multilang_grp_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=multiLang_tGrps, inResults=[], sort=True )\n",
    "multilang_grp_sorted.to_csv('summary/bf16-sorted-multilang-summary.csv', index=False)\n",
    "\n",
    "# RWKV perf tracking\n",
    "rwkv_multilang_grp_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=multiLang_tGrps, inResults=[], exModels=[], inModels=[\"./rwkv-x-dev/*\", \"rwkv-x-dev/*\", \"RWKV/*\", \"meta-llama/Llama-2-7b*\", \"mistralai/Mistral-7B-v0.1\"], sort=True )\n",
    "rwkv_multilang_grp_sorted.to_csv('summary/rwkv-x-dev-bf16-sorted-multilang-summary.csv', index=False)\n",
    "\n",
    "# All other results\n",
    "eng_grp = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[], exGroups=multiLang_joint, exResults=multiLang_joint )\n",
    "eng_grp_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[], exGroups=multiLang_joint, exResults=multiLang_joint, sort=True )\n",
    "eng_test = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"], exGroups=multiLang_joint, exResults=multiLang_joint )\n",
    "eng_test_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"], exGroups=multiLang_joint, exResults=multiLang_joint, sort=True )\n",
    "\n",
    "eng_grp.to_csv('summary/bf16-eng-summary.csv', index=False)\n",
    "eng_test.to_csv('summary/bf16-eng-results.csv', index=False)\n",
    "eng_test_sorted.to_csv('summary/bf16-sorted-eng-results.csv', index=False)\n",
    "eng_grp_sorted.to_csv('summary/bf16-sorted-eng-summary.csv', index=False)\n",
    "\n",
    "# English focused subset\n",
    "eng_focus_mixed=[\"lambada_openai\", \"lambada_standard\", \"blimp\", \"piqa\", \"copa\", \"sciq\", \"truthfulqa\", \"pythia\"] #\"np_open\", \"cmmlu\", \"record\"\n",
    "eng_focus_tGrps=[\"anli\", \"glue\", \"mmlu\" ]\n",
    "eng_focus_tTest=[\"blimp\", \"arc_*\", \"logiqa\", \"winogrande\", \"openbookqa\", \"hellaswag\"]\n",
    "\n",
    "eng_focus_tGrps = eng_focus_tGrps + eng_focus_mixed\n",
    "eng_focus_tTest = eng_focus_tTest + eng_focus_mixed\n",
    "\n",
    "eng_focus = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=eng_focus_tGrps, inResults=eng_focus_tTest )\n",
    "eng_focus_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=eng_focus_tGrps, inResults=eng_focus_tTest, sort=True )\n",
    "eng_focus.to_csv('summary/bf16-eng-focus.csv', index=False)\n",
    "eng_focus_sorted.to_csv('summary/bf16-sorted-eng-focus.csv', index=False)\n",
    "\n",
    "# RWKV perf tracking\n",
    "rwkv_eng_focus_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=eng_focus_tGrps, inResults=eng_focus_tTest, exModels=[], inModels=[\"./rwkv-x-dev/*\", \"rwkv-x-dev/*\", \"RWKV/*\", \"meta-llama/Llama-2-7b*\", \"mistralai/Mistral-7B-v0.1\"], sort=True, simplified=True )\n",
    "rwkv_eng_focus_sorted.to_csv('summary/rwkv-x-dev-bf16-sorted-eng-focus.csv', index=False)\n",
    "\n",
    "# RWKV perf tracking\n",
    "rwkv_eng_all_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"], exModels=[], inModels=[\"./rwkv-x-dev/*\", \"rwkv-x-dev/*\", \"RWKV/*\", \"meta-llama/Llama-2-7b*\", \"mistralai/Mistral-7B-v0.1\"], sort=True, simplified=True )\n",
    "rwkv_eng_all_sorted.to_csv('summary/rwkv-x-dev-bf16-sorted-eng-all.csv', index=False)\n",
    "\n",
    "# # Overall results\n",
    "# rwkv_all_results = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"], inModels=[\"./rwkv-x-dev/*\", \"rwkv-x-dev/*\", \"RWKV/*\"], exModels=[], sort=True )\n",
    "# rwkv_all_results.to_csv('summary/rwkv-x-dev-bf16-all-results-and-groups.csv', index=False)\n",
    "\n",
    "# List the files\n",
    "!ls -lh summary"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}