xuanricheng committed · Commit f66813d · verified · 1 parent: 28b731e

Add results for amazon/MegaBeam-Mistral-7B-300k

amazon/MegaBeam-Mistral-7B-300k/results_2024-12-27T15-27-22.261848.json ADDED
@@ -0,0 +1,177 @@
+ {
+   "config_general": {
+     "model_name": "amazon/MegaBeam-Mistral-7B-300k",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "harness-c_arc_challenge": {
+       "acc_norm": 41.47,
+       "acc_stderr": 0,
+       "c_arc_challenge_25shot_acc": 36.52,
+       "c_arc_challenge_25shot_acc_norm": 41.47
+     },
+     "harness-c_gsm8k": {
+       "acc": 14.78,
+       "acc_stderr": 0,
+       "c_gsm8k_5shot_acc": 14.78
+     },
+     "harness-c_hellaswag": {
+       "acc_norm": 48.59,
+       "acc_stderr": 0,
+       "c_hellaswag_10shot_acc": 37.94,
+       "c_hellaswag_10shot_acc_norm": 48.59
+     },
+     "harness-c-sem-v2": {
+       "acc": 40.2325,
+       "acc_stderr": 0,
+       "c_sem_v2-LLSRC_5shot_acc": 40.58,
+       "c_sem_v2-SLPWC_5shot_acc": 31.43,
+       "c_sem_v2-SLRFC_5shot_acc": 33.81,
+       "c_sem_v2-SLSRC_5shot_acc": 55.11,
+       "c_sem_v2-LLSRC_5shot_acc_norm": 40.58,
+       "c_sem_v2-SLPWC_5shot_acc_norm": 31.43,
+       "c_sem_v2-SLRFC_5shot_acc_norm": 33.81,
+       "c_sem_v2-SLSRC_5shot_acc_norm": 55.11
+     },
+     "harness-c_truthfulqa_mc": {
+       "mc2": 57.78,
+       "acc_stderr": 0,
+       "c_truthfulqa_mc_0shot_mc1": 36.11,
+       "c_truthfulqa_mc_0shot_mc2": 57.78
+     },
+     "harness-c_winogrande": {
+       "acc": 59.51,
+       "acc_stderr": 0,
+       "c_winogrande_0shot_acc": 59.51
+     },
+     "CLCC-H": {
+       "acc": 0.5494,
+       "acc_stderr": 0
+     },
+     "harness-cmmlu": {
+       "acc_norm": 40.59,
+       "acc_stderr": 0,
+       "cmmlu-anatomy_5shot_acc": 32.59,
+       "cmmlu_fullavg_5shot_acc": 40.59,
+       "cmmlu-virology_5shot_acc": 32.53,
+       "cmmlu-astronomy_5shot_acc": 37.5,
+       "cmmlu-marketing_5shot_acc": 63.68,
+       "cmmlu-nutrition_5shot_acc": 48.37,
+       "cmmlu-sociology_5shot_acc": 54.23,
+       "cmmlu-management_5shot_acc": 38.83,
+       "cmmlu-philosophy_5shot_acc": 39.87,
+       "cmmlu-prehistory_5shot_acc": 39.2,
+       "cmmlu-human_aging_5shot_acc": 43.95,
+       "cmmlu-econometrics_5shot_acc": 27.19,
+       "cmmlu-formal_logic_5shot_acc": 33.6,
+       "cmmlu-global_facts_5shot_acc": 31.0,
+       "cmmlu-jurisprudence_5shot_acc": 50.93,
+       "cmmlu-miscellaneous_5shot_acc": 42.78,
+       "cmmlu-moral_disputes_5shot_acc": 42.2,
+       "cmmlu-business_ethics_5shot_acc": 42.0,
+       "cmmlu-college_biology_5shot_acc": 32.64,
+       "cmmlu-college_physics_5shot_acc": 23.53,
+       "cmmlu-human_sexuality_5shot_acc": 43.51,
+       "cmmlu-moral_scenarios_5shot_acc": 26.03,
+       "cmmlu-world_religions_5shot_acc": 45.03,
+       "cmmlu-abstract_algebra_5shot_acc": 29.0,
+       "cmmlu-college_medicine_5shot_acc": 35.84,
+       "cmmlu-machine_learning_5shot_acc": 31.25,
+       "cmmlu-medical_genetics_5shot_acc": 46.0,
+       "cmmlu-professional_law_5shot_acc": 32.72,
+       "cmmlu-public_relations_5shot_acc": 48.18,
+       "cmmlu-security_studies_5shot_acc": 60.41,
+       "cmmlu-college_chemistry_5shot_acc": 30.0,
+       "cmmlu-computer_security_5shot_acc": 55.0,
+       "cmmlu-international_law_5shot_acc": 53.72,
+       "cmmlu-logical_fallacies_5shot_acc": 39.88,
+       "cmmlu-us_foreign_policy_5shot_acc": 58.0,
+       "cmmlu-clinical_knowledge_5shot_acc": 41.13,
+       "cmmlu-conceptual_physics_5shot_acc": 39.15,
+       "cmmlu-college_mathematics_5shot_acc": 36.0,
+       "cmmlu-high_school_biology_5shot_acc": 38.06,
+       "cmmlu-high_school_physics_5shot_acc": 32.45,
+       "cmmlu-high_school_chemistry_5shot_acc": 30.05,
+       "cmmlu-high_school_geography_5shot_acc": 45.96,
+       "cmmlu-professional_medicine_5shot_acc": 31.25,
+       "cmmlu-electrical_engineering_5shot_acc": 44.14,
+       "cmmlu-elementary_mathematics_5shot_acc": 31.75,
+       "cmmlu-high_school_psychology_5shot_acc": 45.14,
+       "cmmlu-high_school_statistics_5shot_acc": 31.48,
+       "cmmlu-high_school_us_history_5shot_acc": 50.49,
+       "cmmlu-high_school_mathematics_5shot_acc": 32.22,
+       "cmmlu-professional_accounting_5shot_acc": 32.62,
+       "cmmlu-professional_psychology_5shot_acc": 38.56,
+       "cmmlu-college_computer_science_5shot_acc": 34.0,
+       "cmmlu-high_school_world_history_5shot_acc": 57.81,
+       "cmmlu-high_school_macroeconomics_5shot_acc": 40.0,
+       "cmmlu-high_school_microeconomics_5shot_acc": 45.38,
+       "cmmlu-high_school_computer_science_5shot_acc": 47.0,
+       "cmmlu-high_school_european_history_5shot_acc": 49.7,
+       "cmmlu-high_school_government_and_politics_5shot_acc": 48.19,
+       "cmmlu-anatomy_5shot_acc_norm": 32.59,
+       "cmmlu_fullavg_5shot_acc_norm": 40.59,
+       "cmmlu-virology_5shot_acc_norm": 32.53,
+       "cmmlu-astronomy_5shot_acc_norm": 37.5,
+       "cmmlu-marketing_5shot_acc_norm": 63.68,
+       "cmmlu-nutrition_5shot_acc_norm": 48.37,
+       "cmmlu-sociology_5shot_acc_norm": 54.23,
+       "cmmlu-management_5shot_acc_norm": 38.83,
+       "cmmlu-philosophy_5shot_acc_norm": 39.87,
+       "cmmlu-prehistory_5shot_acc_norm": 39.2,
+       "cmmlu-human_aging_5shot_acc_norm": 43.95,
+       "cmmlu-econometrics_5shot_acc_norm": 27.19,
+       "cmmlu-formal_logic_5shot_acc_norm": 33.6,
+       "cmmlu-global_facts_5shot_acc_norm": 31.0,
+       "cmmlu-jurisprudence_5shot_acc_norm": 50.93,
+       "cmmlu-miscellaneous_5shot_acc_norm": 42.78,
+       "cmmlu-moral_disputes_5shot_acc_norm": 42.2,
+       "cmmlu-business_ethics_5shot_acc_norm": 42.0,
+       "cmmlu-college_biology_5shot_acc_norm": 32.64,
+       "cmmlu-college_physics_5shot_acc_norm": 23.53,
+       "cmmlu-human_sexuality_5shot_acc_norm": 43.51,
+       "cmmlu-moral_scenarios_5shot_acc_norm": 26.03,
+       "cmmlu-world_religions_5shot_acc_norm": 45.03,
+       "cmmlu-abstract_algebra_5shot_acc_norm": 29.0,
+       "cmmlu-college_medicine_5shot_acc_norm": 35.84,
+       "cmmlu-machine_learning_5shot_acc_norm": 31.25,
+       "cmmlu-medical_genetics_5shot_acc_norm": 46.0,
+       "cmmlu-professional_law_5shot_acc_norm": 32.72,
+       "cmmlu-public_relations_5shot_acc_norm": 48.18,
+       "cmmlu-security_studies_5shot_acc_norm": 60.41,
+       "cmmlu-college_chemistry_5shot_acc_norm": 30.0,
+       "cmmlu-computer_security_5shot_acc_norm": 55.0,
+       "cmmlu-international_law_5shot_acc_norm": 53.72,
+       "cmmlu-logical_fallacies_5shot_acc_norm": 39.88,
+       "cmmlu-us_foreign_policy_5shot_acc_norm": 58.0,
+       "cmmlu-clinical_knowledge_5shot_acc_norm": 41.13,
+       "cmmlu-conceptual_physics_5shot_acc_norm": 39.15,
+       "cmmlu-college_mathematics_5shot_acc_norm": 36.0,
+       "cmmlu-high_school_biology_5shot_acc_norm": 38.06,
+       "cmmlu-high_school_physics_5shot_acc_norm": 32.45,
+       "cmmlu-high_school_chemistry_5shot_acc_norm": 30.05,
+       "cmmlu-high_school_geography_5shot_acc_norm": 45.96,
+       "cmmlu-professional_medicine_5shot_acc_norm": 31.25,
+       "cmmlu-electrical_engineering_5shot_acc_norm": 44.14,
+       "cmmlu-elementary_mathematics_5shot_acc_norm": 31.75,
+       "cmmlu-high_school_psychology_5shot_acc_norm": 45.14,
+       "cmmlu-high_school_statistics_5shot_acc_norm": 31.48,
+       "cmmlu-high_school_us_history_5shot_acc_norm": 50.49,
+       "cmmlu-high_school_mathematics_5shot_acc_norm": 32.22,
+       "cmmlu-professional_accounting_5shot_acc_norm": 32.62,
+       "cmmlu-professional_psychology_5shot_acc_norm": 38.56,
+       "cmmlu-college_computer_science_5shot_acc_norm": 34.0,
+       "cmmlu-high_school_world_history_5shot_acc_norm": 57.81,
+       "cmmlu-high_school_macroeconomics_5shot_acc_norm": 40.0,
+       "cmmlu-high_school_microeconomics_5shot_acc_norm": 45.38,
+       "cmmlu-high_school_computer_science_5shot_acc_norm": 47.0,
+       "cmmlu-high_school_european_history_5shot_acc_norm": 49.7,
+       "cmmlu-high_school_government_and_politics_5shot_acc_norm": 48.19
+     }
+   },
+   "versions": {},
+   "config_tasks": {},
+   "summary_tasks": {},
+   "summary_general": {}
+ }
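For reference, a minimal sketch (standard-library Python only; the path is the one added by this commit) of how the results file could be loaded and its headline score per task printed:

import json

# Path added by this commit, relative to the results repository root.
path = "amazon/MegaBeam-Mistral-7B-300k/results_2024-12-27T15-27-22.261848.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

print(data["config_general"]["model_name"])

# Each task block stores its primary score under "acc_norm", "acc", or "mc2",
# depending on the benchmark; fall through in that order.
for task, scores in data["results"].items():
    headline = scores.get("acc_norm") or scores.get("acc") or scores.get("mc2")
    print(f"{task}: {headline}")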