choco9966 committed
Commit d744075
1 Parent(s): 1752f03

Add results for 2023-10-19 08:44:42

choco9966/Llama-2-7b-instruct-tuning/result_2023-10-19 08:44:42.json CHANGED
@@ -3,14 +3,14 @@
         "harness|ko_arc_challenge|25": {
             "acc": 0.2960750853242321,
             "acc_stderr": 0.013340916085246268,
-            "acc_norm": 0.33276450511945393,
-            "acc_norm_stderr": 0.013769863046192304
+            "acc_norm": 0.33361774744027306,
+            "acc_norm_stderr": 0.013778687054176534
         },
         "harness|ko_hellaswag|10": {
-            "acc": 0.3458474407488548,
-            "acc_stderr": 0.004746716805735752,
-            "acc_norm": 0.42471619199362676,
-            "acc_norm_stderr": 0.00493289647246057
+            "acc": 0.34534953196574386,
+            "acc_stderr": 0.0047451035439012934,
+            "acc_norm": 0.4252141007767377,
+            "acc_norm_stderr": 0.004933650697000603
         },
         "harness|ko_mmlu_world_religions|5": {
             "acc": 0.4327485380116959,
@@ -37,10 +37,10 @@
             "acc_norm_stderr": 0.040247784019771124
         },
         "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.38,
-            "acc_stderr": 0.048783173121456316,
-            "acc_norm": 0.38,
-            "acc_norm_stderr": 0.048783173121456316
+            "acc": 0.37,
+            "acc_stderr": 0.048523658709391,
+            "acc_norm": 0.37,
+            "acc_norm_stderr": 0.048523658709391
         },
         "harness|ko_mmlu_conceptual_physics|5": {
             "acc": 0.3659574468085106,
@@ -49,22 +49,22 @@
             "acc_norm_stderr": 0.03148955829745529
         },
         "harness|ko_mmlu_virology|5": {
-            "acc": 0.2891566265060241,
-            "acc_stderr": 0.03529486801511114,
-            "acc_norm": 0.2891566265060241,
-            "acc_norm_stderr": 0.03529486801511114
+            "acc": 0.29518072289156627,
+            "acc_stderr": 0.03550920185689629,
+            "acc_norm": 0.29518072289156627,
+            "acc_norm_stderr": 0.03550920185689629
         },
         "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.3858520900321543,
-            "acc_stderr": 0.02764814959975147,
-            "acc_norm": 0.3858520900321543,
-            "acc_norm_stderr": 0.02764814959975147
+            "acc": 0.3890675241157556,
+            "acc_stderr": 0.027690337536485372,
+            "acc_norm": 0.3890675241157556,
+            "acc_norm_stderr": 0.027690337536485372
         },
         "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.34977578475336324,
-            "acc_stderr": 0.03200736719484503,
-            "acc_norm": 0.34977578475336324,
-            "acc_norm_stderr": 0.03200736719484503
+            "acc": 0.3452914798206278,
+            "acc_stderr": 0.03191100192835794,
+            "acc_norm": 0.3452914798206278,
+            "acc_norm_stderr": 0.03191100192835794
         },
         "harness|ko_mmlu_human_sexuality|5": {
             "acc": 0.45038167938931295,
@@ -73,16 +73,16 @@
             "acc_norm_stderr": 0.04363643698524779
         },
         "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.36,
-            "acc_stderr": 0.04824181513244218,
-            "acc_norm": 0.36,
-            "acc_norm_stderr": 0.04824181513244218
+            "acc": 0.35,
+            "acc_stderr": 0.04793724854411021,
+            "acc_norm": 0.35,
+            "acc_norm_stderr": 0.04793724854411021
         },
         "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.35353535353535354,
-            "acc_stderr": 0.03406086723547153,
-            "acc_norm": 0.35353535353535354,
-            "acc_norm_stderr": 0.03406086723547153
+            "acc": 0.3484848484848485,
+            "acc_stderr": 0.033948539651564025,
+            "acc_norm": 0.3484848484848485,
+            "acc_norm_stderr": 0.033948539651564025
         },
         "harness|ko_mmlu_electrical_engineering|5": {
             "acc": 0.3724137931034483,
@@ -97,16 +97,16 @@
             "acc_norm_stderr": 0.04092563958237654
         },
         "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.3403361344537815,
-            "acc_stderr": 0.030778057422931673,
-            "acc_norm": 0.3403361344537815,
-            "acc_norm_stderr": 0.030778057422931673
+            "acc": 0.33613445378151263,
+            "acc_stderr": 0.030684737115135377,
+            "acc_norm": 0.33613445378151263,
+            "acc_norm_stderr": 0.030684737115135377
         },
         "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.31025641025641026,
-            "acc_stderr": 0.023454674889404288,
-            "acc_norm": 0.31025641025641026,
-            "acc_norm_stderr": 0.023454674889404288
+            "acc": 0.3128205128205128,
+            "acc_stderr": 0.023507579020645365,
+            "acc_norm": 0.3128205128205128,
+            "acc_norm_stderr": 0.023507579020645365
         },
         "harness|ko_mmlu_computer_security|5": {
             "acc": 0.37,
@@ -133,10 +133,10 @@
             "acc_norm_stderr": 0.03161856335358611
         },
         "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.3548387096774194,
-            "acc_stderr": 0.02721888977330876,
-            "acc_norm": 0.3548387096774194,
-            "acc_norm_stderr": 0.02721888977330876
+            "acc": 0.3580645161290323,
+            "acc_stderr": 0.027273890594300642,
+            "acc_norm": 0.3580645161290323,
+            "acc_norm_stderr": 0.027273890594300642
         },
         "harness|ko_mmlu_marketing|5": {
             "acc": 0.5128205128205128,
@@ -157,16 +157,16 @@
             "acc_norm_stderr": 0.04653429807913508
         },
         "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.29259259259259257,
-            "acc_stderr": 0.027738969632176095,
-            "acc_norm": 0.29259259259259257,
-            "acc_norm_stderr": 0.027738969632176095
+            "acc": 0.2962962962962963,
+            "acc_stderr": 0.027840811495871916,
+            "acc_norm": 0.2962962962962963,
+            "acc_norm_stderr": 0.027840811495871916
         },
         "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.2781456953642384,
-            "acc_stderr": 0.03658603262763743,
-            "acc_norm": 0.2781456953642384,
-            "acc_norm_stderr": 0.03658603262763743
+            "acc": 0.271523178807947,
+            "acc_stderr": 0.03631329803969653,
+            "acc_norm": 0.271523178807947,
+            "acc_norm_stderr": 0.03631329803969653
         },
         "harness|ko_mmlu_sociology|5": {
             "acc": 0.48258706467661694,
@@ -187,10 +187,10 @@
             "acc_norm_stderr": 0.0236369759961018
         },
         "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.3055555555555556,
-            "acc_stderr": 0.03852084696008534,
-            "acc_norm": 0.3055555555555556,
-            "acc_norm_stderr": 0.03852084696008534
+            "acc": 0.2986111111111111,
+            "acc_stderr": 0.03827052357950756,
+            "acc_norm": 0.2986111111111111,
+            "acc_norm_stderr": 0.03827052357950756
         },
         "harness|ko_mmlu_college_chemistry|5": {
             "acc": 0.31,
@@ -199,10 +199,10 @@
             "acc_norm_stderr": 0.04648231987117316
         },
         "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.44,
-            "acc_stderr": 0.04988876515698589,
-            "acc_norm": 0.44,
-            "acc_norm_stderr": 0.04988876515698589
+            "acc": 0.43,
+            "acc_stderr": 0.049756985195624284,
+            "acc_norm": 0.43,
+            "acc_norm_stderr": 0.049756985195624284
         },
         "harness|ko_mmlu_moral_disputes|5": {
             "acc": 0.3439306358381503,
@@ -217,16 +217,16 @@
             "acc_norm_stderr": 0.03731133519673893
         },
         "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.38580246913580246,
-            "acc_stderr": 0.027085401226132143,
-            "acc_norm": 0.38580246913580246,
-            "acc_norm_stderr": 0.027085401226132143
+            "acc": 0.38271604938271603,
+            "acc_stderr": 0.027044538138402616,
+            "acc_norm": 0.38271604938271603,
+            "acc_norm_stderr": 0.027044538138402616
         },
         "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.28,
-            "acc_stderr": 0.04512608598542128,
-            "acc_norm": 0.28,
-            "acc_norm_stderr": 0.04512608598542128
+            "acc": 0.27,
+            "acc_stderr": 0.044619604333847394,
+            "acc_norm": 0.27,
+            "acc_norm_stderr": 0.044619604333847394
         },
         "harness|ko_mmlu_high_school_government_and_politics|5": {
             "acc": 0.43523316062176165,
@@ -241,10 +241,10 @@
             "acc_norm_stderr": 0.041857744240220575
         },
         "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.3376146788990826,
-            "acc_stderr": 0.020275265986638903,
-            "acc_norm": 0.3376146788990826,
-            "acc_norm_stderr": 0.020275265986638903
+            "acc": 0.3394495412844037,
+            "acc_stderr": 0.02030210934266235,
+            "acc_norm": 0.3394495412844037,
+            "acc_norm_stderr": 0.02030210934266235
         },
         "harness|ko_mmlu_formal_logic|5": {
             "acc": 0.30158730158730157,
@@ -253,16 +253,16 @@
             "acc_norm_stderr": 0.04104947269903394
         },
         "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.3888888888888889,
-            "acc_stderr": 0.027914055510468008,
-            "acc_norm": 0.3888888888888889,
-            "acc_norm_stderr": 0.027914055510468008
+            "acc": 0.3954248366013072,
+            "acc_stderr": 0.02799672318063145,
+            "acc_norm": 0.3954248366013072,
+            "acc_norm_stderr": 0.02799672318063145
         },
         "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.47,
-            "acc_stderr": 0.05016135580465919,
-            "acc_norm": 0.47,
-            "acc_norm_stderr": 0.05016135580465919
+            "acc": 0.46,
+            "acc_stderr": 0.05009082659620332,
+            "acc_norm": 0.46,
+            "acc_norm_stderr": 0.05009082659620332
         },
         "harness|ko_mmlu_international_law|5": {
             "acc": 0.49586776859504134,
@@ -277,10 +277,10 @@
             "acc_norm_stderr": 0.03894734487013315
         },
         "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.29411764705882354,
-            "acc_stderr": 0.018433427649401896,
-            "acc_norm": 0.29411764705882354,
-            "acc_norm_stderr": 0.018433427649401896
+            "acc": 0.2957516339869281,
+            "acc_stderr": 0.018463154132632813,
+            "acc_norm": 0.2957516339869281,
+            "acc_norm_stderr": 0.018463154132632813
         },
         "harness|ko_mmlu_professional_accounting|5": {
             "acc": 0.2765957446808511,
@@ -337,10 +337,10 @@
             "acc_norm_stderr": 0.031052391937584356
         },
         "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.2633637548891786,
-            "acc_stderr": 0.011249506403605284,
-            "acc_norm": 0.2633637548891786,
-            "acc_norm_stderr": 0.011249506403605284
+            "acc": 0.2627118644067797,
+            "acc_stderr": 0.011240545514995669,
+            "acc_norm": 0.2627118644067797,
+            "acc_norm_stderr": 0.011240545514995669
         },
         "harness|ko_mmlu_high_school_us_history|5": {
             "acc": 0.31862745098039214,
@@ -355,16 +355,16 @@
             "acc_norm_stderr": 0.03793713171165634
         },
         "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.3317013463892289,
-            "mc1_stderr": 0.01648214881024146,
-            "mc2": 0.5140886375597211,
-            "mc2_stderr": 0.016082111072689104
+            "mc1": 0.3329253365973072,
+            "mc1_stderr": 0.016497402382012055,
+            "mc2": 0.5140993490896929,
+            "mc2_stderr": 0.016082660027674764
         },
         "harness|ko_commongen_v2|2": {
             "acc": 0.25737898465171194,
             "acc_stderr": 0.015030899730346749,
-            "acc_norm": 0.29161747343565525,
-            "acc_norm_stderr": 0.01562627669007024
+            "acc_norm": 0.29043683589138136,
+            "acc_norm_stderr": 0.015607602569814626
         }
     },
     "versions": {