Isaak-Carter committed
Commit
b147afc
1 Parent(s): f364909

Update README.md

Files changed (1)
  1. README.md +386 -1
README.md CHANGED
@@ -73,4 +73,389 @@ pipeline = transformers.pipeline(

  outputs = pipeline(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
  print(outputs[0]["generated_text"])
- ```
+ ```
+
+ # Evaluation results:
+
+ ```json
+ {
+ "all": {
+ "acc": 0.635008846776534,
+ "acc_stderr": 0.03244450973873997,
+ "acc_norm": 0.6365238167399629,
+ "acc_norm_stderr": 0.033101612504829854,
+ "mc1": 0.397796817625459,
+ "mc1_stderr": 0.017133934248559635,
+ "mc2": 0.5816259277988214,
+ "mc2_stderr": 0.01521267822060948
+ },
+ "harness|arc:challenge|25": {
+ "acc": 0.6220136518771331,
+ "acc_stderr": 0.0141696645203031,
+ "acc_norm": 0.6459044368600683,
+ "acc_norm_stderr": 0.013975454122756557
+ },
+ "harness|hellaswag|10": {
+ "acc": 0.6512646883091018,
+ "acc_stderr": 0.004755960559929163,
+ "acc_norm": 0.8397729535949015,
+ "acc_norm_stderr": 0.003660668242740655
+ },
+ "harness|hendrycksTest-abstract_algebra|5": {
+ "acc": 0.4,
+ "acc_stderr": 0.04923659639173309,
+ "acc_norm": 0.4,
+ "acc_norm_stderr": 0.04923659639173309
+ },
+ "harness|hendrycksTest-anatomy|5": {
+ "acc": 0.5703703703703704,
+ "acc_stderr": 0.042763494943765995,
+ "acc_norm": 0.5703703703703704,
+ "acc_norm_stderr": 0.042763494943765995
+ },
+ "harness|hendrycksTest-astronomy|5": {
+ "acc": 0.6842105263157895,
+ "acc_stderr": 0.0378272898086547,
+ "acc_norm": 0.6842105263157895,
+ "acc_norm_stderr": 0.0378272898086547
+ },
+ "harness|hendrycksTest-business_ethics|5": {
+ "acc": 0.58,
+ "acc_stderr": 0.049604496374885836,
+ "acc_norm": 0.58,
+ "acc_norm_stderr": 0.049604496374885836
+ },
+ "harness|hendrycksTest-clinical_knowledge|5": {
+ "acc": 0.6792452830188679,
+ "acc_stderr": 0.028727502957880267,
+ "acc_norm": 0.6792452830188679,
+ "acc_norm_stderr": 0.028727502957880267
+ },
+ "harness|hendrycksTest-college_biology|5": {
+ "acc": 0.7361111111111112,
+ "acc_stderr": 0.03685651095897532,
+ "acc_norm": 0.7361111111111112,
+ "acc_norm_stderr": 0.03685651095897532
+ },
+ "harness|hendrycksTest-college_chemistry|5": {
+ "acc": 0.54,
+ "acc_stderr": 0.05009082659620332,
+ "acc_norm": 0.54,
+ "acc_norm_stderr": 0.05009082659620332
+ },
+ "harness|hendrycksTest-college_computer_science|5": {
+ "acc": 0.51,
+ "acc_stderr": 0.05024183937956912,
+ "acc_norm": 0.51,
+ "acc_norm_stderr": 0.05024183937956912
+ },
+ "harness|hendrycksTest-college_mathematics|5": {
+ "acc": 0.29,
+ "acc_stderr": 0.04560480215720684,
+ "acc_norm": 0.29,
+ "acc_norm_stderr": 0.04560480215720684
+ },
+ "harness|hendrycksTest-college_medicine|5": {
+ "acc": 0.6416184971098265,
+ "acc_stderr": 0.036563436533531585,
+ "acc_norm": 0.6416184971098265,
+ "acc_norm_stderr": 0.036563436533531585
+ },
+ "harness|hendrycksTest-college_physics|5": {
+ "acc": 0.3235294117647059,
+ "acc_stderr": 0.04655010411319619,
+ "acc_norm": 0.3235294117647059,
+ "acc_norm_stderr": 0.04655010411319619
+ },
+ "harness|hendrycksTest-computer_security|5": {
+ "acc": 0.76,
+ "acc_stderr": 0.04292346959909283,
+ "acc_norm": 0.76,
+ "acc_norm_stderr": 0.04292346959909283
+ },
+ "harness|hendrycksTest-conceptual_physics|5": {
+ "acc": 0.5829787234042553,
+ "acc_stderr": 0.03223276266711712,
+ "acc_norm": 0.5829787234042553,
+ "acc_norm_stderr": 0.03223276266711712
+ },
+ "harness|hendrycksTest-econometrics|5": {
+ "acc": 0.4649122807017544,
+ "acc_stderr": 0.046920083813689104,
+ "acc_norm": 0.4649122807017544,
+ "acc_norm_stderr": 0.046920083813689104
+ },
+ "harness|hendrycksTest-electrical_engineering|5": {
+ "acc": 0.5517241379310345,
+ "acc_stderr": 0.04144311810878152,
+ "acc_norm": 0.5517241379310345,
+ "acc_norm_stderr": 0.04144311810878152
+ },
+ "harness|hendrycksTest-elementary_mathematics|5": {
+ "acc": 0.42063492063492064,
+ "acc_stderr": 0.025424835086924006,
+ "acc_norm": 0.42063492063492064,
+ "acc_norm_stderr": 0.025424835086924006
+ },
+ "harness|hendrycksTest-formal_logic|5": {
+ "acc": 0.4444444444444444,
+ "acc_stderr": 0.044444444444444495,
+ "acc_norm": 0.4444444444444444,
+ "acc_norm_stderr": 0.044444444444444495
+ },
+ "harness|hendrycksTest-global_facts|5": {
+ "acc": 0.44,
+ "acc_stderr": 0.04988876515698589,
+ "acc_norm": 0.44,
+ "acc_norm_stderr": 0.04988876515698589
+ },
+ "harness|hendrycksTest-high_school_biology|5": {
+ "acc": 0.7548387096774194,
+ "acc_stderr": 0.024472243840895525,
+ "acc_norm": 0.7548387096774194,
+ "acc_norm_stderr": 0.024472243840895525
+ },
+ "harness|hendrycksTest-high_school_chemistry|5": {
+ "acc": 0.5024630541871922,
+ "acc_stderr": 0.035179450386910616,
+ "acc_norm": 0.5024630541871922,
+ "acc_norm_stderr": 0.035179450386910616
+ },
+ "harness|hendrycksTest-high_school_computer_science|5": {
+ "acc": 0.66,
+ "acc_stderr": 0.04760952285695237,
+ "acc_norm": 0.66,
+ "acc_norm_stderr": 0.04760952285695237
+ },
+ "harness|hendrycksTest-high_school_european_history|5": {
+ "acc": 0.7818181818181819,
+ "acc_stderr": 0.03225078108306289,
+ "acc_norm": 0.7818181818181819,
+ "acc_norm_stderr": 0.03225078108306289
+ },
+ "harness|hendrycksTest-high_school_geography|5": {
+ "acc": 0.797979797979798,
+ "acc_stderr": 0.02860620428922988,
+ "acc_norm": 0.797979797979798,
+ "acc_norm_stderr": 0.02860620428922988
+ },
+ "harness|hendrycksTest-high_school_government_and_politics|5": {
+ "acc": 0.8756476683937824,
+ "acc_stderr": 0.023814477086593552,
+ "acc_norm": 0.8756476683937824,
+ "acc_norm_stderr": 0.023814477086593552
+ },
+ "harness|hendrycksTest-high_school_macroeconomics|5": {
+ "acc": 0.658974358974359,
+ "acc_stderr": 0.02403548967633509,
+ "acc_norm": 0.658974358974359,
+ "acc_norm_stderr": 0.02403548967633509
+ },
+ "harness|hendrycksTest-high_school_mathematics|5": {
+ "acc": 0.32592592592592595,
+ "acc_stderr": 0.02857834836547308,
+ "acc_norm": 0.32592592592592595,
+ "acc_norm_stderr": 0.02857834836547308
+ },
+ "harness|hendrycksTest-high_school_microeconomics|5": {
+ "acc": 0.6638655462184874,
+ "acc_stderr": 0.030684737115135363,
+ "acc_norm": 0.6638655462184874,
+ "acc_norm_stderr": 0.030684737115135363
+ },
+ "harness|hendrycksTest-high_school_physics|5": {
+ "acc": 0.304635761589404,
+ "acc_stderr": 0.03757949922943344,
+ "acc_norm": 0.304635761589404,
+ "acc_norm_stderr": 0.03757949922943344
+ },
+ "harness|hendrycksTest-high_school_psychology|5": {
+ "acc": 0.8238532110091743,
+ "acc_stderr": 0.016332882393431353,
+ "acc_norm": 0.8238532110091743,
+ "acc_norm_stderr": 0.016332882393431353
+ },
+ "harness|hendrycksTest-high_school_statistics|5": {
+ "acc": 0.5092592592592593,
+ "acc_stderr": 0.03409386946992699,
+ "acc_norm": 0.5092592592592593,
+ "acc_norm_stderr": 0.03409386946992699
+ },
+ "harness|hendrycksTest-high_school_us_history|5": {
+ "acc": 0.7990196078431373,
+ "acc_stderr": 0.02812597226565437,
+ "acc_norm": 0.7990196078431373,
+ "acc_norm_stderr": 0.02812597226565437
+ },
+ "harness|hendrycksTest-high_school_world_history|5": {
+ "acc": 0.759493670886076,
+ "acc_stderr": 0.027820781981149685,
+ "acc_norm": 0.759493670886076,
+ "acc_norm_stderr": 0.027820781981149685
+ },
+ "harness|hendrycksTest-human_aging|5": {
+ "acc": 0.6681614349775785,
+ "acc_stderr": 0.03160295143776679,
+ "acc_norm": 0.6681614349775785,
+ "acc_norm_stderr": 0.03160295143776679
+ },
+ "harness|hendrycksTest-human_sexuality|5": {
+ "acc": 0.7404580152671756,
+ "acc_stderr": 0.03844876139785271,
+ "acc_norm": 0.7404580152671756,
+ "acc_norm_stderr": 0.03844876139785271
+ },
+ "harness|hendrycksTest-international_law|5": {
+ "acc": 0.8016528925619835,
+ "acc_stderr": 0.036401182719909456,
+ "acc_norm": 0.8016528925619835,
+ "acc_norm_stderr": 0.036401182719909456
+ },
+ "harness|hendrycksTest-jurisprudence|5": {
+ "acc": 0.7777777777777778,
+ "acc_stderr": 0.040191074725573483,
+ "acc_norm": 0.7777777777777778,
+ "acc_norm_stderr": 0.040191074725573483
+ },
+ "harness|hendrycksTest-logical_fallacies|5": {
+ "acc": 0.754601226993865,
+ "acc_stderr": 0.03380939813943354,
+ "acc_norm": 0.754601226993865,
+ "acc_norm_stderr": 0.03380939813943354
+ },
+ "harness|hendrycksTest-machine_learning|5": {
+ "acc": 0.4732142857142857,
+ "acc_stderr": 0.047389751192741546,
+ "acc_norm": 0.4732142857142857,
+ "acc_norm_stderr": 0.047389751192741546
+ },
+ "harness|hendrycksTest-management|5": {
+ "acc": 0.7766990291262136,
+ "acc_stderr": 0.04123553189891431,
+ "acc_norm": 0.7766990291262136,
+ "acc_norm_stderr": 0.04123553189891431
+ },
+ "harness|hendrycksTest-marketing|5": {
+ "acc": 0.8632478632478633,
+ "acc_stderr": 0.022509033937077802,
+ "acc_norm": 0.8632478632478633,
+ "acc_norm_stderr": 0.022509033937077802
+ },
+ "harness|hendrycksTest-medical_genetics|5": {
+ "acc": 0.69,
+ "acc_stderr": 0.04648231987117316,
+ "acc_norm": 0.69,
+ "acc_norm_stderr": 0.04648231987117316
+ },
+ "harness|hendrycksTest-miscellaneous|5": {
+ "acc": 0.8173690932311622,
+ "acc_stderr": 0.013816335389973141,
+ "acc_norm": 0.8173690932311622,
+ "acc_norm_stderr": 0.013816335389973141
+ },
+ "harness|hendrycksTest-moral_disputes|5": {
+ "acc": 0.7254335260115607,
+ "acc_stderr": 0.02402774515526502,
+ "acc_norm": 0.7254335260115607,
+ "acc_norm_stderr": 0.02402774515526502
+ },
+ "harness|hendrycksTest-moral_scenarios|5": {
+ "acc": 0.27039106145251396,
+ "acc_stderr": 0.014854993938010071,
+ "acc_norm": 0.27039106145251396,
+ "acc_norm_stderr": 0.014854993938010071
+ },
+ "harness|hendrycksTest-nutrition|5": {
+ "acc": 0.7516339869281046,
+ "acc_stderr": 0.02473998135511359,
+ "acc_norm": 0.7516339869281046,
+ "acc_norm_stderr": 0.02473998135511359
+ },
+ "harness|hendrycksTest-philosophy|5": {
+ "acc": 0.7331189710610932,
+ "acc_stderr": 0.025122637608816653,
+ "acc_norm": 0.7331189710610932,
+ "acc_norm_stderr": 0.025122637608816653
+ },
+ "harness|hendrycksTest-prehistory|5": {
+ "acc": 0.7222222222222222,
+ "acc_stderr": 0.024922001168886324,
+ "acc_norm": 0.7222222222222222,
+ "acc_norm_stderr": 0.024922001168886324
+ },
+ "harness|hendrycksTest-professional_accounting|5": {
+ "acc": 0.46099290780141844,
+ "acc_stderr": 0.02973659252642444,
+ "acc_norm": 0.46099290780141844,
+ "acc_norm_stderr": 0.02973659252642444
+ },
+ "harness|hendrycksTest-professional_law|5": {
+ "acc": 0.4680573663624511,
+ "acc_stderr": 0.012744149704869647,
+ "acc_norm": 0.4680573663624511,
+ "acc_norm_stderr": 0.012744149704869647
+ },
+ "harness|hendrycksTest-professional_medicine|5": {
+ "acc": 0.6801470588235294,
+ "acc_stderr": 0.02833295951403121,
+ "acc_norm": 0.6801470588235294,
+ "acc_norm_stderr": 0.02833295951403121
+ },
+ "harness|hendrycksTest-professional_psychology|5": {
+ "acc": 0.6470588235294118,
+ "acc_stderr": 0.01933314202079716,
+ "acc_norm": 0.6470588235294118,
+ "acc_norm_stderr": 0.01933314202079716
+ },
+ "harness|hendrycksTest-public_relations|5": {
+ "acc": 0.6727272727272727,
+ "acc_stderr": 0.0449429086625209,
+ "acc_norm": 0.6727272727272727,
+ "acc_norm_stderr": 0.0449429086625209
+ },
+ "harness|hendrycksTest-security_studies|5": {
+ "acc": 0.6816326530612244,
+ "acc_stderr": 0.029822533793982062,
+ "acc_norm": 0.6816326530612244,
+ "acc_norm_stderr": 0.029822533793982062
+ },
+ "harness|hendrycksTest-sociology|5": {
+ "acc": 0.8507462686567164,
+ "acc_stderr": 0.025196929874827072,
+ "acc_norm": 0.8507462686567164,
+ "acc_norm_stderr": 0.025196929874827072
+ },
+ "harness|hendrycksTest-us_foreign_policy|5": {
+ "acc": 0.85,
+ "acc_stderr": 0.035887028128263734,
+ "acc_norm": 0.85,
+ "acc_norm_stderr": 0.035887028128263734
+ },
+ "harness|hendrycksTest-virology|5": {
+ "acc": 0.5180722891566265,
+ "acc_stderr": 0.03889951252827216,
+ "acc_norm": 0.5180722891566265,
+ "acc_norm_stderr": 0.03889951252827216
+ },
+ "harness|hendrycksTest-world_religions|5": {
+ "acc": 0.8362573099415205,
+ "acc_stderr": 0.028380919596145866,
+ "acc_norm": 0.8362573099415205,
+ "acc_norm_stderr": 0.028380919596145866
+ },
+ "harness|truthfulqa:mc|0": {
+ "mc1": 0.397796817625459,
+ "mc1_stderr": 0.017133934248559635,
+ "mc2": 0.5816259277988214,
+ "mc2_stderr": 0.01521267822060948
+ },
+ "harness|winogrande|5": {
+ "acc": 0.7963693764798737,
+ "acc_stderr": 0.011317798781626913
+ },
+ "harness|gsm8k|5": {
+ "acc": 0.5966641394996209,
+ "acc_stderr": 0.013512654781814702
+ }
+ }
+ ```
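The JSON added above follows the lm-evaluation-harness / Open LLM Leaderboard results layout: one entry per task (with the few-shot count after the `|`), each holding `acc`, `acc_norm`, and their standard errors, plus `mc1`/`mc2` for TruthfulQA. A minimal sketch of how such a file could be post-processed, assuming it has been saved locally as `results.json` (a hypothetical path) and that the usual leaderboard metric choices apply (ARC and HellaSwag `acc_norm`, mean MMLU `acc`, TruthfulQA `mc2`, Winogrande and GSM8K `acc`):

```python
import json
from statistics import mean

# Hypothetical path: save the JSON block from the diff above to this file first.
with open("results.json") as f:
    results = json.load(f)

# MMLU score: average accuracy over the "hendrycksTest" subtasks.
mmlu = mean(
    v["acc"] for k, v in results.items() if k.startswith("harness|hendrycksTest-")
)

# Leaderboard-style average, assuming the usual Open LLM Leaderboard metric choices.
average = mean([
    results["harness|arc:challenge|25"]["acc_norm"],
    results["harness|hellaswag|10"]["acc_norm"],
    mmlu,
    results["harness|truthfulqa:mc|0"]["mc2"],
    results["harness|winogrande|5"]["acc"],
    results["harness|gsm8k|5"]["acc"],
])

print(f"MMLU: {mmlu:.4f} | leaderboard-style average: {average:.4f}")
```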
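For context, the generation snippet visible in the diff's context lines is only the tail of the README's usage example. A self-contained version might look roughly like the sketch below; the model id is a placeholder (the repository's model name is not shown in this diff), and the `torch_dtype`/`device_map` settings are assumptions, not taken from the original example.

```python
import torch
import transformers

# Placeholder model id -- substitute the repository this README belongs to.
model_id = "Isaak-Carter/<model-name>"

# Standard transformers text-generation pipeline; dtype and device settings are assumed.
pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)

prompt = "Explain in one sentence what acc_norm measures."
outputs = pipeline(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
print(outputs[0]["generated_text"])
```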