Dataset schema (one record per evaluated model):

| Field | Type | Notes |
|---|---|---|
| model | string | model identifier, length 4–89 |
| revision | string | 1 distinct value |
| model_sha | string | length 0–40 |
| results | dict | per-benchmark scores: arc:challenge, hellaswag, hendrycksTest, truthfulqa:mc |
| commit | string | length 40 |
| date | unknown | values are timestamps such as "2023-11-23T17:28:23" |
| score | float64 | range 21.8–83 |
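The stored `score` appears to track the unweighted mean of the four benchmark values in `results` (for the first record below, (64.8 + 85.9 + 63.6 + 48.8) / 4 ≈ 65.8). A minimal sketch of that check, assuming the relationship really is a simple average computed on unrounded results, so small rounding drift is expected:

```python
import json

# One flattened record from the table below, reassembled into a single dict.
# The `results` field is stored as the JSON object shown in the row.
record = {
    "model": "h2oai/h2ogpt-research-oasst1-llama-65b",
    "revision": "main",
    "model_sha": "a6d8676aaa2ca2c25ea99180b538f0369dc70185",
    "results": json.loads(
        '{"arc:challenge": 64.8, "hellaswag": 85.9,'
        ' "hendrycksTest": 63.6, "truthfulqa:mc": 48.8}'
    ),
    "score": 65.8,
}

# Assumption: `score` is the unweighted mean of the four benchmark values.
mean = sum(record["results"].values()) / len(record["results"])
print(f"computed mean: {mean}, stored score: {record['score']}")
# The two values should agree to within rounding.
```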
Every record shares revision `main`, commit `9ba100d35ce48d3d4c132947464c93c861932caa`, and date `"2023-11-23T17:28:23"`; the remaining per-record fields are tabulated below.

| model | model_sha | arc:challenge | hellaswag | hendrycksTest | truthfulqa:mc | score |
|---|---|---|---|---|---|---|
| h2oai/h2ogpt-research-oasst1-llama-65b | a6d8676aaa2ca2c25ea99180b538f0369dc70185 | 64.8 | 85.9 | 63.6 | 48.8 | 65.8 |
| h2oai/h2ogpt-oig-oasst1-256-6_9b | f1c9bac89b74d3487cb092788ce828fb9520c1a7 | 39.9 | 65.4 | 26.4 | 35 | 41.7 |
| h2oai/h2ogpt-gm-oasst1-en-2048-open-llama-7b-preview-300bt | 754e0c90ed5d9241fdfd5a188572b3ea2152eaa7 | 34 | 50.5 | 24.7 | 41.8 | 37.8 |
| h2oai/h2ogpt-research-oig-oasst1-512-30b | 4215e83b9038c9e61d979cf5223b29f860bace9b | 59 | 82.6 | 50.7 | 48.5 | 60.2 |
| h2oai/h2ogpt-gm-oasst1-en-1024-20b | 1a5b8d25587eab67d837621a6c9423e7ef6df289 | 48 | 72.8 | 26 | 39.9 | 46.7 |
| h2oai/h2ogpt-oasst1-512-12b | c6bb0fe363e0105839d34ca757793b61c9606f95 | 42.3 | 70.2 | 26 | 36.4 | 43.7 |
| h2oai/h2ogpt-gm-oasst1-en-1024-12b | e547fffafb382fd39ef5de35ba3b5afc1b43e74d | 43.1 | 69.7 | 25.9 | 38 | 44.2 |
| minlik/chinese-alpaca-33b-merged | fc2535104c0b48afc42575f9fe10bbcbb7612ec3 | 59.3 | 78.4 | 57.7 | 52.5 | 62 |
| xzuyn/Alpacino-SuperCOT-13B | 3a82b04684fe99d59556421c3f96a187049a3cec | 58.4 | 81.7 | 47.9 | 45.4 | 58.4 |
| xzuyn/MedicWizard-7B | 0b3ef975fb5e8ac1eae775160ab54c98221889df | 53.5 | 78.4 | 44.6 | 41.3 | 54.4 |
| teknium/OpenHermes-2.5-Mistral-7B | 2a54cad766bc90828354db5c4199795aecfd0df1 | 64.9 | 84.3 | 63.8 | 52.3 | 66.3 |
| teknium/OpenHermes-2-Mistral-7B | 843a9bb94fac7d7bfc1b7c9f201efba295b6f5d6 | 63.1 | 83.8 | 63.5 | 50.2 | 65.2 |
| teknium/OpenHermes-13B | f09d0fe655ad57cce9179b7b40ea6f81e07db18c | 59.8 | 82.2 | 56.3 | 46 | 61.1 |
| teknium/CollectiveCognition-v1.1-Mistral-7B | 5f57f70ec99450c70da2540e94dd7fd67be4b23c | 62.1 | 84.2 | 62.3 | 57.6 | 66.6 |
| nathan0/mpt_delta_tuned_model_v2 | 6adb4cb4ba37f4ce9e9c3051d343addf1098182c | 50.7 | 76.4 | 28.7 | 35.5 | 47.8 |
| nathan0/mpt_delta_tuned_model_v3 | 6adb4cb4ba37f4ce9e9c3051d343addf1098182c | 50.7 | 76.4 | 28.7 | 35.5 | 47.8 |
| chargoddard/duplicitous-mammal-13b | a05d0562b8da2ac2e76aa65984e8063249bc85c8 | 61.7 | 83.8 | 57.5 | 52.3 | 63.8 |
| chargoddard/llama2-22b-blocktriangular | 40a51343ae776b5cb39f2b4343ae8f9b676ffd58 | 58.5 | 82.6 | 54.6 | 39.3 | 58.8 |
| chargoddard/MelangeB-70b | 08239fb1e30b1e42b14370f23e942bc51e76027c | 71.7 | 87.5 | 70 | 59.4 | 72.1 |
| chargoddard/ypotryll-22b-epoch2-qlora | 26fdd8fa420d72ed835c7d17086f0441db0985d4 | 59.2 | 80.7 | 54.5 | 40.4 | 58.7 |
| chargoddard/llama-2-26b-trenchcoat-stack | 075d67c3223f4b379ab7f997c3787cd0630d80f7 | 55 | 79.9 | 53.7 | 40.5 | 57.3 |
| chargoddard/llama2-22b | 2bece0787009b4b584f49d0e0d1b49ecf4a52da9 | 58.5 | 82.6 | 54.7 | 39.8 | 58.9 |
| chargoddard/platypus-2-22b-relora | 15bca3e9b25cc2f280fec21686ef3bc445217503 | 57.7 | 82.4 | 55.3 | 43.6 | 59.8 |
| chargoddard/platypus2-22b-relora | 15bca3e9b25cc2f280fec21686ef3bc445217503 | 57.5 | 82.4 | 54.9 | 43.6 | 59.6 |
| chargoddard/storytime-13b | 233568319a636b6a7b02a4def2c51d08a3e0fbfc | 62 | 84 | 57.5 | 52.5 | 64 |
| chargoddard/Chronorctypus-Limarobormes-13b | 75c1bf5f4b40cf61873ff6487ccd3efc4f684330 | 59.9 | 82.8 | 58.4 | 51.9 | 63.2 |
| chargoddard/MelangeC-70b | e54a2b924dec135f3fa2373933ab8485178cde1b | 71.7 | 87.6 | 70.4 | 58.1 | 72 |
| Open-Orca/Mistral-7B-SlimOrca | a9744d8cf9ce4230678a891bcf8bba7cbc0aaece | 62.5 | 83.9 | 62.8 | 54.2 | 65.8 |
| Open-Orca/LlongOrca-13B-16k | 8ea1fb205553cadbc90069d80a7e58281b6281c3 | 62.5 | 82.8 | 55.5 | 50.1 | 62.7 |
| Open-Orca/OpenOrca-Preview1-13B | 4c558283a98348383460939afda9cb5c54544c8f | 54.9 | 78.2 | 50.1 | 49.1 | 58.1 |
| Open-Orca/OpenOrca-Platypus2-13B | e7a40134f7eb687c6ab66d445dc7251257f8d391 | 62.8 | 83.2 | 59.4 | 53.1 | 64.6 |
| Open-Orca/LlongOrca-7B-16k | 1370c7c595e6c8394e6332bc535ae25e21def85b | 57.5 | 79.4 | 49.4 | 49.8 | 59 |
| grantprice/Cerebras-GPT-590M-finetuned-DND | a0a2fbe342cdc86433913ba5f96978e4703ff672 | 24.7 | 27.8 | 23.1 | 48.3 | 31 |
| lgaalves/tinyllama-1.1b-chat-v0.3_platypus | 0bb6ebe1d41d394bae0ed9107ec8d776d9d76a68 | 30.3 | 55.1 | 26.1 | 39.2 | 37.7 |
| lgaalves/gpt2_guanaco-dolly-platypus | 6bf0a8146cf255c829ec2ad83926c8b80945b431 | 23.5 | 31 | 26.4 | 40 | 30.2 |
| lgaalves/mistral-7b_open_platypus | b9a60b9ad0fe06bd314ffe99d543f1df6ecd10da | 55.8 | 82.1 | 59.8 | 48.9 | 61.6 |
| lgaalves/gpt2_camel_physics-platypus | 66165ff32ed8de6c39f3524a810f5e97ba6d3347 | 23 | 31.3 | 26.9 | 39.6 | 30.2 |
| lgaalves/gpt2-xl_lima | f7db5b1db521abd7578b95138e737637e0037ca5 | 31.1 | 51.3 | 25.4 | 38.7 | 36.6 |
| lgaalves/llama-2-7b-hf_open-platypus | c7e776f3f3afc0fa22cb7aff0d00522e571e9b29 | 51.5 | 78.6 | 43.6 | 43.7 | 54.4 |
| NobodyExistsOnTheInternet/PuffedLIMA13bQLORA | 7da6d235d625e16c850ccd0b947dee40071b1f89 | 59.9 | 84.4 | 53.7 | 39.9 | 59.5 |
| NobodyExistsOnTheInternet/PuffedConvo13bLoraE4 | 40e4fce0c25bd23f6011b424748ee2b5374b98d5 | 59.6 | 84.4 | 53.7 | 39.8 | 59.4 |
| AlpinDale/pygmalion-instruct | 1665b271316dfee05b2a8daf8b9d6c22ed0aef60 | 52.6 | 77.7 | 35.9 | 42.1 | 52.1 |
| Doctor-Shotgun/mythospice-70b | b00992c26604c9cd496bc41472a05e4c01cd2008 | 69.3 | 87.5 | 70.1 | 56.8 | 70.9 |
| Doctor-Shotgun/mythospice-limarp-70b | ff29fed2a33fc050fd20d0e25b5b23c4a101b074 | 69.2 | 87.5 | 70.1 | 55.9 | 70.7 |
| voidful/changpt-bart | e3d26f736b8b47d5275421be6133b81bef84db7d | 29.4 | 26.3 | 23.1 | 47.9 | 31.7 |
| The-Face-Of-Goonery/Huginn-22b-Prototype | 29222b05794abb862ad0aaaf3020696c9f599810 | 57.7 | 80.7 | 49.8 | 52.1 | 60.1 |
| The-Face-Of-Goonery/Huginn-v3-13b | 6c2faf828c5380d28c51fcb4d3d0f1a420fb9a9a | 60.7 | 82.3 | 52.3 | 50.6 | 61.5 |
| The-Face-Of-Goonery/Huginn-13b-v4.5 | f3be56d8bf71a8d3905974b1e5fcba7336b02159 | 60.7 | 82.3 | 52.3 | 50.6 | 61.5 |
| The-Face-Of-Goonery/Huginn-13b-v1.2 | cb3562e7aae05a95fe61610b7b8f4957d3529ce7 | 60.9 | 83.6 | 55.3 | 52 | 63 |
| The-Face-Of-Goonery/huginnv1.2 | aed4ddc951c657993939fa5b87a4088550569a3b | 62.4 | 84.3 | 57 | 47.8 | 62.9 |
| simsim314/WizardLM-70B-V1.0-HF | 97112db6d0fae8354c13437a5e7dc99fb37b8c2e | 65.4 | 84.4 | 64 | 54.8 | 67.2 |
| HyperbeeAI/Tulpar-7b-v0 | d7c2bc52a3ae13571357f51273ae948caf84400e | 56.3 | 79 | 52.6 | 51.7 | 59.9 |
| illuin/test-custom-llama | d985610bef080473e40f01c53266083c5f0c3169 | 52.3 | 77.5 | 36.6 | 33.8 | 50 |
| luffycodes/mcq-vicuna-13b-v1.5 | f769a92cfeffe8ee07beee8814ce7eca7cd62805 | 56.2 | 81.1 | 53.4 | 44.1 | 58.7 |
| luffycodes/llama-shishya-7b-ep3-v2 | 679c6cb9e869df686b1ae415ed440e6cfc05f80b | 47.4 | 75.9 | 43.8 | 30.2 | 49.3 |
| luffycodes/nash-vicuna-13b-v1dot5-ep2-w-rag-w-simple | 848ef91ab46a72260542283918a971347c6bfa93 | 59.1 | 80.6 | 56.1 | 51.3 | 61.8 |
| luffycodes/llama-shishya-7b-ep3-v1 | 8dc109f45ef36cc7bbd0f5d83fb65ac8e768d1bd | 48 | 76.6 | 46.1 | 30.9 | 50.4 |
| luffycodes/vicuna-shishya-7b-ep3-v1 | 082cf758aa3f6d8f956056003b5b3b6cde447d88 | 45.9 | 76.4 | 50 | 40.3 | 53.2 |
| RoversX/llama-2-7b-hf-small-shards-Samantha-V1-SFT | c39cee3821269e7fdffa690c2d0836c74dfebd25 | 53.2 | 77.7 | 43.5 | 45.3 | 54.9 |
| circulus/Llama-2-13b-orca-v1 | e77ec90f432bdffa210a0e4310d117e5d1c662df | 62.2 | 82.3 | 57.7 | 49.6 | 63 |
| mosaicml/mpt-30b-chat | b7957743f18845ff8695f7919420adb769ec225e | 58.4 | 82.4 | 51 | 52 | 61 |
| mosaicml/mpt-30b | 0261af71d7177453889f868d26607dec8d5aaa2e | 55.9 | 82.4 | 47.9 | 38.4 | 56.2 |
| mosaicml/mpt-7b-8k-chat | ef97b878a279cd1765fbed7b8321fb3cff1aa5b5 | 48 | 77.6 | 41.9 | 43.7 | 52.8 |
| mosaicml/mpt-7b-chat | 64e5c9c9fb53a8e89690c2dee75a5add37f7113e | 46.5 | 75.5 | 37.6 | 40.2 | 50 |
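For working with these records programmatically, a minimal sketch: it ranks a few rows copied from the table above by their stored `score`. The inline list is illustrative only; loading the full dataset from wherever it is hosted is not shown here.

```python
# Illustrative subset of the table above: model identifier and stored score.
records = [
    {"model": "teknium/CollectiveCognition-v1.1-Mistral-7B", "score": 66.6},
    {"model": "teknium/OpenHermes-2.5-Mistral-7B", "score": 66.3},
    {"model": "mosaicml/mpt-7b-chat", "score": 50.0},
]

# Rank models from highest to lowest score.
ranked = sorted(records, key=lambda r: r["score"], reverse=True)
for rank, rec in enumerate(ranked, start=1):
    print(f"{rank}. {rec['model']}: {rec['score']}")
```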