Columns:

| Column | Type | Notes |
|---|---|---|
| `model` | string | 4–89 characters |
| `revision` | string | 1 distinct value (`main`) |
| `model_sha` | string | 0–40 characters |
| `results` | dict | per-benchmark scores |
| `commit` | string | 40 characters |
| `date` | timestamp[ns] | |
| `score` | float64 | range 21.8–83 |
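The aggregate `score` appears to be the unweighted mean of the four benchmark values in `results`, rounded for display (e.g. for `augtoma/qCammel-70`: (68.3 + 87.9 + 70.2 + 57.5) / 4 ≈ 71). A minimal Python sketch of that check, with one row hard-coded from the table below; the averaging rule is an inference from the data, not documented dataset logic:

```python
# Sketch: recompute the aggregate "score" as the unweighted mean of the
# four benchmark values in "results". Row values are copied from the table;
# treating the published score as a rounded mean is an assumption.
row = {
    "model": "augtoma/qCammel-70",
    "results": {
        "arc:challenge": 68.3,
        "hellaswag": 87.9,
        "hendrycksTest": 70.2,
        "truthfulqa:mc": 57.5,
    },
    "score": 71.0,  # value reported in the table
}

mean_score = sum(row["results"].values()) / len(row["results"])
print(mean_score)  # ~70.975, shown as 71 in the table
assert abs(mean_score - row["score"]) <= 0.1
```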
Every row in this excerpt shares `revision` = `main`, `commit` = `10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3`, and `date` = `2023-09-20T10:22:33`; the `results` dict is expanded into its four benchmark columns.

| model | model_sha | arc:challenge | hellaswag | hendrycksTest | truthfulqa:mc | score |
|---|---|---|---|---|---|---|
| augtoma/qCammel-70 | cf1e917e42fd1e56ee1edef7ee1a98cbe705c18c | 68.3 | 87.9 | 70.2 | 57.5 | 71 |
| augtoma/qCammel-70v1 | cf1e917e42fd1e56ee1edef7ee1a98cbe705c18c | 68.3 | 87.9 | 70.2 | 57.5 | 71 |
| augtoma/qCammel70 | cf1e917e42fd1e56ee1edef7ee1a98cbe705c18c | 68.3 | 87.9 | 70.2 | 57.5 | 71 |
| augtoma/qCammel-13 | af473e64f6a4fa02a7e24ee7679eea9505eb179d | 60.8 | 83.7 | 56.7 | 47.5 | 62.2 |
| augtoma/qCammel-70x | cf1e917e42fd1e56ee1edef7ee1a98cbe705c18c | 68.3 | 87.9 | 70.2 | 57.5 | 71 |
| alibidaran/medical_transcription_generator | f622239151c89c2db0f1cef495d1b42afd16ce64 | 22.8 | 30.6 | 23.8 | 46.5 | 30.9 |
| wannaphong/openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard | c1068f859a225e50d9d9ec74c572bfaf38573051 | 51.3 | 77.5 | 33.2 | 43.3 | 51.3 |
| liuxiang886/llama2-70B-qlora-gpt4 | 08115ee077953e9c01c6a40f5086def3ecf9f5f0 | 70.3 | 86.4 | 69.3 | 54 | 70 |
| bhenrym14/airoboros-33b-gpt4-1.4.1-PI-8192-fp16 | 1dd7804dbbb547c1be852652ce74568ba41d4e73 | 32 | 53.9 | 31.4 | 38.6 | 39 |
| bhenrym14/airophin-v2-13b-PI-8k-fp16 | 26b7edfd282af223d86d5e539451357bb114247b | 60.6 | 83 | 56.7 | 40.1 | 60.1 |
| bhenrym14/airophin-13b-pntk-16k-fp16 | 6b5418b69e8270df659eacb192f469e7c3af70b3 | 61.4 | 82.8 | 55.4 | 43.3 | 60.7 |
| bhenrym14/airoboros-33b-gpt4-1.4.1-lxctx-PI-16384-fp16 | 468225a547a8cb0a62758d813cf9606b58506ab4 | 25.3 | 26.7 | 23.4 | 49.5 | 31.2 |
| keyfan/vicuna-chinese-replication-v1.1 | 259ab0967975012a546f2362d6cd03ab10768157 | 42.8 | 71.5 | 47.5 | 47.2 | 52.2 |
| uberkie/metharme-1.3b-finetuned | 7335669475711806eb04f8850e4eef91a9d2677d | 20.6 | 28 | 25.3 | 44.8 | 29.7 |
| NoIdeaLand/test-2048-1500ck | dae2a6d32b71fb5f88856a324e594f4f5be2f283 | 36.7 | 62.6 | 25.7 | 41 | 41.5 |
| Sao10K/Stheno-1.1-L2-13B | 0f45a9f834dd216ce25ffa606b3b1ef2c99e7acd | 60.8 | 83.6 | 56.4 | 50.3 | 62.8 |
| Sao10K/Stheno-1.3-L2-13B | 45ba2f603769aa6b97639962f522b8d7398c2393 | 56.8 | 81.7 | 52.8 | 50.2 | 60.4 |
| Sao10K/Stheno-Mix-L2-20B | 6f9dcdaae6ef9071effe63d2107abe8b9712345b | 57.8 | 79.6 | 52.5 | 51.8 | 60.4 |
| Sao10K/Stheno-Inverted-1.2-L2-13B | 8d2e9087093eef1c9173e167beb40b9d034a4655 | 59.4 | 83 | 55.8 | 51.2 | 62.4 |
| Sao10K/Medusa-1.1-L2-7B | df23c3d22bc546dbce0267415e94bdb482446c06 | 56.5 | 78.6 | 51.6 | 47.7 | 58.6 |
| Sao10K/Euryale-L2-70B | 6589310a57ce5d9d6877f353f3d00cda8fa9101c | 68.9 | 87.1 | 68.8 | 54.5 | 69.8 |
| Sao10K/Mythical-Destroyer-L2-13B | 7c87376b201b1c30c4e12c0b7bc2f28f017ce7bc | 58.7 | 82 | 57.7 | 56.3 | 63.7 |
| Sao10K/Stheno-Inverted-L2-13B | efaf592c95ae8e769e0d56d36ba4ed23e3bf4059 | 59.3 | 82.9 | 56.5 | 52 | 62.7 |
| Sao10K/Stheno-1.2-L2-13B | e76f35fe771ef142d6629092bd4a93301fd6cd4a | 60.8 | 83.7 | 56.3 | 50.3 | 62.8 |
| Sao10K/Medusa-13b | be755c9eef8233ca59e0178db75de878f5859222 | 58.2 | 81.3 | 57.4 | 51.2 | 62 |
| Gryphe/MythoBoros-13b | 67695d15e6610bc8055fbcde82f298e48ad2d374 | 58.2 | 81.7 | 50.1 | 48.9 | 59.7 |
| Gryphe/MythoLogic-L2-13b | 665948fc79acc2bcce3e9e7d2b0689ca43ae62d4 | 61 | 83.9 | 55.7 | 48.6 | 62.3 |
| Gryphe/MythoLogic-13b | d89d925ad1eeaee465c4de3e5c74240a5a40b585 | 58.4 | 81.6 | 49.4 | 49.5 | 59.7 |
| Gryphe/MythoMax-L2-13b | faa4ef8c87dbb00d447904ceb048d49b6a463d07 | 60.9 | 83.6 | 55.3 | 52 | 63 |
| Gryphe/MythoMix-L2-13b | eca790fb9394c9c61be27ef709080b3b92783a45 | 61.1 | 83.9 | 55.4 | 52.1 | 63.1 |
| beaugogh/Llama2-7b-openorca-mc-v2 | 1e74a9cca843cdeb8591d4e4f4320dc1870adf1b | 55.5 | 81.3 | 48.3 | 51.5 | 59.2 |
| beaugogh/pythia-1.4b-deduped-sharegpt | 03dfdc25c111a6a4a16d3da12190697611936426 | 34.3 | 54.5 | 24 | 41.8 | 38.6 |
| beaugogh/Llama2-13b-sharegpt4 | 294c40349bf0c5377f71d92e7539bf5de3176a74 | 61.8 | 84.5 | 55.2 | 45.9 | 61.8 |
| beaugogh/Llama2-7b-openorca-mc-v1 | 2c4096fa2129665fb127f1c2a1302f30565a5265 | 55.6 | 80.2 | 48.4 | 51.6 | 59 |
| beaugogh/Llama2-7b-sharegpt4 | 922d1d963ad1b042c30b774a818d9f6180c28075 | 55.9 | 80.8 | 47.2 | 45.7 | 57.4 |
| klosax/open_llama_13b_600bt_preview | 3465eaca4d293ccc6ce66888e6c8bd9032ae7071 | 44.3 | 72.4 | 31.5 | 34.7 | 45.7 |
| klosax/openllama-3b-350bt | 4ed354d8f9537fe0a7400772eece1a93f2bd1366 | 36.5 | 60.9 | 26.8 | 35 | 39.8 |
| klosax/pythia-160m-deduped-step92k-193bt | 9eac24dad1bd7194e38ce8083a0197cee456456c | 24.2 | 32.3 | 24.5 | 43.5 | 31.1 |
| klosax/open_llama_3b_350bt_preview | 4ed354d8f9537fe0a7400772eece1a93f2bd1366 | 36.5 | 60.9 | 26.8 | 35 | 39.8 |
| klosax/open_llama_7b_400bt_preview | 4cd3a97dcc9c25b44b552ab53f0ec01ec36acc8d | 39.5 | 65.9 | 27.6 | 36 | 42.2 |
| AIDC-ai-business/Marcoroni-70B | bf4adbef7d1817e5458d0171fa8f71a330e2711c | 73 | 87.5 | 70.8 | 64.5 | 74 |
| AIDC-ai-business/Marcoroni-13B | 9ae7c49f59e7d493bda183aeb2dde0ce9f6d4705 | 63.3 | 83 | 58.8 | 55.8 | 65.2 |
| AIDC-ai-business/Marcoroni-7B | 8f485ca1ad32d324cc87106af40de225135617a2 | 58.1 | 80.1 | 51.4 | 50.8 | 60.1 |
| migtissera/Synthia-7B | 4f9e95665d95b4c692910190ff77257216e476f1 | 56.1 | 78.6 | 50.3 | 45 | 57.5 |
| migtissera/Synthia-70B-v1.1 | 05a13f6adfe95a713dff04dc2eaa214c77c2512a | 70.1 | 87.1 | 70.3 | 57.8 | 71.3 |
| migtissera/Synthia-13B | fbb23bc41438b016f1df1e9180c6c350a03557ea | 60 | 81.9 | 56.1 | 47.4 | 61.4 |
| migtissera/Synthia-70B | d63dfdd0baed756981f5f78f7419fd822c572362 | 69.5 | 87.1 | 68.9 | 59.8 | 71.3 |
| migtissera/Synthia-34B-v1.2 | 42c2e521c1de5f83f2d3f537ceac71ede63e988d | 54.9 | 74.3 | 53.2 | 44.7 | 56.8 |
| migtissera/Synthia-70B-v1.2b | 7b687d6e4101b8bb8cc4062f8a318d639098a55d | 68.8 | 87.6 | 68.8 | 57.7 | 70.7 |
| migtissera/Synthia-70B-v1.2 | 9b92ee1093b125035ba1649dca6f4ceb9d86a656 | 70.5 | 87 | 70.1 | 58.6 | 71.6 |
| llama-anon/instruct-13b | 142e198df473fd0cd4370b0d50be5f57e1da399b | 56.1 | 80.3 | 47.9 | 37 | 55.3 |
| Undi95/UndiMix-v1-13b | fd311f52648825d6988d2f945918468ceb32289f | 59.5 | 82.5 | 55.8 | 49.8 | 61.9 |
| Undi95/Unholy-v1-12L-13B | ee25c078f08b0812d82597afa3f5e877c19a5c83 | 63.6 | 83.7 | 58.1 | 51.1 | 64.1 |
| Undi95/CreativityEngine | 7870cc50b82b5cbebfa9935b6d73a9d20170299a | 59.3 | 82.4 | 53.6 | 52.5 | 62 |
| Undi95/OpenRP-13B | d11815287c51ef51485fb003f8f72773cf6f19a4 | 62.1 | 82.6 | 57.5 | 48.3 | 62.6 |
| Undi95/ReMM-v2-L2-13B | bc42c77f88482c37c72c85c66135e99972bbca1b | 61.9 | 84 | 56.1 | 50.8 | 63.2 |
| Undi95/ReMM-L2-13B | c4710577003a23ca8e9040d16dfb8f3e9bc5d636 | 59.7 | 83.1 | 54.1 | 49.9 | 61.7 |
| Undi95/MLewd-L2-13B | feb1fa71e0b24261d3ca428b4aed881dd31f166e | 58.3 | 82.3 | 54.7 | 48.7 | 61 |
| Undi95/CodeEngine | f57879831c39f2dcb656cb2c9e9ce5878e92bb44 | 58.4 | 82.3 | 54.2 | 45.2 | 60 |
| Undi95/Nous-Hermes-13B-Code | 5a45cb2a6442581ce32cc19c561c49cec1db4ebb | 61.2 | 83.2 | 55.1 | 50.6 | 62.5 |
| Undi95/ReMM-SLERP-L2-13B | 27baccf242bc1dc34fc39661a40bbf867cbea8b5 | 60.9 | 83.6 | 55.3 | 52 | 63 |
| Undi95/LewdEngine | 6e918ff9f563552af4ad66f4308f6d040e24af4b | 60.5 | 83.1 | 54.8 | 43.6 | 60.5 |
| Undi95/MLewdBoros-L2-13B | a3033ac5825662f1c66418d7543648dc76980185 | 62.5 | 83.9 | 56.6 | 48.1 | 62.8 |
| Undi95/ReMM-v2.1-L2-13B | e6b5ac97f74355cb281a621261debe5720fb4da2 | 61.4 | 83.9 | 56 | 50.3 | 62.9 |
| Undi95/MLewd-L2-Chat-13B | 6c66622a99c1bc73498aa6a15a59da825d875310 | 62 | 84.2 | 58.8 | 52.8 | 64.4 |
| Undi95/ReMM-L2-13B-PIPPA | 79e711178c6881496ae1f5635b08bc193f370709 | 59.7 | 83.1 | 54.1 | 49.9 | 61.7 |
| Undi95/UndiMix-v4-13B | 6dd97c74cfe1d22432d5c993814e230f333ba401 | 61.9 | 83.9 | 56.9 | 49 | 62.9 |
| notstoic/PygmalionCoT-7b | c03ac527360663d17bb142405251028eec843ed9 | 51.5 | 76.9 | 33.3 | 48.1 | 52.4 |
| elliotthwang/Elliott-Chinese-LLaMa-GPTQ-V1.0 | 01305dc473ba231519fe71e7f4b2d1e3f6aa9bc8 | 50.7 | 75.4 | 49.3 | 44.7 | 55 |
| elliotthwang/Elliott-Chinese-LLaMa-GPTQ | bbbca62bb340b4ae0a19ba93dae38fc9f9787c16 | 51 | 75.2 | 49.6 | 45.1 | 55.2 |
| elliotthwang/Elliott-Chinese-LLaMa-GPTQ-V2.0 | ebffe57ba6cc70b60ff5295889abc62d91eeb4dd | 50.8 | 75.4 | 49.4 | 44.7 | 55.1 |
| MBZUAI/LaMini-GPT-774M | 4f3bd4b37d249e6aa335be677afd39f417e05b5d | 27.6 | 43.8 | 26.3 | 40.3 | 34.5 |
| MBZUAI/lamini-cerebras-1.3b | 502e70081df53edc8a9156acf5a26a11a9dad8fb | 26.9 | 38 | 28.4 | 36.4 | 32.4 |
| MBZUAI/lamini-neo-1.3b | a5c7ecc4d908e7a9469d080308af64ae775c733d | 32.8 | 49.1 | 28.8 | 41.1 | 38 |
| MBZUAI/LaMini-GPT-1.5B | 88ca6f5abe2335bac317e82684e574afdd6046b5 | 31.4 | 48.4 | 29.9 | 42.5 | 38 |
| MBZUAI/lamini-cerebras-111m | e8e347b02f9305e4bc144eb9be2821c518d43183 | 22.1 | 27.1 | 25.5 | 43.8 | 29.6 |
| MBZUAI/lamini-cerebras-256m | 72df0b6d62d64002575687ea2edbb0df05712678 | 21.8 | 28.7 | 26.7 | 41.8 | 29.8 |
| MBZUAI/lamini-neo-125m | f01e73ba67da96f6645be3067158cc493b0cbbcb | 24.6 | 30.2 | 26.7 | 42.9 | 31.1 |
| rombodawg/LosslessMegaCoder-llama2-7b-mini | 186b105d61054611d0b921a55c220d41c6aefe43 | 53.5 | 77.4 | 49.7 | 45.8 | 56.6 |
| rombodawg/LosslessMegaCoder-llama2-13b-mini | 1f5609ffd40bc3af2dcbc5c88e9312d47a73c4b4 | 60.6 | 81.3 | 57.9 | 48.9 | 62.2 |
| Locutusque/gpt2-large-conversational | 6674ad1ed9f518054561b866172eb88b7a769413 | 27 | 45 | 26.3 | 39.6 | 34.5 |
| Kunhao/pile-7b | aa1c2fff615235b007e15ce191b35816959ace99 | 26.8 | 38.8 | 26.5 | 42.4 | 33.6 |
| georgesung/llama2_7b_chat_uncensored | e9a972b12c6b59bfbcf30fe3779c2c933ce755bd | 53.6 | 78.7 | 44.5 | 41.3 | 54.5 |
| digitous/Javelin-GPTJ | bee7068ab002784420a1a30170db3906185359f2 | 42.7 | 70.5 | 26.2 | 36.1 | 43.9 |
| digitous/13B-Chimera | 85cfe8e6db2bee804873cfdb48955696cc5b0689 | 57.6 | 81.5 | 49.9 | 52.6 | 60.4 |
| digitous/Janin-R | f6963f77098d8421ff4a1cf4d36f1e94c6c8f44b | 40.4 | 67.4 | 31.2 | 34.5 | 43.4 |
| digitous/Janin-GPTJ | a6773861798f2abea3849514aa6f60961518af9c | 40.9 | 67.3 | 27.4 | 36.2 | 43 |
| digitous/Alpacino30b | 300bc5f3dc129a3d17adf059394e381eff7fbd55 | 62.7 | 85 | 58.5 | 44.2 | 62.6 |
| digitous/13B-HyperMantis | aa828ef92c363a5577ffd7d29e678277b9d2eb3c | 58.5 | 82.2 | 50.6 | 47.5 | 59.7 |
| digitous/Alpacino13b | 7092a5c8dec649694dd66ff8cfe5452ce52e6a40 | 58.5 | 81.3 | 47.9 | 41.7 | 57.4 |
| digitous/GPT-R | 92b955a3ff74aa577fa0d8517dfc314847ef60af | 41.2 | 66.9 | 36.5 | 34.2 | 44.7 |
| digitous/Javalion-R | b881231ab6ea85da2a9a139f282df85d1d18b002 | 41.7 | 68 | 30.8 | 34.4 | 43.7 |
| digitous/Skegma-GPTJ | 4dff006b2ea7e8d9b067dfe8af8ca1a16bc44dce | 43.8 | 69.2 | 25.4 | 34.7 | 43.3 |
| digitous/Javalion-GPTJ | 3ce176bc0f91cae416c78e99f964f54b12472de0 | 41.9 | 68.7 | 26.8 | 35.4 | 43.2 |
| digitous/Adventien-GPTJ | 4fbfe9eae03a1d6ecf60fda8cf39c4123f0438bd | 42.5 | 69.2 | 25.4 | 36.9 | 43.5 |
| digitous/Javelin-R | 4c4a5caf5d9049a47f5565b72e5a53dede08ac8b | 41.6 | 69 | 30.7 | 34.5 | 43.9 |
| OpenAssistant/galactica-6.7b-finetuned | d86db70e16111175ff7900f71d40806ccf4b8491 | 41.6 | 51 | 38 | 41.6 | 43 |
| OpenAssistant/llama2-13b-orca-8k-3319 | 160f58ec85ef25ad935eb583f14c7e8c7f7e7839 | 60.8 | 81.9 | 57.1 | 42.6 | 60.6 |
| OpenAssistant/codellama-13b-oasst-sft-v10 | 612dab2a8b2d77edb4fd36cfc28b3ffbbb20ffc1 | 43.9 | 62.8 | 37.2 | 46.1 | 47.5 |
| OpenAssistant/oasst-sft-1-pythia-12b | 293df535fe7711a5726987fc2f17dfc87de452a1 | 46.4 | 70 | 26.2 | 39.2 | 45.4 |
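A minimal sketch of working with rows of this shape, with a handful of entries hard-coded from the table above; it only illustrates ranking by the aggregate `score` and is not a loader for the actual dataset files:

```python
# Rank a few of the rows above by aggregate score (illustrative subset only).
rows = [
    {"model": "AIDC-ai-business/Marcoroni-70B", "score": 74.0},
    {"model": "migtissera/Synthia-70B-v1.2", "score": 71.6},
    {"model": "augtoma/qCammel-70", "score": 71.0},
    {"model": "OpenAssistant/oasst-sft-1-pythia-12b", "score": 45.4},
]

for rank, row in enumerate(sorted(rows, key=lambda r: r["score"], reverse=True), start=1):
    print(f"{rank}. {row['model']}: {row['score']}")
```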