Each record contains the following fields:

| field | type | notes |
|---|---|---|
| model | string | model repository id, 4 to 89 characters |
| revision | string | 1 distinct value (`main` in every row) |
| model_sha | string | model commit hash, 0 to 40 characters (may be empty) |
| results | dict | per-benchmark scores: arc:challenge, hellaswag, hendrycksTest, truthfulqa:mc |
| commit | string | 40-character commit hash (identical for all rows) |
| date | unknown | values are ISO 8601 timestamps |
| score | float64 | aggregate score, ranging from 21.8 to 83 |
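For orientation, the stored `score` appears to track the unweighted mean of the four benchmark values in `results`. The sketch below is not the leaderboard's own code, just a minimal Python illustration of that relationship using the rounded values shown in the table:

```python
# Minimal sketch (not the leaderboard's own code): recompute the aggregate
# score as the unweighted mean of the four benchmark values in `results`.
# The table values are already rounded to one decimal place, so the result
# may differ from the stored `score` by a few hundredths.
def aggregate_score(results: dict) -> float | None:
    keys = ("arc:challenge", "hellaswag", "hendrycksTest", "truthfulqa:mc")
    values = [results[k] for k in keys]
    if any(v is None for v in values):  # e.g. a null truthfulqa:mc leaves score null
        return None
    return sum(values) / len(values)

# First row of the table below:
row = {"arc:challenge": 45.7, "hellaswag": 68.6,
       "hendrycksTest": 26.8, "truthfulqa:mc": 37.8}
print(aggregate_score(row))  # ~44.7, matching the stored score for this row
```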
All rows share revision `main`, commit 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3, and date 2023-09-20T10:22:33. The per-model results are:

| model | model_sha | arc:challenge | hellaswag | hendrycksTest | truthfulqa:mc | score |
|---|---|---|---|---|---|---|
| `OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5` | 626b8c140cfdedb119dfb78c626cd772283dee33 | 45.7 | 68.6 | 26.8 | 37.8 | 44.7 |
| `OpenAssistant/pythia-12b-sft-v8-7k-steps` | 275c9b71bfab4e271d1ed85515c61e317b6ef65e | 44 | 70.3 | 26.6 | 36.5 | 44.4 |
| `OpenAssistant/llama2-70b-oasst-sft-v10` | e68a8a2888097def3c7f4fe5d443866a18d05c6c | 67.1 | 86.4 | 67.7 | 56.4 | 69.4 |
| `OpenAssistant/llama2-13b-orca-v2-8k-3166` | 386700af58cc125fc843a0fe031ae969b267dbba | 56.9 | 80.2 | 55.5 | 46.7 | 59.8 |
| `OpenAssistant/pythia-12b-pre-v8-12.5k-steps` | 37ca702e957a4b740689d67c58c284224e2fbae2 | 41.5 | 68.8 | 26.6 | 36.8 | 43.4 |
| `OpenAssistant/pythia-12b-sft-v8-2.5k-steps` | 142e306db8e279a07c557ea5a919ab7e7a4af17c | 42.3 | 70.1 | 27.4 | 36.8 | 44.1 |
| `OpenAssistant/pythia-12b-sft-v8-rlhf-2k-steps` | a0debfed4a020d449e3d00f4e75f2c2aefb68db3 | 43.4 | 70.1 | 26.1 | 36.1 | 43.9 |
| `OpenAssistant/llama2-13b-megacode2-oasst` | 2c45ecf161da2ff2aa984900f2e4d2b7a7311ab8 | 60.7 | 81.9 | 57.4 | 47.9 | 62 |
| `OpenAssistant/stablelm-7b-sft-v7-epoch-3` | 4c454bfc0e3618b3d574e28ba71369607e637e91 | 36 | 55.8 | 25 | 37 | 38.4 |
| `Azure99/blossom-v1-3b` | 3235ee41e3793c98749b7bbd2bb80882a12ac889 | 36.9 | 55.1 | 26.7 | 43.5 | 40.6 |
| `Azure99/blossom-v2-llama2-7b` | 8c71cdb481ce6bbda3b2042e5526a232ab23825c | 54.1 | 78.6 | 51.7 | 46.8 | 57.8 |
| `illuin/test-custom-llama` | d985610bef080473e40f01c53266083c5f0c3169 | 52.3 | 77.5 | 36.6 | 33.8 | 50 |
| `grimpep/llama2-28B-Airo03` | 28edc75ddcb1b6e83f28d5d1076f065c05a4942a | 58.4 | 81.4 | 53.3 | 47.1 | 60 |
| `grimpep/L2-MythoMax22b-instruct-Falseblock` | 2573392c8dc7a468d1a02d538e4311c4aaa4c42f | 60.5 | 82.1 | 52.9 | 55.8 | 62.8 |
| `grimpep/llama2-22b-wizard_vicuna` | b1fe4450efe20d1330e2e4335deaa23076596070 | 59 | 82 | 54.5 | 46.9 | 60.6 |
| `grimpep/MythoMax-L2-33b` | db6e214fdb9afc91e9ba234940efffc516d3c3f2 | 57.3 | 79.1 | 50.8 | 52.5 | 59.9 |
| `grimpep/llama2-22B-GPLATTY` | 7707d4baf43a8654a77619af02bbf948e07829d6 | 59 | 82 | 54.5 | 46.9 | 60.6 |
| `heegyu/WizardVicuna-Uncensored-3B-0719` | 36841c80535bc3e8403e3cc084e8e65884c75076 | 41.4 | 66.2 | 26.5 | 39.3 | 43.3 |
| `heegyu/WizardVicuna2-13b-hf` | 6cfd95e2dcdb6996afa9eb5c63273a1a3524c6c6 | 55.4 | 79.1 | 48.5 | 42.4 | 56.4 |
| `heegyu/LIMA-13b-hf` | 98faa74a9b41cbd9033904cd58420705936849eb | 57.4 | 81.7 | 48.7 | 41.8 | 57.4 |
| `heegyu/LIMA2-13b-hf` | ed3535921eb24e0737f9a6cda70b1a3fd71532cd | 60.2 | 83.7 | 53.2 | 41.8 | 59.7 |
| `heegyu/WizardVicuna-open-llama-3b-v2` | 1c69905286171d7d3ef3f95f8e1bbc9150bad3cd | 37.7 | 66.6 | 27.2 | 36.8 | 42.1 |
| `heegyu/WizardVicuna-3B-0719` | 62d3d450b8ab2bd2fb9f82383b55d1ecae33a401 | 40.7 | 65.4 | 25.4 | 40.7 | 43 |
| `heegyu/RedTulu-Uncensored-3B-0719` | c92bf022cddc3f57b4552ec3391df487295a2f87 | 40 | 62.5 | 30.4 | 37.6 | 42.6 |
| `heegyu/LIMA2-7b-hf` | 6a1aa59cb7624f059728840ce68b20b1070ebdcb | 53.2 | 80.6 | 43.2 | 44.7 | 55.4 |
| `CobraMamba/mamba-gpt-3b-v2` | 935f4d90bd0fc7117113d3c7b6b6af9dba93183d | 42.2 | 71.5 | 27.1 | 36.7 | 44.4 |
| `CobraMamba/mamba-gpt-3b-v3` | d860a90ef6b30c695b985dd2ff382d4bbb80e857 | 41.7 | 71.1 | 27.3 | 37.9 | 44.5 |
| `CobraMamba/mamba-gpt-3b` | 21a8212e3641dd14924d6bdead0774b64dda8ce0 | 40.5 | 64.9 | 25.3 | 37.1 | 42 |
| `CobraMamba/mamba-gpt-3b-v4` | 49cdf710c1a9178ddf616da79211fdcdb2170c3f | 42.6 | 71 | 30 | 37.3 | 45.2 |
| `FreedomIntelligence/phoenix-inst-chat-7b` | 5ed4d9570e0f76e1becb05bf467a7b4ff7b66055 | 44.7 | 63.2 | 39.1 | 47.1 | 48.5 |
| `Rardilit/Panther_v1` |  | 22.7 | 25 | 23.1 | null | null |
| `jxhong/CAlign-alpaca-7b` | f5cc642a10160a014e2afeefcd57d4781994c51e | 50.9 | 74.5 | 38.6 | 46.9 | 52.7 |
| `upstage/llama-30b-instruct` | fea4312379557e8a1e8073965f560798de369edd | 62.5 | 86.2 | 59.4 | 52.8 | 65.2 |
| `upstage/Llama-2-70b-instruct-v2` | 5f9c77b2c0397cf83d2f97740483f107c7109e8c | 71.1 | 87.9 | 70.6 | 62.2 | 73 |
| `upstage/llama-30b-instruct-2048` | be44a37814a20e790063086703f570732597887a | 64.9 | 84.9 | 61.9 | 56.3 | 67 |
| `upstage/Llama-2-70b-instruct` | 8469429924dc2e1a9394b8095753985668a4052e | 70.9 | 87.5 | 69.8 | 61 | 72.3 |
| `upstage/llama-65b-instruct` | f70a9865cb0a1ac1157ad928b3b428dd85d52946 | 68.9 | 86.4 | 64.8 | 59.7 | 70 |
| `conceptofmind/Hermes-LLongMA-2-7b-8k` | c8755804d0e7d97ec059b1fd867ae3dba742c275 | 49.7 | 72.9 | 28.5 | 38.8 | 47.5 |
| `conceptofmind/Open-LLongMA-3b` | 397d45bba893f6ad2b85a11f273f34289557edae | 39.8 | 65.5 | 24.9 | 34.5 | 41.2 |
| `conceptofmind/LLongMA-2-7b-16k` | 5ffe363bb3e9ca7a24816981f399f67163f3c116 | 52.2 | 76.2 | 38.5 | 39.1 | 51.5 |
| `golaxy/gogpt-7b` | 7eb70c0e330b7d3ff490047ddbb153bb96294882 | 48.8 | 73.8 | 43 | 41 | 51.6 |
| `golaxy/gogpt2-13b-chat` | 6750491b8c720f2cc6f7ec53bbd61fb6efca6c04 | 48.4 | 71.8 | 44.5 | 44.7 | 52.4 |
| `golaxy/gowizardlm` | 385f2d164e7fe780e053276d95d36240f2368c21 | 49.7 | 71.9 | 43 | 47.7 | 53.1 |
| `golaxy/gogpt2-7b` | ee60ed402dedf24b6154aef05df54512e02fc9e2 | 46.8 | 71.5 | 42.8 | 47.9 | 52.2 |
| `golaxy/gogpt-560m` | 82bd8b88b95068eee614a35b790388c5d2415705 | 26.4 | 31.9 | 25.3 | 43.1 | 31.7 |
| `golaxy/goims` | 9ef1045ca31f670d9cbf820af904b33a097cd787 | 49.5 | 72.7 | 43.9 | 44.8 | 52.7 |
| `golaxy/gogpt2-13b` | 16d4c4214fa8d5a962b9064a8b958076b7c79a17 | 48.4 | 71.8 | 44.5 | 44.7 | 52.4 |
| `sauce1337/BerrySauce-L2-13b` | c8788874b78c84bc5593586d16fbd8ae7b5b2991 | 62.3 | 83.8 | 57.1 | 48.3 | 62.9 |
| `sauce1337/AppleSauce-L2-13b` | ba253c52eb85e24987c81e5d36b5a9a00e276ce7 | 61 | 83.6 | 57.1 | 47.8 | 62.4 |
| `nomic-ai/gpt4all-j` | 73c15208cb608be2949b7c6e4ba6d88f0176c267 | 40.5 | 64.6 | 26.4 | 43.2 | 43.7 |
| `doas/test2` | f08d224deae510ebf1408ce38bc2610b1e4c77eb | 29.6 | 26.6 | 24.3 | 48.5 | 32.2 |
| `doas/test5` | b0dae937b7137790d8946794375e1affd51c760a | 28.4 | 26.6 | 25.4 | 47.3 | 31.9 |
| `Brouz/Slerpeno` | 7ff32abd17851a769a031659e91e660f219be363 | 61.7 | 84.1 | 56.8 | 48 | 62.6 |
| `HuggingFaceH4/starchat-alpha` | b693a7a7d52bed1cd7cc0fe00399db838b09c74f | 31.6 | 49.4 | 30.8 | 43.7 | 38.9 |
| `HuggingFaceH4/starchat-beta` | b1bcda690655777373f57ea6614eb095ec2c886f | 52.5 | 80.6 | 42.8 | 47.2 | 55.8 |
| `deepnight-research/zsc-text` | 9b1c704ac76968dbd61597c22610084b975ef576 | 26.7 | 25.8 | 23.1 | 48.4 | 31 |
| `bongchoi/test-llama-2-7b` | ebe2e68699cb7ab6bb22688f265c89be2ac0fa6d | 53.1 | 78.6 | 46.9 | 38.8 | 54.4 |
| `bongchoi/test-llama2-7b` | ebe2e68699cb7ab6bb22688f265c89be2ac0fa6d | 53.1 | 78.6 | 46.9 | 38.8 | 54.4 |
| `ahxt/llama2_xs_460M_experimental` | c8db281477559f5c969a9be794ce236f8a99e1a0 | 24.9 | 38.5 | 26.2 | 41.6 | 32.8 |
| `Quake24/easyTermsSummerizer` | 8df9f96cc14be8f681c40bd1672b3f3540b70e31 | 25.8 | 25.8 | 23.1 | 47.7 | 30.6 |
| `xDAN-AI/xDAN_13b_l2_lora` | a8db938daa42016324291e38c4b45e34536ecbf4 | 61 | 82.6 | 56 | 44.7 | 61.1 |
| `LinkSoul/Chinese-Llama-2-7b` | 72efd71d7f89d9c46008b7a574faf90300ed9ba8 | 53 | 75.6 | 50.7 | 48.9 | 57 |
| `IDEA-CCNL/Ziya-LLaMA-13B-Pretrain-v1` | 826e83e411df32f358893ab21f5eae680499ae9a | 28 | 26 | 27 | 48.6 | 32.4 |
| `cyberagent/open-calm-large` | f9b7a3222967b15169a09bcc86b118ac68a1ad62 | 20.7 | 29.6 | 25.2 | 46.5 | 30.5 |
| `edor/Platypus2-mini-7B` | 4ede4a6f8a8d6cc3bfff8b98837116c74c280f63 | 53.3 | 78.8 | 45.6 | 42 | 54.9 |
| `edor/Stable-Platypus2-mini-7B` | a595cdcbee7562e5ff13ff720245a8c5cf26ffdf | 54.9 | 78.9 | 51.8 | 51.1 | 59.2 |
| `edor/Hermes-Platypus2-mini-7B` | 2797c255626b396cc89c416110a4d785aa5cbe25 | 53.8 | 79.2 | 47.1 | 49.3 | 57.4 |
| `bigcode/santacoder` | 132eb6b6cedaf579c2f333f1ecd78a16d7e45978 | 26.3 | 25.6 | 25.9 | 51.2 | 32.2 |
| `bigcode/starcoder` | 7c6927d25ac2ec0b9e81d98bd54926e36f5c9de1 | 30.3 | 48 | 30 | 41.3 | 37.4 |
| `bigcode/starcoderplus` | 95be82087c33f14ee9941c812a154a9dd66efe72 | 48.7 | 77.3 | 43.7 | 37.9 | 51.9 |
| `Aeala/Alpaca-elina-65b` | 51ce30a69b3c3363c8cfcd6395bf1df974ba2977 | 65.3 | 85.7 | 63.4 | 47.3 | 65.4 |
| `Aeala/VicUnlocked-alpaca-30b` | c63d117d1ec5794766dd6dc5e1469769df8aba1d | 61.9 | 83.8 | 57.6 | 51 | 63.6 |
| `Aeala/GPT4-x-AlpacaDente2-30b` | 9fe5a8dada738f44e7ee9293b2140ae0be021787 | 60.6 | 81.8 | 56.6 | 48.4 | 61.8 |
| `Aeala/GPT4-x-Alpasta-13b` | 50af05b015446110a2dc52a1b4b341142c98e62b | 58.5 | 79.9 | 46 | 53.1 | 59.4 |
| `Aeala/Enterredaas-33b` | d72dc1f05eaf1beb6373fd53fd22eb90f293a5c4 | 60.9 | 84.2 | 58.3 | 49 | 63.1 |
| `jordiclive/gpt4all-alpaca-oa-codealpaca-lora-13b` | 13443d633eaa5b7e1a90ac9cdb4a4d51b1c8d0d1 | 56.1 | 80.9 | 47.7 | 39.5 | 56 |
| `jordiclive/Llama-2-70b-oasst-1-200` | 153b209007e688d713cd670c9972f2827c597b45 | 67.7 | 87.2 | 70 | 51.3 | 69 |
| `LMFlow/Robin-v2` |  | 48.8 | 74.5 | 39.3 | 42.3 | 51.2 |
| `LMFlow/Robin-7b-v2` | ec74e3955d91ae04e48250a658b37093e839e65c | 48.8 | 74.5 | 39.3 | 42.3 | 51.2 |
| `zarakiquemparte/zaraxls-l2-7b` | cc1dad50689b3ebcc1c9c67f275da6b4bb63e2ce | 54.4 | 78.9 | 50.4 | 46.5 | 57.6 |
| `zarakiquemparte/zararp-l2-7b` | 6032c5106970f98d59925959fbd330ae4b1d1a7e | 56.3 | 79.2 | 51.4 | 51.3 | 59.6 |
| `zarakiquemparte/zarablend-l2-7b` | 8b14e71ae3f52c409a25e1ac98dd05e0bb91eaff | 54.4 | 78.6 | 47.6 | 49.4 | 57.5 |
| `zarakiquemparte/zarafusionix-l2-7b` | 13d0e2498a4b5f53f6dc2464f20e093b07a4bd4b | 55.5 | 79.4 | 51.2 | 51 | 59.3 |
| `zarakiquemparte/zaraxe-l2-7b` | 0875bf202aedeef7a58d7382fd6f55f5bca12968 | 57.2 | 79.3 | 51 | 49.1 | 59.2 |
| `zarakiquemparte/zarafusionex-1.1-l2-7b` | 3268ff5291934a14f3f5e7013bbb408f33adb542 | 56.1 | 79.3 | 52.1 | 50.7 | 59.6 |
| `zarakiquemparte/zarablendex-vq-l2-7b` | 0c47d39ac609c39b521b8ca3612f88b391ecd34e | 56.1 | 79.4 | 51.4 | 51.2 | 59.5 |
| `Pirr/pythia-13b-deduped-green_devil` | 7faeb395c26189eeab9bf3a98994696687ad31a3 | 42.3 | 68.9 | 26 | 35.6 | 43.2 |
| `sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0` | a893ebef4b818de1968dd9e932da2f513d16386a | 51.3 | 78.7 | 44.7 | 45.8 | 55.1 |
| `wei123602/FINETUNE3_TEST4` | 5195e87bb34317c5aaf201faa476aae78ecc9f1b | 55.6 | 81.3 | 52.1 | 41.1 | 57.5 |
| `wei123602/llama2-13b-FINETUNE3_TEST2` | 9e6431061bd13852a7435f5fe7a6eb0bbd148e14 | 54.7 | 81.5 | 56.8 | 39.9 | 58.2 |
| `wei123602/llama-13b-FINETUNE3` | bacd035db122dafaf86bf52bb9ca8c613070cc58 | 59.3 | 81.5 | 57.5 | 41.6 | 60 |
| `wei123602/Llama-2-13b-FINETUNE4` | 939d06081210fa943c60210a47583f43b60901ad | 58.7 | 81.9 | 57.2 | 43.3 | 60.3 |
| `wei123602/llama2-13b-fintune2-4E` | 645ede9d6ec60d8fa051bc7ad32ab5f7bfdc066d | 55.9 | 81 | 53.7 | 42.7 | 58.3 |
| `wei123602/llama2-13b-FINETUNE3_TEST` | 22cea7bf138eb0d6c962812df2b2235290acbee2 | 53.7 | 79.7 | 54.5 | 40.2 | 57 |
| `fangloveskari/ORCA_LLaMA_70B_QLoRA` | ef9b04ef02ccc4d96f1181467da92bb6b5baf835 | 72.3 | 87.7 | 70.2 | 63.4 | 73.4 |
| `fangloveskari/Platypus_QLoRA_LLaMA_70b` | b9b8560832276f60ba6bf37ac913b230a85ac19b | 72.1 | 87.5 | 71 | 61.2 | 73 |
| `Rallio67/3B-redpajama-conditional-alpha` | 7e2156c14b4b7981a4cd6db7b878888a98144df0 | 36.3 | 61.9 | 25.4 | 36.3 | 40 |
| `Rallio67/7B-redpajama-conditional-alpha` | 9a3f69a1eba3618930f222d4e013d534102a2af5 | 42.6 | 69.9 | 26.5 | 36.4 | 43.8 |
| `Yehoon/yehoon_llama2` | 443cb81ce988ea6c0b1e20132c170463d559367e | 54.8 | 79 | 51.3 | 49.2 | 58.6 |
| `ai-forever/rugpt3large_based_on_gpt2` | 8201db0de8deb68f25e7309db04d163b71970494 | 22.6 | 32.8 | 24.9 | 43.4 | 30.9 |
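Rows like these can also be consumed programmatically. The sketch below is an assumption-laden example, not part of this dataset's documentation: it presumes the table is published as a Hugging Face dataset and uses a placeholder repo id (`your-org/leaderboard-results` is not a real dataset name).

```python
from datasets import load_dataset

# Placeholder repo id; substitute the dataset this table was exported from.
ds = load_dataset("your-org/leaderboard-results", split="train")

# Rank models by aggregate score, skipping rows where the score is null.
ranked = sorted(
    (row for row in ds if row["score"] is not None),
    key=lambda row: row["score"],
    reverse=True,
)
for row in ranked[:5]:
    print(row["model"], row["score"], row["results"])
```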