Each record describes one evaluated model: its Hub id, the revision and commit that were evaluated, the per-benchmark results, and an aggregate score.

| Column | Type | Notes |
|---|---|---|
| `model` | string | length 4-89 |
| `revision` | string | 1 distinct value (`main` in every row below) |
| `model_sha` | string | length 0-40 (empty for one row below) |
| `results` | dict | keys `arc:challenge`, `hellaswag`, `hendrycksTest`, `truthfulqa:mc` |
| `commit` | string | length 40 |
| `date` | timestamp string | type reported as unknown |
| `score` | float64 | range 21.8-83 |
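In the rows below, `score` appears to be the plain average of the four `results` values, rounded to one decimal, and is null when any benchmark value is null; this is only an observation from these rows, not a documented guarantee. A minimal Python sketch of that relationship, assuming the record layout above (the `aggregate_score` helper is hypothetical; the values are copied from the first row of the table):

```python
import json

# One record from the table below, written out as JSON
# (values copied from the lloorree/kssht-euripedes-70b row).
record = json.loads("""
{
  "model": "lloorree/kssht-euripedes-70b",
  "revision": "main",
  "model_sha": "04ae5f2187697a7e9a2d57f327a7131f23d3e927",
  "results": { "arc:challenge": 69.8, "hellaswag": 87.6,
               "hendrycksTest": 70.4, "truthfulqa:mc": 55.5 },
  "commit": "10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3",
  "date": "2023-09-20T10:22:33",
  "score": 70.8
}
""")

def aggregate_score(results):
    """Hypothetical helper: mean of the benchmark values, or None if any is null."""
    values = list(results.values())
    if any(v is None for v in values):
        return None  # e.g. the team-lucid/mptk-1b row, whose score is null
    return round(sum(values) / len(values), 1)

print(aggregate_score(record["results"]))  # 70.8, matching the stored score
```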
Every row below was evaluated at revision `main`, commit `10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3`, on `2023-09-20T10:22:33`; the four `results` values and the aggregate `score` are listed per model.

| model | model_sha | arc:challenge | hellaswag | hendrycksTest | truthfulqa:mc | score |
|---|---|---|---|---|---|---|
| lloorree/kssht-euripedes-70b | 04ae5f2187697a7e9a2d57f327a7131f23d3e927 | 69.8 | 87.6 | 70.4 | 55.5 | 70.8 |
| lloorree/kssht-dahj-70b | 85901aab2c3faf09de5ba8e9d65ec03aee4b20e4 | 70.8 | 87.3 | 70.4 | 58.9 | 71.8 |
| lloorree/kssht-castor-70b | e49a6bdc5e6024fb0e60dbba4601b346b4369377 | 69.5 | 87.5 | 70.4 | 56.3 | 70.9 |
| StudentLLM/Alpagasus-2-13b-QLoRA-merged | dacbafa40716a2d87e593240cc5c1dc883b5066a | 61.1 | 82.5 | 55.3 | 38.5 | 59.4 |
| radm/Philosophy-Platypus2-13b | fdf66c76a3c55cbac3ec1e8964d993521343a493 | 58.6 | 78.5 | 54.3 | 37.3 | 57.2 |
| lizhuang144/starcoder_mirror | eb5f39bac15ccab9463001aa203e33d49f4ff7cb | 31.3 | 45.8 | 29.3 | 43.4 | 37.4 |
| rameshm/llama-2-13b-mathgpt-v4 | c5072a762070c6b3756385c63805348c155004b5 | 50.9 | 75.6 | 43.8 | 42 | 53.1 |
| abhishek/llama2guanacotest | 679d17809939a0bf9b79bbb027898cbea64045b2 | 51.6 | 77.6 | 48.5 | 43.9 | 55.4 |
| abhishek/autotrain-llama-alpaca-peft-52508123785 | | 52.2 | 76.9 | 37.6 | 32.9 | 49.9 |
| team-lucid/mptk-1b | aea467410ae0cead4fded6b98a3575e92b22862f | 22.7 | 25.5 | 27.1 | null | null |
| shaohang/SparseOPT-1.3B | 06249d582b0cfefac537dd6bee2e578002ffff00 | 27.1 | 48.7 | 25.6 | 39.1 | 35.1 |
| clibrain/Llama-2-7b-ft-instruct-es | b62f431c88b232204ea7046f9d906ae1daa68437 | 53.7 | 77.8 | 46.6 | 38.8 | 54.2 |
| CalderaAI/13B-Thorns-l2 | adc5e7befcc3d0a26f46198fdda4a098a2742fe6 | 62.9 | 83.6 | 57 | 49.5 | 63.2 |
| CalderaAI/13B-BlueMethod | 315aa0924dd42840b8cced581c9db1240f9bae1d | 59.6 | 82.1 | 50.3 | 47.7 | 59.9 |
| CalderaAI/30B-Epsilon | 6962638c2b0368ad496af6e20e46e3de97a7772b | 63.1 | 83.6 | 56.9 | 59 | 65.6 |
| CalderaAI/13B-Legerdemain-L2 | d6624ce1bcc6b50c86b86e879a8c9822218b84d2 | 61.3 | 83.3 | 56 | 42 | 60.6 |
| CalderaAI/13B-Ouroboros | 97981254d4b0ac0d1472376f602c004670070fdd | 57.4 | 82.1 | 51.4 | 48 | 59.7 |
| CalderaAI/30B-Lazarus | 24da9e88f2b2b7946bc6fe9412d6728b9adc2c3d | 64.9 | 84.3 | 56.5 | 58.6 | 66.1 |
| ashercn97/manatee-7b | e66094c43ffe6c5b3f4164cd4ba048d3bc422fd0 | 54.5 | 78.9 | 49.3 | 46.8 | 57.4 |
| ashercn97/giraffe-7b | 9af88449bed5be4709befcfbbba123ee75805479 | 47.2 | 75.5 | 38.9 | 38.5 | 50 |
| ajibawa-2023/scarlett-33b | 305eea72fb9fe2ac5929a62483ea51f152bcc060 | 67.7 | 85.5 | 59 | 61.1 | 68.3 |
| ajibawa-2023/Uncensored-Frank-7B | 65bbcb80158a6d2e133bba99a90142caf4e2e242 | 54.3 | 76.5 | 37.5 | 43.9 | 53 |
| ajibawa-2023/carl-7b | de4c7af9598bebc47dd43253c972be719f3195d6 | 53.5 | 78.3 | 34 | 40.3 | 51.5 |
| ajibawa-2023/carl-33b | 5f80b372b493d901cab4490b4f23c71499023615 | 64.6 | 85.3 | 58.4 | 45.3 | 63.4 |
| ajibawa-2023/scarlett-7b | 0715b738e750830ba7213f26fe32fa1cc1bb15b3 | 57.2 | 80.3 | 36.1 | 48.5 | 55.5 |
| ajibawa-2023/Uncensored-Frank-13B | 73a27445e5e5a72857626e551c70542ec607f60c | 61.6 | 82.6 | 54.6 | 48.3 | 61.8 |
| nkpz/llama2-22b-daydreamer-v3 | e6c74222958328e50712aa00294dc818c24075b2 | 56.1 | 80.1 | 52.5 | 42.4 | 57.8 |
| nkpz/llama2-22b-chat-wizard-uncensored | 90cffebc8f530161505b84740ff6c8f646299d6c | 56.2 | 80.4 | 53.6 | 45.8 | 59 |
| Faradaylab/Aria-70B | 57cd251f2cf4e832f64550ea0e2b90ecec155b54 | 64.5 | 85.9 | 63.9 | 52.8 | 66.8 |
| Faradaylab/ARIA-70B-V2 | 2bf026af438d522268533484a85a3e54178e7809 | 62.1 | 85.7 | 63.5 | 49.8 | 65.3 |
| simsim314/WizardLM-70B-V1.0-HF | 97112db6d0fae8354c13437a5e7dc99fb37b8c2e | 64.1 | 85.5 | 64.8 | 54.8 | 67.3 |
| PocketDoc/Dans-CreepingSenseOfDoom | efc7cbc5d0461c137e8ea0c83e54bc5357188783 | 53.3 | 78.9 | 48.1 | 37.8 | 54.5 |
| PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged | a7e5484df8aceae7800ae9301a3954cf74b527e9 | 58.8 | 81.8 | 48.1 | 41.2 | 57.5 |
| PocketDoc/Dans-PersonalityEngine-30b | 1990b46a2e2ac1f6282d961bce691ceceafed514 | 63.5 | 84.4 | 59 | 47 | 63.5 |
| royallab/Pygmalion-2-13b-SuperCOT | 763b3fd5afc3e7fb6c7c8768d40f06901c8d5913 | 63.2 | 83.7 | 54.9 | 53.1 | 63.7 |
| totally-not-an-llm/PuddleJumper-13b | f3a8a475ff0c6ae37ac8ae0690980be11cac731a | 58.7 | 81.2 | 58.3 | 56.4 | 63.6 |
| totally-not-an-llm/EverythingLM-13b-16k | 8456a856a8b115b05e76a7d0d945853b10ac71e2 | 56.6 | 80.6 | 50.2 | 47.5 | 58.7 |
| bertin-project/bertin-gpt-j-6B-alpaca | 636b17d6044189343475d1889f076aba73036905 | 36 | 54.3 | 27.7 | 43.4 | 40.4 |
| openaccess-ai-collective/manticore-30b-chat-pyg-alpha | 0cff8e9718e57202171003d556d2e6630061879d | 64.2 | 84.4 | 57.5 | 51.6 | 64.4 |
| openaccess-ai-collective/manticore-13b-chat-pyg | f9ef65a3cf50e3c09ccb443f99225148e08517aa | 58.5 | 82 | 48.8 | 48.8 | 59.5 |
| openaccess-ai-collective/wizard-mega-13b | 76e90314541be6cfa2b55208831c99f1351c1a33 | 57.3 | 81.1 | 50.6 | 50.2 | 59.8 |
| posicube/Llama2-chat-AYT-13B | dd12dced8076a959c03b8b5c4a4266f234d6639a | 63.3 | 83.5 | 59.7 | 55.8 | 65.6 |
| PSanni/Deer-3b | 53ea8f8862fc1820f0cd31f62953b7290fd79867 | 38.5 | 57.4 | 25.6 | 40 | 40.4 |
| acrastt/Marx-3B | c0dcc44989cf4e006efae31abbcef7e8be8547c0 | 43.2 | 72.7 | 28.5 | 39.1 | 45.9 |
| acrastt/RedPajama-INCITE-Chat-Instruct-3B-V1 | e19eef572d57fc734bf3ea07c7d0098b3901ec9b | 42.6 | 67.5 | 26 | 33.6 | 42.4 |
| acrastt/Griffin-3B | edbea6fe86d0bc2673c10269828008a1cb451919 | 41.8 | 72.3 | 26.4 | 38.3 | 44.7 |
| acrastt/Marx-3B-V2 | 5fba568304f6f876f5b9e42026f986ea245b836b | 44 | 72.9 | 27.8 | 39.9 | 46.2 |
| acrastt/Puma-3B | 1159e9cdd05c03d31331f329ba58e4e3444943be | 41.3 | 71.8 | 27.5 | 38.3 | 44.7 |
| acrastt/OmegLLaMA-3B | 520c5f1ceb5c90d4011887e2a8d3becf15e7e66e | 40.4 | 66.1 | 28 | 33.3 | 42 |
| acrastt/Bean-3B | 4a1ce189a3fb1d58b3fa47ebe30b3c037592670c | 40.4 | 72 | 26.4 | 36.1 | 43.7 |
| health360/Healix-410M | df5a3cec54a0bdd22e1644bfe576c7b58eca6bfd | 25.1 | 32 | 24.9 | 44.4 | 31.6 |
| health360/Healix-3B | 52297e0b6845b3c1b26f336fd2a2c9b2f56ce6ba | 37.7 | 65.9 | 26 | 37.4 | 41.8 |
| Phind/Phind-CodeLlama-34B-v2 | 949f61e203f91b412efe8f679c798f09f0ff4b0c | 24.6 | 27.6 | 25.8 | 48.4 | 31.6 |
| Phind/Phind-CodeLlama-34B-v1 | b073c9bb418ae52ca76b4ab48ac2dfbc8622f434 | 27.1 | 28.3 | 28.9 | 44.9 | 32.3 |
| Phind/Phind-CodeLlama-34B-Python-v1 | 3aabef8c9bc1b3ec2fffed053645bc1e2d829b6c | 24.7 | 29.8 | 28 | 45.3 | 32 |
| marcchew/test1 | 7444355ad764584ef05805f58ccf174bb03e0f46 | 27.6 | 26.2 | 24.5 | 48.3 | 31.6 |
| marcchew/Marcoroni-7B-LaMini-40K | 27868e4faed5d68d059c8c57dbd3e24e4933ca28 | 27.6 | 26.2 | 26.9 | 47.4 | 32 |
| marcchew/LaMini-40k-Platypus2-7B | e8c03e43eab479a216b5f4f182a711c3624f38bd | 28.5 | 26.3 | 27 | 47.4 | 32.3 |
| marcchew/Platypus-2-7B-LaMini-14K | 50199ba51c4d002cc86cf3fb2ac921ec52bf4828 | 29.5 | 26.2 | 23.1 | 48.3 | 31.8 |
| SebastianSchramm/Cerebras-GPT-111M-instruction | 09f1ec782ae2243fc605b24eb13ec8d5e4fd2734 | 24.4 | 26.1 | 25.9 | 49.5 | 31.5 |
| ziqingyang/chinese-llama-2-7b | 557b5cbd48a4a4eb5a08e975c4b6e11ac1ed4cbc | 44.5 | 69.5 | 37.5 | 37 | 47.1 |
| ziqingyang/chinese-alpaca-2-7b | ab2476bffedeed752daedd77e71900578e136e7c | 49.6 | 72.6 | 46.5 | 48.6 | 54.3 |
| ziqingyang/chinese-alpaca-2-13b | 576094cbf4988baf88b3bb66678be1db70bd720a | 58.7 | 79.7 | 55.1 | 50.2 | 60.9 |
| ziqingyang/chinese-llama-2-13b | 484c8a18b02f95eb2b6f6302105cf9a329e76ec8 | 55.8 | 79.5 | 53 | 38.2 | 56.6 |
| ai-business/Luban-13B | 01b0f2046083dd8d9d8f9e626d78d83eaa1d57dd | 63.1 | 82.8 | 58.7 | 55.5 | 65 |
| gpt2-xl | 33cdb5c0db5423c1879b1b9f16c352988e8754a8 | 30.3 | 51.4 | 26.4 | 38.5 | 36.6 |
| TurkuNLP/gpt3-finnish-13B | ade35fd78ac2c29f7a56ffd3087321d297bb97a9 | 24.7 | 46.8 | 23.5 | 44.5 | 34.9 |
| TurkuNLP/gpt3-finnish-small | 20a19af481bf59f38610a2977b2b513e9df51e3a | 20.5 | 28.1 | 24.5 | 46.5 | 29.9 |
| NobodyExistsOnTheInternet/GiftedConvo13bLoraNoEcons | 9d7031e7d956dd2d25c61d85f594d115ce65b172 | 59.4 | 83.2 | 55.2 | 40.6 | 59.6 |
| NobodyExistsOnTheInternet/GiftedConvo13bLoraNoEconsE4 | f3d421aadb29830345bf392f793ce3c33e7d68c5 | 59.9 | 84.1 | 54.7 | 41.9 | 60.2 |
| NobodyExistsOnTheInternet/PuffedLIMA13bQLORA | 7da6d235d625e16c850ccd0b947dee40071b1f89 | 59.9 | 84.4 | 53.7 | 39.9 | 59.5 |
| NobodyExistsOnTheInternet/PuffedConvo13bLoraE4 | 40e4fce0c25bd23f6011b424748ee2b5374b98d5 | 59.6 | 84.4 | 53.7 | 39.8 | 59.4 |
| robowaifudev/megatron-gpt2-345m | b39f8d00fb9f33da4271be2035da848da896a23b | 24.2 | 39.2 | 24.3 | 41.5 | 32.3 |
| JosephusCheung/Qwen-LLaMAfied-7B-Chat | 4d70cf0047a7a5cd2c864bc2606e81f0830e4405 | 50.9 | 83.5 | 53.5 | 46.1 | 58.5 |
| JosephusCheung/Guanaco | bed6f3bd18f07a4a379525645cbd86d622b12836 | 50.2 | 72.7 | 30.3 | 37.6 | 47.7 |
| lu-vae/llama2-13b-sharegpt4-test | 2be36a2dab4ed0f97727a1508367f53d59950818 | 58 | 82.7 | 56 | 48.3 | 61.2 |
| lu-vae/llama2-13B-sharegpt4-orca-openplatypus-8w | ad086aacf0176911133b6cccfb34364afce9de5a | 62.8 | 84 | 55.1 | 45.7 | 61.9 |
| csitfun/llama-7b-logicot | 8e9c93c09e6a6c7d504c88d6ca598144829bced8 | 47 | 72.6 | 38.9 | 43.6 | 50.5 |
| vihangd/smartyplats-3b-v1 | 89272b9edb323f5ace09e097a6449554c0dcd4e7 | 40.5 | 70.9 | 25.3 | 36.5 | 43.3 |
| vihangd/smartyplats-3b-v2 | 920609897049f674bc4a9678579f6869f6cbed13 | 41 | 71.2 | 24.3 | 36.7 | 43.3 |
| lgaalves/gpt2_camel_physics-platypus | 66165ff32ed8de6c39f3524a810f5e97ba6d3347 | 23 | 31.3 | 26.9 | 39.6 | 30.2 |
| lgaalves/gpt2_platypus-camel_physics | 66165ff32ed8de6c39f3524a810f5e97ba6d3347 | 23 | 31.3 | 26.9 | 39.6 | 30.2 |
| lgaalves/gpt2_platypus-dolly-guanaco | bfa144d3eb087e54f1798fd2e2fb17e894cc39d3 | 23.2 | 31 | 26.2 | 40.3 | 30.2 |
| lgaalves/llama-2-7b-hf_open-platypus | c7e776f3f3afc0fa22cb7aff0d00522e571e9b29 | 51.5 | 78.6 | 43.6 | 43.7 | 54.4 |
| lgaalves/llama-2-13b-hf-platypus | 39e07f6213a64d79cf31e9c0773dea6224f7f021 | 58.9 | 82.1 | 55 | 42.8 | 59.7 |
| lgaalves/gpt2-dolly | 52fcf61a8eef255a981be6efde187481086e1a48 | 21.8 | 30.8 | 24.7 | 42.2 | 29.9 |
| lgaalves/gpt2_guanaco-dolly-platypus | 6bf0a8146cf255c829ec2ad83926c8b80945b431 | 23.5 | 31 | 26.4 | 40 | 30.2 |
| lgaalves/gpt2_open-platypus | 745c1864b752525789cad2b75166c519a327325e | 22.2 | 31.3 | 26.2 | 40.4 | 30 |
| lgaalves/llama-2-13b-chat-platypus | 828aa1020fc7d394fe8ee2c596e3211df7656eac | 53.8 | 80.7 | 54.4 | 46.2 | 58.8 |
| lilloukas/GPlatty-30B | 836cf4dcd60ebe2ff09415c72f809d94639e8d35 | 65.8 | 84.8 | 63.5 | 52.4 | 66.6 |
| v2ray/LLaMA-2-Wizard-70B-QLoRA | 4bff676fe29f56d31961794c062aebc36312446e | 67.6 | 87.5 | 69.1 | 61.8 | 71.5 |
| v2ray/LLaMA-2-Jannie-70B-QLoRA | e552ddca841a2b86e36bbe5f99840afedfdbcd14 | 68.9 | 86.9 | 69.4 | 53.7 | 69.7 |
| FabbriSimo01/GPT_Large_Quantized | c2df1904aa18de22d03ba0fee925e831d8468898 | 27 | 26.3 | 24.1 | 48.5 | 31.5 |
| FabbriSimo01/Cerebras_1.3b_Quantized | e2126a42a1c8a938553dd513e4adafec41cb793e | 25.9 | 38.6 | 26.8 | 42.7 | 33.5 |
| Charlie911/vicuna-7b-v1.5-lora-mctaco-modified1 | a7749ff092ef03900de34b69d41c767a6a48ea9e | 40.9 | 73.4 | 47.4 | 39.9 | 50.4 |
| Charlie911/vicuna-7b-v1.5-lora-mctaco-modified4 | 715b03c8573df06f3825d1c08b307e2a83fa8bf9 | 40.7 | 73.1 | 47.3 | 41.6 | 50.7 |
| Charlie911/vicuna-7b-v1.5-lora-mctaco-modified2 | 8e1930bbbbdeb4f6f4639e837f09d9878bbf7831 | 42.9 | 74 | 48.5 | 40.4 | 51.4 |
| Charlie911/vicuna-7b-v1.5-lora-mctaco | 883b0fa4158de8207d0a94f4b8cb188e6250aa9d | 45.6 | 75.7 | 49.3 | 43.1 | 53.4 |
| IkariDev/Athena-tmp | a8d08541b0b1c1123d51867a594dce60c241ec34 | 59.2 | 82.1 | 58.9 | 55.4 | 63.9 |
| IkariDev/Athena-v1 | 8f96e561c8c795e383ca0faeb1696fa1e33e87de | 60.1 | 82.6 | 55.6 | 46.6 | 61.2 |
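For completeness, a small sketch of how one might rank these entries by `score` while skipping the null-score row. This assumes the records are available as Python dicts shaped like the schema above; it is not tied to any particular loader, and only a few rows are copied from the table for illustration:

```python
# Records shaped like the schema above; scores copied from a few rows of the table.
records = [
    {"model": "lloorree/kssht-dahj-70b", "score": 71.8},
    {"model": "v2ray/LLaMA-2-Wizard-70B-QLoRA", "score": 71.5},
    {"model": "team-lucid/mptk-1b", "score": None},          # null score in the table
    {"model": "TurkuNLP/gpt3-finnish-small", "score": 29.9},
]

# Drop rows without a score, then sort best-first.
ranked = sorted(
    (r for r in records if r["score"] is not None),
    key=lambda r: r["score"],
    reverse=True,
)

for rank, r in enumerate(ranked, start=1):
    print(f"{rank}. {r['model']}: {r['score']}")
```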