Each record in this results set has the following fields:

- model (string, length 4–89): Hugging Face repository ID of the evaluated model
- revision (string, 1 distinct value): model revision that was evaluated
- model_sha (string, length 0–40): SHA associated with the model revision (may be empty)
- results (dict): per-benchmark scores for arc:challenge, hellaswag, hendrycksTest (MMLU), and truthfulqa:mc
- commit (string, length 40): commit hash recorded with the results
- date (type unknown in the schema; values are ISO 8601 timestamps): evaluation timestamp
- score (float64, range 21.8–83): overall score; for the rows below it equals the mean of the four benchmark scores, rounded to one decimal
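A minimal sketch of how one record is structured and how the score column relates to the results field, assuming the records are available as plain Python dicts (e.g. parsed from JSON); the values are taken from the tiiuae/falcon-7b row below:

```python
from statistics import mean

# One record from the table below, written out as a plain dict
# (field names follow the schema above).
record = {
    "model": "tiiuae/falcon-7b",
    "revision": "main",
    "model_sha": "378337427557d1df3e742264a2901a49f25d4eb1",
    "results": {
        "arc:challenge": 47.9,
        "hellaswag": 78.1,
        "hendrycksTest": 27.8,
        "truthfulqa:mc": 34.3,
    },
    "commit": "10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3",
    "date": "2023-09-20T10:22:33",
    "score": 47.0,
}

# The score column is the mean of the four benchmark scores,
# rounded to one decimal place.
recomputed = round(mean(record["results"].values()), 1)
assert recomputed == record["score"]  # 47.0
```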
All rows share `revision` = `main`, `commit` = `10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3`, and `date` = `"2023-09-20T10:22:33"`; the remaining fields are tabulated below.

| model | model_sha | arc:challenge | hellaswag | hendrycksTest | truthfulqa:mc | score |
|---|---|---|---|---|---|---|
| OpenBuddy/openbuddy-llama2-13b-v11-bf16 | 4d4e72c553e9d60fdc208663b0a1c0364caa2f30 | 53 | 75.4 | 51.4 | 47.9 | 56.9 |
| OpenBuddy/openbuddy-llama-65b-v8-bf16 | 445b77821fac8e6cfb77d0399fb827400b5bb71e | 62.8 | 83.6 | 62 | 55.1 | 65.9 |
| OpenBuddy/openbuddy-llama2-13b-v11.1-bf16 | 76fb7d00836eb2f1d9c9605d8881d73b782cf324 | 51.8 | 76.2 | 56.1 | 49.7 | 58.4 |
| OpenBuddy/openbuddy-llama2-70b-v10.1-bf16 | a6ee90d262ac729f90ed8de97127766df070074c | 61.9 | 83.1 | 67.4 | 56.2 | 67.2 |
| OpenBuddy/openbuddy-atom-13b-v9-bf16 | 35bb2c73953f6ea40be6f0c8c6b2dfa7ecbaa0df | 51.2 | 76 | 49.3 | 48.7 | 56.3 |
| OpenBuddy/openbuddy-llama2-13b-v8.1-fp16 | b51c6b29abdf7c420cb5e5f4f309ff83179c7bb8 | 56 | 79.8 | 55 | 51.2 | 60.5 |
| OpenBuddy/openbuddy-openllama-3b-v10-bf16 | 7f24d32de53aa4bc150f04ca2418604475173921 | 36.3 | 58.4 | 23.9 | 42 | 40.2 |
| pe-nlp/llama-2-13b-platypus-vicuna-wizard | 71aa919fc15fa9d9def9185791b15a3f76e7bd8d | 61.3 | 82.3 | 55.2 | 41.9 | 60.2 |
| pe-nlp/llama-2-13b-vicuna-wizard | b51bf8c4e132308751cc8b9d9c1131539f79f07f | 57.8 | 82.2 | 54.7 | 41.1 | 59 |
| tiiuae/falcon-7b | 378337427557d1df3e742264a2901a49f25d4eb1 | 47.9 | 78.1 | 27.8 | 34.3 | 47 |
| tiiuae/falcon-40b-instruct | 7475ff8cfc36ed9a962b658ae3c33391566a85a5 | 61.6 | 84.3 | 55.4 | 52.5 | 63.4 |
| tiiuae/falcon-rw-1b | e4b9872bb803165eb22f0a867d4e6a64d34fce19 | 35.1 | 63.6 | 25.3 | 36 | 40 |
| tiiuae/falcon-7b-instruct | eb410fb6ffa9028e97adb801f0d6ec46d02f8b07 | 45.8 | 70.8 | 25.7 | 44.1 | 46.6 |
| tiiuae/falcon-180B | 71a1a70b629e9963f7b4601e82f3f9079d48011e | 69.2 | 88.9 | 69.6 | 45.2 | 68.2 |
| tiiuae/falcon-180B-chat | | 63.8 | 88.1 | 67.8 | 53.3 | 68.2 |
| jlevin/guanaco-13b-llama-2 | f1649a9de898859684b15ef8bf5652a8f86ddcfc | 55.4 | 81.9 | 47.2 | 46.9 | 57.8 |
| Monero/Manticore-13b-Chat-Pyg-Guanaco | de665d6002f1f2ef460408e8fa5bc1e0b7bb99b1 | 56.8 | 82.3 | 47.8 | 52.3 | 59.8 |
| Monero/WizardLM-13b-OpenAssistant-Uncensored | ff8e15fd68119d36ae1f0cebaa87f16e2ad3c732 | 48.5 | 76 | 43.1 | 49.4 | 54.2 |
| Monero/WizardLM-Uncensored-SuperCOT-StoryTelling-30b | e58bafedf660477c206ad64f3118a571951bb28e | 59.6 | 79.9 | 54.4 | 55.9 | 62.4 |
| Monero/WizardLM-30B-Uncensored-Guanaco-SuperCOT-30b | f25d922536e602af035d476a287b68361fdda5de | 55.5 | 80.4 | 54 | 51.3 | 60.3 |
| Devio/test100 | 6bd139260f60232328b05b2cd973c3d8f07c0c02 | 37.4 | 58.5 | 27.3 | 34 | 39.3 |
| Devio/test-1400 | 95194d494effb691edae0d596bc5df9856ee92d7 | 38.1 | 66.2 | 28.6 | 36.9 | 42.4 |
| Devio/testC | 318159010931164dcacb5dc2a7a54d48990fb969 | 39.6 | 62.9 | 27.8 | 35.7 | 41.5 |
| Devio/test-3b | b81c038ee2fa2addd285acde08b1a7ca3cb2854d | 27.6 | 44.8 | 23.5 | 41.4 | 34.3 |
| Devio/test-22B | cd72f5954ab5801dd2c1b499e59265f7504f9ee6 | 39.4 | 64.5 | 27.1 | 37.1 | 42 |
| jondurbin/airoboros-33b-2.1 | 12ccd0e6c9ef12c7d3c2eab8266cd32c0b2f7683 | 63.7 | 85 | 57.4 | 52.2 | 64.6 |
| jondurbin/airoboros-13b-gpt4-1.2 | 482bd38b65e73fde13f5d03fed2bee7acda8fadd | 58.4 | 81.6 | 48.8 | 47.5 | 59.1 |
| jondurbin/airoboros-33b-gpt4 | 5b6bd680b1c008e52521dc8c663dbc87820da3d0 | 63.7 | 84.9 | 58.5 | 47.1 | 63.6 |
| jondurbin/airoboros-l2-13b-gpt4-m2.0 | a852b77f7d0777092c76898bc83f8e657ca2af3e | 59.2 | 81 | 53.7 | 39.7 | 58.4 |
| jondurbin/airoboros-c34b-2.1 | 2caa8ce3aab012bf34c7c531827f6befc7cc1c98 | 54.7 | 76.4 | 55.1 | 46.2 | 58.1 |
| jondurbin/airoboros-7b-gpt4-1.3 | 7b5f77827636bbf3174c48ca16e774c89d71d7bd | 52.5 | 78 | 42 | 35.7 | 52 |
| jondurbin/airoboros-13b-gpt4-1.4 | d0d2687ed2b4a63a644ed6c5b3f6401844718659 | 59.6 | 83.2 | 47.6 | 48.8 | 59.8 |
| jondurbin/airoboros-l2-7b-2.1 | 699491e2e73cc2936205db143f59c1a686b88f14 | 54.4 | 78.7 | 44.5 | 44 | 55.4 |
| jondurbin/airoboros-l2-70b-gpt4-m2.0 | 1cccd0b60a988bf6ddc4e2688895837845afa076 | 70.1 | 87.8 | 70.7 | 49.8 | 69.6 |
| jondurbin/airoboros-33b-gpt4-1.2 | b3254a827fb1dfe0d4e428bf5ab1c3a2bac82d68 | 64.4 | 84.9 | 60.3 | 49.2 | 64.7 |
| jondurbin/airoboros-7b-gpt4-1.4.1-qlora | 91ffa900ed637cf5fd904d96e6985b6f7857ad64 | 52.7 | 77.9 | 38.8 | 36.1 | 51.4 |
| jondurbin/airoboros-l2-13b-gpt4-2.0 | ec556571acc6783fea4414e4ca72d291c563b6dc | 59 | 82.8 | 54.7 | 36.5 | 58.2 |
| jondurbin/airoboros-65b-gpt4-1.3 | 4373e66135c6fb4a6063777c4270a34509e7e932 | 66.1 | 86 | 63.9 | 51.3 | 66.8 |
| jondurbin/airoboros-33b-gpt4-m2.0 | 96af3dc6c9f2248d964cf14cef6e5f2e5894583a | 63.1 | 85.2 | 57.3 | 48.1 | 63.4 |
| jondurbin/airoboros-gpt-3.5-turbo-100k-7b | 53887996c0f17f7711d182537505a895fb404542 | 53.1 | 76.2 | 33.6 | 45.1 | 52 |
| jondurbin/airoboros-33b-gpt4-1.3 | f94e5249d2b998933466d42e08fa9551e3238205 | 63.8 | 85.1 | 58.9 | 45.3 | 63.3 |
| jondurbin/airoboros-l2-70b-2.1 | b512d6cc06dcc41201e564fd4ca78cd6f8e8e6da | 70.6 | 86.8 | 69.2 | 56 | 70.6 |
| jondurbin/airoboros-7b-gpt4-1.2 | 431fda60009d9b37a73211123ffb9c797764e182 | 52.1 | 78.1 | 38.6 | 41.8 | 52.6 |
| jondurbin/airoboros-7b | 7ea67f85ff3a7a8ec77f1819dec3e56779b764b1 | 53 | 77.7 | 37.3 | 43.4 | 52.8 |
| jondurbin/airoboros-l2-13b-gpt4-1.4.1 | 35ff51ebe5668269dfd33a9ed94412d88f1f4b65 | 59.1 | 82.8 | 55.6 | 40.3 | 59.4 |
| jondurbin/airoboros-l2-7b-gpt4-1.4.1 | 77bdd1f049f27876c38b68782fc240518208f391 | 55.1 | 79.6 | 45.2 | 40.3 | 55 |
| jondurbin/airoboros-13b-gpt4-1.1 | 19c7060adcb34d42e742fe51dd36b8657ac069b7 | 59 | 83.1 | 49.4 | 46.6 | 59.5 |
| jondurbin/airoboros-7b-gpt4 | d9bcb0ad365bfacdf95128bc1272b4106aff7be9 | 53.1 | 78.7 | 38.9 | 40.7 | 52.9 |
| jondurbin/airoboros-l2-7b-gpt4-m2.0 | 67729407add902e3d4d36bb105d7c011fb368ea5 | 50.5 | 76.9 | 45.3 | 41.3 | 53.5 |
| jondurbin/airoboros-65b-gpt4-1.4-peft | 85ae3b595c6b8415df87000c22bc14ea18c174f5 | 65.8 | 85.8 | 62.3 | 52.5 | 66.6 |
| jondurbin/airoboros-33b-gpt4-1.4 | 04e1e194247a95cc60ba3cd70d026bc94c1f1764 | 64.4 | 85.1 | 59.5 | 50.5 | 64.9 |
| jondurbin/airoboros-13b-gpt4-1.4-fp16 | 037e369be06a8a0eef87f2cddfd3469670483f29 | 59.6 | 83.2 | 47.6 | 48.8 | 59.8 |
| jondurbin/spicyboros-7b-2.2 | fdf075081555f3ed84c037e8dd3fe85c3b3609d7 | 56.6 | 80.1 | 48.5 | 47.2 | 58.1 |
| jondurbin/airoboros-33b-gpt4-2.0 | ddc598f492f5098a8e308f51a82834f98f29a4ce | 63.8 | 85.7 | 58.4 | 45.6 | 63.4 |
| jondurbin/airocoder-34b-2.1 | f66e783ac783837b3f59f274ecf55f18a9221cd0 | 54.2 | 73.8 | 50.7 | 40.7 | 54.8 |
| jondurbin/airoboros-l2-70b-gpt4-2.0 | f16526d9bb814dc10adc911f94e8c7a520beb5b6 | 68.2 | 87.9 | 70.1 | 49.8 | 69 |
| jondurbin/airoboros-l2-13b-2.1 | 172e30e56e939f73d7d00a165c2d49cbd284481f | 59.5 | 82.5 | 54.8 | 44.6 | 60.4 |
| jondurbin/airoboros-65b-gpt4-1.4 | ae256799615c16443f9c423c653ed9f60577e99e | 65.5 | 85.8 | 61.9 | 52.4 | 66.4 |
| jondurbin/airoboros-65b-gpt4-m2.0 | fa081d52619b35d7016fb40ce855187d6a8e7e4c | 65.1 | 86.3 | 64.3 | 46.6 | 65.6 |
| jondurbin/airoboros-13b-gpt4-1.3 | 32a474742c2a235ca12c96afaea57dcb6b46ef56 | 58.5 | 81.6 | 47 | 45.3 | 58.1 |
| jondurbin/airoboros-13b | 44830f9e1559f318f5dad875bab40d1d1beddbfc | 58.3 | 81 | 50 | 51.6 | 60.2 |
| jondurbin/airoboros-l2-70b-gpt4-1.4.1 | ea98153fa721ed7110c77e73388e3b6f3996f2bb | 70.4 | 87.8 | 70.3 | 55.2 | 70.9 |
| jondurbin/airoboros-65b-gpt4-1.2 | 50ab86e198e1c82ec81aefc628f23501c101d390 | 65.9 | 86.1 | 63.4 | 52.7 | 67 |
| jondurbin/airoboros-7b-gpt4-1.1 | 5a45a16bac51ed9529a6dc2eab7355cc61eefb5b | 54.6 | 80.2 | 39.2 | 41.2 | 53.8 |
| jondurbin/airoboros-65b-gpt4-2.0 | ea4bdd0221f77de9b0343cd8291cbd0fd6033ca8 | 66.8 | 86.7 | 63.4 | 49.2 | 66.5 |
| jondurbin/airoboros-13b-gpt4 | c0eef6e6f63d4b11953539308717cea0079b44f9 | 59.4 | 83.3 | 47.9 | 47.7 | 59.6 |
| jondurbin/airoboros-7b-gpt4-1.4 | cae1ab8991f66bbe66ae95ed23a87846e7343047 | 53.9 | 80.3 | 38.6 | 41 | 53.4 |
| jondurbin/airoboros-l2-7b-gpt4-2.0 | 8432fe95c426ca7709cf2d31a64eee612c4dea42 | 52.9 | 78.5 | 45.1 | 39.5 | 54 |
| gaodrew/OpenOrca-Platypus2-13B-thera-1250 | b1c2ebcda387211732e87911e39edca503502a33 | 59.2 | 81 | 57 | 48.4 | 61.4 |
| gaodrew/gaodrew-llama-30b-instruct-2048-Open-Platypus-100steps | 1114ff08ed15ef417502da58f0237d2f6650c9ce | 61.5 | 84.1 | 60.2 | 51 | 64.2 |
| gaodrew/gaodrew-gorgonzola-13b | a53fbe358d4cb546916847d861ccfaf7c724a103 | 50.9 | 77.7 | 68.9 | 40.6 | 59.5 |
| Lajonbot/vicuna-13b-v1.3-PL-lora_unload | 5582369752583b02df3cba4bd2a733d12265cddb | 54.9 | 80.4 | 52.2 | 49.6 | 59.3 |
| Lajonbot/tableBeluga-7B-instruct-pl-lora_unload | eeb22ca9481a5ed7e131a329324494f234300a45 | 56.2 | 79.1 | 52.7 | 50.2 | 59.6 |
| Lajonbot/Llama-2-7b-chat-hf-instruct-pl-lora_unload | f838fda8d2b97effae1e8af4dbb6217eab14fb7e | 53 | 77.5 | 47.1 | 42.6 | 55 |
| Lajonbot/WizardLM-13B-V1.2-PL-lora_unload | 5f14e6f5ea67fd2840791c46b3e00846cbdb32cf | 58.5 | 81.1 | 55.1 | 46.2 | 60.2 |
| Lajonbot/Llama-2-13b-hf-instruct-pl-lora_unload | 4ef2c736641c2983996c4662bf481782a9de5055 | 59.5 | 82.2 | 54.8 | 41.4 | 59.5 |
| Lajonbot/vicuna-7b-v1.5-PL-lora_unload | 92bf763ce7ae0bfe155bfd60190eed64582e5080 | 53.5 | 76.7 | 49.7 | 49.7 | 57.4 |
| DataLinguistic/DataLinguistic-34B-V1.0 | 6744f1442d8ec2716d091cfddbf5766a1ec8d533 | 27.6 | 33 | 23.1 | 48.7 | 33.1 |
| porkorbeef/Llama-2-13b-public | e1b32a8fcfc0f37fd5f50cf765151897574c73c7 | 29.9 | 26.6 | 22.7 | 49 | 32 |
| porkorbeef/Llama-2-13b-sf | 06253ee259e6b205c4734ab6ec3fa850737b2110 | 29.5 | 26.5 | 26 | 49 | 32.8 |
| porkorbeef/Llama-2-13b | 06253ee259e6b205c4734ab6ec3fa850737b2110 | 29.4 | 26.3 | 24.9 | 48.3 | 32.2 |
| porkorbeef/Llama-2-13b-12_153950 | ee9b0cf26f521b5cb2322d743880e8b6bfadb0b7 | 28.6 | 26.6 | 20.8 | 49 | 31.2 |
| pankajmathur/orca_mini_v3_7b | f9849ea6bf0f6ebb78dca1cea1c7a3ef8f7d715c | 56.9 | 79.6 | 52.4 | 50.5 | 59.8 |
| concedo/Vicuzard-30B-Uncensored | e2329c05a6e59660ba3cbcc01adf30a78f852594 | 63 | 83.7 | 58.2 | 52.3 | 64.3 |
| concedo/OPT-19M-ChatSalad | 3930ca6bf3976e9b603815403cb373398ae509e5 | 24.4 | 25.2 | 23.1 | 51.4 | 31 |
| concedo/Pythia-70M-ChatSalad | 692289413c47c219cf83b1596783a8e9223541eb | 21 | 27.3 | 24.8 | 49.7 | 30.7 |
| guardrail/llama-2-7b-guanaco-instruct-sharded | fc7a3abbc3b9a9b3e163ef3c4844307ac270fca7 | 53.8 | 78.7 | 46.7 | 43.9 | 55.8 |
| chargoddard/ypotryll-22b-epoch2-qlora | 26fdd8fa420d72ed835c7d17086f0441db0985d4 | 59.2 | 80.7 | 54.5 | 40.4 | 58.7 |
| chargoddard/llama2-22b-blocktriangular | 40a51343ae776b5cb39f2b4343ae8f9b676ffd58 | 58.5 | 82.6 | 54.6 | 39.3 | 58.8 |
| chargoddard/MelangeC-70b | e54a2b924dec135f3fa2373933ab8485178cde1b | 71.7 | 87.6 | 70.4 | 58.1 | 72 |
| chargoddard/llama-2-34b-uncode | d434d06249feb6ca511b0a09162130bcc59d84e3 | 39.5 | 33.9 | 38.5 | 40.9 | 38.2 |
| chargoddard/Chronorctypus-Limarobormes-13b | 75c1bf5f4b40cf61873ff6487ccd3efc4f684330 | 59.9 | 82.8 | 58.4 | 51.9 | 63.2 |
| chargoddard/MelangeA-70b | d48cf79d1ead50154b1e70120779ae91bc5fafb4 | 71.2 | 87.3 | 70.6 | 60.6 | 72.4 |
| chargoddard/llama2-22b | 2bece0787009b4b584f49d0e0d1b49ecf4a52da9 | 58.5 | 82.6 | 54.7 | 39.8 | 58.9 |
| chargoddard/platypus-2-22b-relora | 15bca3e9b25cc2f280fec21686ef3bc445217503 | 57.7 | 82.4 | 55.3 | 43.6 | 59.8 |
| chargoddard/llama-2-26b-trenchcoat-stack | 075d67c3223f4b379ab7f997c3787cd0630d80f7 | 55 | 79.9 | 53.7 | 40.5 | 57.3 |
| chargoddard/platypus2-22b-relora | 15bca3e9b25cc2f280fec21686ef3bc445217503 | 57.5 | 82.4 | 54.9 | 43.6 | 59.6 |
| chargoddard/llama-2-16b-nastychat | 6fb7f82d486b3eee53d750f83cc7eae434349809 | 57.4 | 80.6 | 56 | 53.4 | 61.8 |
| chargoddard/MelangeB-70b | 08239fb1e30b1e42b14370f23e942bc51e76027c | 71.7 | 87.5 | 70 | 59.4 | 72.1 |
| augtoma/qCammel-70-x | cf1e917e42fd1e56ee1edef7ee1a98cbe705c18c | 68.3 | 87.9 | 70.2 | 57.5 | 71 |
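If the rows above were exported as JSON Lines (one object per line, field names as in the schema at the top), a minimal sketch for ranking the models by score could look like this; the file name `results.jsonl` is hypothetical:

```python
import json

# Hypothetical input: one JSON object per line with the fields
# described at the top of this section.
with open("results.jsonl", encoding="utf-8") as f:
    rows = [json.loads(line) for line in f]

# Rank models by their average score, highest first.
rows.sort(key=lambda r: r["score"], reverse=True)

for rank, r in enumerate(rows[:5], start=1):
    print(f"{rank}. {r['model']}: {r['score']}")
```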