Schema (one record per evaluated model):

| Column | Type | Notes |
|---|---|---|
| model | string | model repository id; lengths 4–89 |
| revision | string | 1 distinct value (main) |
| model_sha | string | lengths 0–40 |
| results | dict | per-benchmark scores |
| commit | string | length 40 |
| date | timestamp[ns] | |
| score | float64 | values 21.8–83 |
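For working with these records programmatically, here is a minimal, purely illustrative Python sketch of one row under the schema above. The LeaderboardRow class name is an assumption (not part of the dataset); the field names and example values come from the first row of the table below.

```python
# Illustrative sketch only: one record shaped like the rows in the table below.
from dataclasses import dataclass
from datetime import datetime
from typing import Optional


@dataclass
class LeaderboardRow:  # hypothetical name, not defined by the dataset
    model: str                              # model repository id
    revision: str                           # "main" for every row in this dump
    model_sha: str                          # presumably the git commit of the evaluated model revision
    results: dict[str, Optional[float]]     # per-benchmark scores keyed by benchmark name
    commit: str                             # identical across all rows in this dump
    date: datetime                          # snapshot timestamp, identical across all rows here
    score: Optional[float]                  # aggregate score (null for one row below)


row = LeaderboardRow(
    model="Weyaxi/TekniumAiroboros-Nebula-7B",
    revision="main",
    model_sha="ef964d514cc25a600b0de78fc469d1acbec34591",
    results={"arc:challenge": 57.2, "hellaswag": 81.7,
             "hendrycksTest": 55.3, "truthfulqa:mc": 51.6},
    commit="9ba100d35ce48d3d4c132947464c93c861932caa",
    date=datetime.fromisoformat("2023-11-23T17:28:23"),
    score=61.4,
)
```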
Every record below shares the same constant fields: revision = main, commit = 9ba100d35ce48d3d4c132947464c93c861932caa, date = 2023-11-23T17:28:23. They are stated once here and omitted from the table.

| model | model_sha | arc:challenge | hellaswag | hendrycksTest | truthfulqa:mc | score |
|---|---|---|---|---|---|---|
| Weyaxi/TekniumAiroboros-Nebula-7B | ef964d514cc25a600b0de78fc469d1acbec34591 | 57.2 | 81.7 | 55.3 | 51.6 | 61.4 |
| Weyaxi/SlimOpenOrca-Mistral-7B | b0134a7512444dfbb60a2e2d81469a5bbbb18026 | 63 | 83.5 | 62.3 | 57.4 | 66.6 |
| Weyaxi/SynthIA-v1.3-Nebula-v2-7B | c6030620e9d4390d54ec221a18ff3e530f4dcd84 | 59.4 | 82.8 | 57.6 | 50.6 | 62.6 |
| Weyaxi/OpenOrca-Zephyr-7B | 2a2c7d287a46243cccf3ff6628375d0d190394ac | 64.1 | 83.8 | 62.5 | 54.3 | 66.2 |
| Weyaxi/Samantha-Nebula-7B | a7d4b8a1683e33dd3c60064d7dd9d5c35691323f | 57 | 82.3 | 54.2 | 49.6 | 60.8 |
| Weyaxi/Dolphin2.1-OpenOrca-7B | 076c0f7de93307e8fb3ad3bd820fb5f73325ca70 | 63.9 | 84.3 | 62.7 | 53.8 | 66.2 |
| Weyaxi/zephyr-beta-Nebula-v2-7B | 226caedb50a12730232c1f8fe9c96b6dcf818ba7 | 56.6 | 82.5 | 56.4 | 58.7 | 63.6 |
| Weyaxi/Dolphin-Nebula-7B | c14b3545066e5ee5562c1724a037b41db95f1f0d | 55.2 | 78.6 | 53.4 | 58 | 61.3 |
| Weyaxi/test-help-steer-filtered-orig | bda6d45ddb3ef73df4d198d95416c66872429927 | 57.6 | 80.4 | 57.2 | 41.1 | 59.1 |
| Weyaxi/Luban-Marcoroni-13B | bf152c36935acd67a9029c017f0c1ff2d7a92314 | 63.7 | 82.9 | 58.7 | 55.6 | 65.2 |
| jerryjalapeno/nart-100k-7b | 50e61b8e6cc17cb3fbcb490fe3dc7e2c8b248378 | 54.1 | 78.5 | 35 | 36.7 | 51.1 |
| SLAM-group/NewHope | 560ca6df8335d6d2998ac8f079218816a5742b02 | 60.9 | 84 | 55.7 | 44.9 | 61.4 |
| psmathur/model_007 | 0f5d81b13718a866cb078bd8762ab80a41972663 | 71.1 | 87.7 | 69 | 63.1 | 72.7 |
| psmathur/model_101 | 884c53a64a3c5faf7b0706d36a587ca1532ed8f5 | 68.7 | 86.4 | 69.9 | 58.9 | 71 |
| psmathur/orca_mini_v2_13b | 1058709314f7ca090937d0a2b7b37b0b3a8f12a3 | 55.1 | 79.7 | 50.1 | 52.6 | 59.4 |
| psmathur/orca_mini_13b | ca900c8f3145de40cd188c559b2901a2e4711546 | 42.1 | 63.4 | 35.4 | 43.1 | 46 |
| psmathur/orca_mini_v3_70b | c1d4f997f8ed685a6efc72229523b2e56fd0774b | 71.2 | 87.9 | 70.2 | 61.3 | 72.6 |
| psmathur/orca_mini_v3_7b | a1583d2f02041fb37df28eeae4da644d8dff33eb | 56.9 | 79.6 | 52.4 | 50.5 | 59.8 |
| psmathur/orca_mini_7b | 6ed0dca683685cb5b9e7df599f87d311f00ba6db | 43.9 | 65.2 | 30 | 42 | 45.3 |
| psmathur/model_007_v2 | 3d95e0f3598f7a76ab97cb2cc0e4aae957d77479 | 71.4 | 87.3 | 68.6 | 62.7 | 72.5 |
| psmathur/orca_mini_3b | fd2754e80ce80757a3a68a840d7d287dd7def676 | 41.6 | 61.5 | 26.8 | 42.4 | 43.1 |
| psmathur/model_007_13b | 0436ba68d245c8a2c04a2cc9637630d2e163cbbe | 22.7 | 25 | 23.1 | null | null |
| psmathur/model_42_70b | ca3789cd6b683e97dcd6a5f0367f90a63d7a4e7b | 68.3 | 87.7 | 70 | 48.8 | 68.7 |
| psmathur/test_42_70b | ca3789cd6b683e97dcd6a5f0367f90a63d7a4e7b | 68.3 | 87.7 | 70 | 48.8 | 68.7 |
| rinna/bilingual-gpt-neox-4b | f02f6f3c8da0093f3c1ce59220409bc2fa9fbb17 | 29.2 | 43.7 | 23.1 | 45 | 35.2 |
| rinna/bilingual-gpt-neox-4b-instruction-sft | c20e42bd49a3b1b0d0a07151899a322c4760e871 | 28.1 | 47.5 | 23.1 | 43.8 | 35.6 |
| rinna/bilingual-gpt-neox-4b-8k | ad56d7fc86db4ad5a7036bc9f80e11cd6f435a60 | 28.6 | 43.9 | 25.4 | 47.5 | 36.4 |
| Kiddyz/testlm | e00d8c50a007eb1da3fbfb4d5f5a73c1af3aa104 | 53.5 | 75.8 | 51.2 | 48.4 | 57.2 |
| Kiddyz/testlm2 | 9bffd9acfb12b5da1a1dd09825a633f804126dfa | 53 | 75.6 | 51.5 | 48.7 | 57.2 |
| Kiddyz/testlm-1 | e00d8c50a007eb1da3fbfb4d5f5a73c1af3aa104 | 53.5 | 75.8 | 51.2 | 48.4 | 57.2 |
| Kiddyz/testlm-1-1 | e00d8c50a007eb1da3fbfb4d5f5a73c1af3aa104 | 53.5 | 75.8 | 51.2 | 48.4 | 57.2 |
| Kiddyz/testllm-c2 | b87c798bc27522824451dfccf5eae50edbd4263b | 60.6 | 81.9 | 61.2 | 49.9 | 63.4 |
| Kiddyz/testlm-3 | 6ba288ac39fc4145144e360a8f2641d6f5a6a33a | 53.6 | 78.5 | 51.8 | 46.4 | 57.6 |
| AIDC-ai-business/Marcoroni-70B-v1 | 55a30d29db194832c0b5de1392a6598a63582144 | 73.5 | 87.6 | 70.7 | 64.4 | 74.1 |
| cerebras/Cerebras-GPT-1.3B | 5b95400ee8d1e3cc9f79f0dec7182ed9c1009c34 | 26.3 | 38.5 | 26.6 | 42.7 | 33.5 |
| cerebras/Cerebras-GPT-256M | d77812ac95aece1f1edef6745ae2a1b325ad01a4 | 22 | 29 | 26.8 | 46 | 31 |
| cerebras/Cerebras-GPT-590M | 67a653304fd782a34906d59f3795a37f9e053397 | 23.7 | 32.4 | 26 | 44.1 | 31.6 |
| cerebras/Cerebras-GPT-2.7B | 4383dfd80aafdbcfd0876419d246de51e6cbf7c1 | 29.1 | 49.3 | 25.2 | 41.4 | 36.2 |
| jordiclive/Llama-2-70b-oasst-1-200 | 153b209007e688d713cd670c9972f2827c597b45 | 67.7 | 87.2 | 70 | 51.3 | 69 |
| ziqingyang/chinese-alpaca-2-7b | ab2476bffedeed752daedd77e71900578e136e7c | 49.6 | 72.6 | 46.5 | 48.6 | 54.3 |
| ziqingyang/chinese-alpaca-2-13b | 576094cbf4988baf88b3bb66678be1db70bd720a | 58.7 | 79.7 | 55.1 | 50.2 | 60.9 |
| Severian/ANIMA-Phi-Neptune-Mistral-7B-v1 | 35dd5fee8563b61c41743e88be6c557f409c1c10 | 52.9 | 74.7 | 52.2 | 59.4 | 59.8 |
| Severian/ANIMA-Phi-Neptune-Mistral-7B-v3 | fd6fda131561917202905be1f4f3b0adc13efdb5 | 56.8 | 78.8 | 53.8 | 59.4 | 62.2 |
| Severian/ANIMA-Phi-Neptune-Mistral-7B-v4 | a8e18f970f7ca994740177d6c228adee9e17aba9 | 55.5 | 77.6 | 53.1 | 59 | 61.3 |
| Severian/ANIMA-Phi-Neptune-Mistral-LoRa | feef1ab8eeb7ba21685b93e074141136d95174bf | 53.1 | 74.7 | 52.1 | 59.4 | 59.8 |
| Yukang/Llama-2-13b-chat-longlora-32k-sft | 6f2924e354c3ab035aa2ff7c7e28d0e5327e2667 | 26.1 | 26.2 | 23.1 | 49.1 | 31.1 |
| Yukang/Llama-2-7b-longlora-32k-ft | ab48674ffc55568ffe2a1207ef0e711c2febbaaf | 27.9 | 25.6 | 23.1 | 49.6 | 31.6 |
| Yukang/Llama-2-7b-longlora-16k-ft | c86de31b80866d047e680e08dbd3572e2965d4c5 | 26.4 | 26.4 | 23.7 | 47.8 | 31.1 |
| Yukang/Llama-2-7b-longlora-100k-ft | 242c6469cab41b41d30826e850afa4687e422f24 | 28.2 | 25.4 | 23.5 | 49.1 | 31.6 |
| Yukang/Llama-2-13b-longlora-16k-ft | 5f0cfdef590fc9bd7642042fb5f1ed9679260b93 | 25.9 | 27.6 | 23.1 | 48.9 | 31.4 |
| Yukang/Llama-2-13b-longlora-32k-ft | 6d17c854025b0bd54ce572ac803f1bb052875dbf | 59.5 | 82.6 | 52.1 | 37.4 | 57.9 |
| dfurman/llama-2-7b-instruct-peft | 0fc43413117187e0723cdac133068ab527c80fe2 | 51.2 | 78.9 | 46.6 | 48.5 | 56.3 |
| dfurman/Llama-2-13B-Instruct-v0.2 | ac4b0962df8430f0b31c76a3d97a61134114c87e | 60.6 | 82 | 55.5 | 45.7 | 61 |
| dfurman/llama-2-13b-guanaco-peft | 10b58a7c31d5513fa56a9b8b38739253bf5cc0b4 | 60 | 82.4 | 55.8 | 42.6 | 60.2 |
| dfurman/llama-2-70b-dolphin-peft | a1190dee60b5854e80d340958dc3cc956bc56f68 | 69.6 | 86.8 | 69.2 | 57.4 | 70.7 |
| PY007/TinyLlama-1.1B-intermediate-step-240k-503b | 213ebf60d7fdd3258fa5574840b06c97a7e8cf5d | 29.3 | 49.7 | 26.3 | 40.2 | 36.4 |
| PY007/TinyLlama-1.1B-step-50K-105b | c1f1ef67c12e4bb85fe0bdf1747c645a202cc118 | 25.9 | 44.1 | 26.8 | 39.5 | 34.1 |
| FelixChao/llama2-13b-math1.1 | 3c4d83d3525e54a493ff510443fdcca44bf63b59 | 57.3 | 80.7 | 53.6 | 48.4 | 60 |
| sauce1337/AppleSauce-L2-13b | ba253c52eb85e24987c81e5d36b5a9a00e276ce7 | 61 | 83.6 | 57.1 | 47.8 | 62.4 |
| glaiveai/glaive-coder-7b | 72a255a58480ef0713eed988312fe82f77f94f37 | 42.7 | 64.7 | 37.2 | 39.9 | 46.1 |
| Voicelab/trurl-2-7b | e26ca5f157c60fc527170cc04db7fc0ea04ad26f | 53.4 | 75.3 | 50 | 45.4 | 56 |
| lloorree/kssht-castor-70b | e49a6bdc5e6024fb0e60dbba4601b346b4369377 | 69.5 | 87.5 | 70.4 | 56.3 | 70.9 |
| lloorree/jfdslijsijdgis | 1e67eaa4ef618a5a0d8c52e5e107635c706b34c5 | 69.6 | 87.3 | 70 | 59.2 | 71.5 |
| lloorree/kssht-dahj-70b | 85901aab2c3faf09de5ba8e9d65ec03aee4b20e4 | 70.8 | 87.3 | 70.4 | 58.9 | 71.8 |
| lloorree/kssht-euripedes-70b | 04ae5f2187697a7e9a2d57f327a7131f23d3e927 | 69.8 | 87.6 | 70.4 | 55.5 | 70.8 |
| chavinlo/gpt4-x-alpaca | 6a571f458cab9a23d14324ec63e0abd1744c8353 | 52.8 | 79.6 | 48.2 | 48.9 | 57.4 |
| SkunkworksAI/Mistralic-7B-1 | ebf138de4fb7a57f0d187ad0ab43abd6b35bfb62 | 60.8 | 82.3 | 60.8 | 52.4 | 64.1 |
| yulan-team/YuLan-Chat-2-13b-fp16 | 2d439187efd6edd91a0c0146f08dff52d92aa7bc | 59 | 80.7 | 56.7 | 52.2 | 62.1 |
| Undi95/MXLewd-L2-20B | ac279478abd9ddb8d1f5adcc548be0287b963adf | 63.2 | 85.3 | 57.4 | 51.6 | 64.4 |
| Undi95/CreativityEngine | 7870cc50b82b5cbebfa9935b6d73a9d20170299a | 59.3 | 82.4 | 53.6 | 52.5 | 62 |
| Undi95/OpenRP-13B | d11815287c51ef51485fb003f8f72773cf6f19a4 | 62.1 | 82.6 | 57.5 | 48.3 | 62.6 |
| Undi95/U-Amethyst-20B | c0cbe0b3c88041bb6beef27dbe85146af8dddec9 | 62.2 | 83.1 | 55.9 | 53.2 | 63.6 |
| Undi95/Amethyst-13B | d4a85b1006f0b9439e64f0e7400533a7b867c24d | 62.6 | 83.2 | 55.9 | 52.4 | 63.5 |
| Undi95/Nous-Hermes-13B-Code | 5a45cb2a6442581ce32cc19c561c49cec1db4ebb | 61.2 | 83.2 | 55.1 | 50.6 | 62.5 |
| Undi95/ReMM-v2-L2-13B | bc42c77f88482c37c72c85c66135e99972bbca1b | 61.9 | 84 | 56.1 | 50.8 | 63.2 |
| Undi95/Llama2-13B-no_robots-alpaca-lora | 581aba329e607533c299746bb9eb4154a7aab139 | 58.9 | 82.4 | 53.1 | 40.5 | 58.7 |
| Undi95/MLewd-v2.4-13B | 6f6ec6024ee054020e49fd96f149919692848f0b | 61.7 | 83.8 | 55.1 | 53.3 | 63.5 |
| Undi95/Mistral-11B-TestBench3 | 7eb397ad2ec67400e31dc010f9b364a72d64d965 | 62 | 83.9 | 63.1 | 53.7 | 65.7 |
| Undi95/ReMM-L2-13B | c4710577003a23ca8e9040d16dfb8f3e9bc5d636 | 59.7 | 83.1 | 54.1 | 49.9 | 61.7 |
| Undi95/ReMM-v2.1-L2-13B | e6b5ac97f74355cb281a621261debe5720fb4da2 | 61.4 | 83.9 | 56 | 50.3 | 62.9 |
| Undi95/CodeEngine | f57879831c39f2dcb656cb2c9e9ce5878e92bb44 | 58.4 | 82.3 | 54.2 | 45.2 | 60 |
| Undi95/MM-ReMM-L2-20B | 37869800c15fb37d017ea83bb50fec6d6141f6ba | 60.8 | 85.2 | 56.5 | 53.3 | 64 |
| Undi95/ReMM-L2-13B-PIPPA | 79e711178c6881496ae1f5635b08bc193f370709 | 59.7 | 83.1 | 54.1 | 49.9 | 61.7 |
| Undi95/Mistral-11B-TestBench11 | 9aae2b156b24557bb98e515f3a90c7865529d2e9 | 64.4 | 83.9 | 63.8 | 56.7 | 67.2 |
| Undi95/ReMM-v2.2-L2-13B | d55031fbcd41d749bc0c0ffbcd85636718d373b6 | 61.3 | 84.2 | 56.2 | 51.4 | 63.3 |
| Undi95/Mistral-11B-TestBench7 | 3d4d99f90ec582e0d532e11f6da419d6b962c536 | 63.3 | 82.9 | 64.1 | 46.9 | 64.3 |
| Undi95/UndiMix-v4-13B | 6dd97c74cfe1d22432d5c993814e230f333ba401 | 61.9 | 83.9 | 56.9 | 49 | 62.9 |
| Undi95/Mistral-11B-TestBench10 | 730429d6132c7702885840098885081c2df878df | 64.2 | 84.2 | 63.9 | 55.6 | 67 |
| Undi95/Emerald-13B | f7696299463d8ec402a4e1eb001f3a447f1c5552 | 62.3 | 83.7 | 55.7 | 50.9 | 63.2 |
| Undi95/ReMM-Mistral-13B | a5ef9385d9430a81778183d71b58eb2b869d6a7e | 62.2 | 83.8 | 55.4 | 53.3 | 63.7 |
| IGeniusDev/llama13B-quant8-testv1-openorca-customdataset | f364d000bedac80e72aa103c08b77aee1b61b7da | 60.2 | 83 | 54.3 | 37.3 | 58.7 |
| harborwater/open-llama-3b-claude-30k | 049db7fda44e5ce1e8febf5c3f45e3a93aaaa859 | 41.7 | 72.6 | 24 | 38.5 | 44.2 |
| harborwater/open-llama-3b-everything-v2 | 31ce2c1611d9f7d56184ceb5bff6a7e95a180c03 | 42.8 | 73.3 | 26.9 | 37.3 | 45.1 |
| AA051610/VA | 3c60daea2db0840475b3f67149122d9e033eab5b | 41.4 | 62.5 | 50 | 44.9 | 49.7 |
| AA051610/T1B | 6f3664328e9f07a6578ccb0c5713b747cc0549d5 | 56.1 | 79.8 | 60 | 47 | 60.7 |
| AA051610/T2A | c30e3b053299c7ecf250af143a816ef8a9a45c22 | 51.5 | 74 | 62.1 | 47 | 58.6 |
| AA051610/T1C | 1b1185ddc427341df12dd1aa8c68090fde16b5f3 | 50.2 | 72.2 | 56.3 | 42.5 | 55.3 |
| huashiyiqike/testmodel | 1ac5d244402e2433b6abfcff1fe65e84af15766b | 19.7 | 26.7 | 25.3 | 43.7 | 28.8 |
| maywell/Synatra-7B-v0.3-RP | 372f6e0ab2c20b93e0c42218f76a71a4f9bb282e | 62.2 | 82.3 | 60.8 | 52.6 | 64.5 |
| maywell/Synatra-7B-v0.3-dpo | 405a4f1e6513cd1b8de5eb4e003bb49cc86d1f8a | 62.8 | 82.6 | 61.5 | 56.5 | 65.8 |
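The score column appears to be the unweighted mean of the four benchmark scores, up to rounding of the displayed values. A minimal sketch checking that assumption against the psmathur/model_007 row above:

```python
# Assumption check (not part of the dataset): is `score` the plain mean of the
# four benchmark results? Values are taken from the psmathur/model_007 row above.
from statistics import mean

results = {"arc:challenge": 71.1, "hellaswag": 87.7,
           "hendrycksTest": 69.0, "truthfulqa:mc": 63.1}
reported_score = 72.7

estimated = round(mean(results.values()), 1)
print(estimated, reported_score)  # 72.7 72.7 -> consistent for this row, within rounding
```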