Each row records one model's benchmark results and its overall score (a parsing sketch follows the table).

model (string, 4-89 chars) | revision (string, 1 distinct value) | model_sha (string, 0-40 chars) | results (dict) | commit (string, 40 chars) | date (timestamp[ns]) | score (float64, 21.8-83, ⌀ = null) |
---|---|---|---|---|---|---|
HWERI/pythia-70m-deduped-cleansharegpt | main | 6ea42abd94cb0017918f6fe5e71d78bcb7c75548 | { "arc:challenge": 25.7, "hellaswag": 25.4, "hendrycksTest": 23.1, "truthfulqa:mc": 51.2 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.4 |
GigaML/X1-large | main | 6a5b816c84fe0a0cf9d7b4539fab5c88f66992a6 | { "arc:challenge": 22.7, "hellaswag": 25, "hendrycksTest": 23.1, "truthfulqa:mc": null } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | null |
duliadotio/dulia-13b-8k-alpha | main | c3bcafd7f6133a7e7c069f8765a99fe84989d926 | { "arc:challenge": 60.7, "hellaswag": 82, "hendrycksTest": 56.9, "truthfulqa:mc": 42.6 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.6 |
vibhorag101/llama-2-7b-chat-hf-phr_mental_health-2048 | main | 81d424a431ab7fa4ff725925b6d0e4269d4563e4 | { "arc:challenge": 52.4, "hellaswag": 75.4, "hendrycksTest": 39.8, "truthfulqa:mc": 42.9 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 52.6 |
stabilityai/StableBeluga-7B | main | 329adcfc39f48dce183eb0b155b732dbe03c6304 | { "arc:challenge": 56.3, "hellaswag": 79.1, "hendrycksTest": 52.7, "truthfulqa:mc": 50.2 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.6 |
stabilityai/StableBeluga-13B | main | 1d6eef4cc2b73f39600a568803ad8183f2da4514 | { "arc:challenge": 62, "hellaswag": 82.3, "hendrycksTest": 57.7, "truthfulqa:mc": 49.6 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 62.9 |
stabilityai/stablelm-3b-4e1t | main | a4750ace0db6f08d7bbba0aa52a585f231ea3cde | { "arc:challenge": 46.6, "hellaswag": 75.9, "hendrycksTest": 45.2, "truthfulqa:mc": 37.2 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 51.2 |
stabilityai/stablelm-tuned-alpha-3b | main | d1c03d2114451d562416b9efe4281d319ceff99e | { "arc:challenge": 27.8, "hellaswag": 44.1, "hendrycksTest": 23.1, "truthfulqa:mc": 42.3 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 34.3 |
stabilityai/StableBeluga1-Delta | main | 40a78d91d43ad9aef6663ff15ddc15be9922bce5 | { "arc:challenge": 68.2, "hellaswag": 85.9, "hendrycksTest": 64.8, "truthfulqa:mc": 55.8 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 68.7 |
postbot/pythia-160m-hq-emails | main | 6eeded627780b47b5221ed72ebea436514621964 | { "arc:challenge": 23.1, "hellaswag": 30.1, "hendrycksTest": 26.6, "truthfulqa:mc": 45.5 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.3 |
postbot/gpt2-medium-emailgen | main | 1b9b03d00b2b300d3c04c37fe3782c180ef51a27 | { "arc:challenge": 26.5, "hellaswag": 34.3, "hendrycksTest": 24.1, "truthfulqa:mc": 44 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 32.2 |
postbot/distilgpt2-emailgen | main | fe96d63cc2edcbd1ae444ada293cc59d1e01a6ad | { "arc:challenge": 21.8, "hellaswag": 27.5, "hendrycksTest": 26, "truthfulqa:mc": 46.2 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 30.4 |
postbot/emailgen-pythia-410m-deduped | main | e0208b02990c49138350da791f0b6fcb8a65e738 | { "arc:challenge": 27.9, "hellaswag": 40, "hendrycksTest": 27.4, "truthfulqa:mc": 38.2 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 33.4 |
postbot/distilgpt2-emailgen-V2 | main | 9750ba00e79a02e1bf98d3faa3d49b8ae0f8ae63 | { "arc:challenge": 21, "hellaswag": 26.8, "hendrycksTest": 25.5, "truthfulqa:mc": 46.5 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 30 |
AtomEchoAI/AtomGPT_56k | main | f69ecfd630ec89afffa4ca7bd8a5eda0daf57643 | { "arc:challenge": 53.2, "hellaswag": 76.7, "hendrycksTest": 45.3, "truthfulqa:mc": 40.3 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 53.9 |
clibrain/Llama-2-7b-ft-instruct-es | main | b62f431c88b232204ea7046f9d906ae1daa68437 | { "arc:challenge": 53.7, "hellaswag": 77.8, "hendrycksTest": 46.6, "truthfulqa:mc": 38.8 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.2 |
tiiuae/falcon-7b | main | 378337427557d1df3e742264a2901a49f25d4eb1 | { "arc:challenge": 47.9, "hellaswag": 78.1, "hendrycksTest": 27.8, "truthfulqa:mc": 34.3 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 47 |
tiiuae/falcon-40b | main | c47b371b31a68349c233104050ac76680b8485db | { "arc:challenge": 61.9, "hellaswag": 85.3, "hendrycksTest": 57, "truthfulqa:mc": 41.7 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.5 |
tiiuae/falcon-180B-chat | main | | { "arc:challenge": 63.8, "hellaswag": 88.1, "hendrycksTest": 67.8, "truthfulqa:mc": 53.3 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 68.2 |
tiiuae/falcon-180B | main | 71a1a70b629e9963f7b4601e82f3f9079d48011e | { "arc:challenge": 69.8, "hellaswag": 88.9, "hendrycksTest": 70.5, "truthfulqa:mc": 45.7 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 68.7 |
tiiuae/falcon-rw-1b | main | e4b9872bb803165eb22f0a867d4e6a64d34fce19 | { "arc:challenge": 35.1, "hellaswag": 63.6, "hendrycksTest": 25.3, "truthfulqa:mc": 36 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 40 |
bsp-albz/llama2-13b-platypus-ckpt-1000 | main | d9f3e490df2134784afc3a86f5c617a9bab8db4d | { "arc:challenge": 28.2, "hellaswag": 26.5, "hendrycksTest": 23.2, "truthfulqa:mc": 48.8 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.7 |
WhoTookMyAmogusNickname/NewHope_HF_not_official | main | f587f4a31de6818f4200d9cdc7f116ca8ba1cdc2 | { "arc:challenge": 61.1, "hellaswag": 84, "hendrycksTest": 55.7, "truthfulqa:mc": 45 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.4 |
euclaise/falcon_1b_stage1 | main | f85d91ff3f6cadc93f7222a19b9c4930c8842366 | { "arc:challenge": 35.2, "hellaswag": 62.4, "hendrycksTest": 24.5, "truthfulqa:mc": 40 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 40.5 |
euclaise/falcon_1b_stage3 | main | 593e48197e91537b203ba288260f6580b9cbcbe6 | { "arc:challenge": 33.1, "hellaswag": 54.1, "hendrycksTest": 25.1, "truthfulqa:mc": 37.9 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 37.6 |
euclaise/Ferret-7B | main | b1ef5adff5ceb06d2d9808bccf5e06705f9e19dc | { "arc:challenge": 62.2, "hellaswag": 81.7, "hendrycksTest": 60.8, "truthfulqa:mc": 40.9 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.4 |
euclaise/falcon_1b_stage2 | main | c3ef73a8c9dc06fae4bfe4460d2f293147aecbb0 | { "arc:challenge": 35.5, "hellaswag": 65.6, "hendrycksTest": 23.8, "truthfulqa:mc": 38.3 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 40.8 |
davzoku/cria-llama2-7b-v1.3_peft | main | 6864fa8ee43fa4d6b4f3ae055bbf464a5dcca570 | { "arc:challenge": 51.5, "hellaswag": 77.3, "hendrycksTest": 46.5, "truthfulqa:mc": 45.5 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 55.2 |
edor/Platypus2-mini-7B | main | 4ede4a6f8a8d6cc3bfff8b98837116c74c280f63 | { "arc:challenge": 53.3, "hellaswag": 78.8, "hendrycksTest": 45.6, "truthfulqa:mc": 42 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.9 |
edor/Stable-Platypus2-mini-7B | main | a595cdcbee7562e5ff13ff720245a8c5cf26ffdf | { "arc:challenge": 54.9, "hellaswag": 78.9, "hendrycksTest": 51.8, "truthfulqa:mc": 51.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.2 |
edor/Hermes-Platypus2-mini-7B | main | 2797c255626b396cc89c416110a4d785aa5cbe25 | { "arc:challenge": 53.8, "hellaswag": 79.2, "hendrycksTest": 47.1, "truthfulqa:mc": 49.3 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 57.4 |
huggingface/llama-65b | main | 4ae2e56610e8b9b9a78472708390668e9096b4f9 | { "arc:challenge": 63.5, "hellaswag": 86.1, "hendrycksTest": 63.9, "truthfulqa:mc": 43.4 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 64.2 |
huggingface/llama-13b | main | 4022c52fcc7473ce7364bb5ac166195903ea1efb | { "arc:challenge": 56.2, "hellaswag": 80.9, "hendrycksTest": 47.7, "truthfulqa:mc": 39.5 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 56.1 |
huggingface/llama-30b | main | 13c77caa472bfa79d4f3f0ec82cbdc9dd88e5d22 | { "arc:challenge": 61.3, "hellaswag": 84.7, "hendrycksTest": 58.5, "truthfulqa:mc": 42.3 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.7 |
huggingtweets/bladeecity-jerma985 | main | 9bf3a0db7f6bc960c51f2c0dc6fb66ed982b0180 | { "arc:challenge": 22.9, "hellaswag": 30.5, "hendrycksTest": 26.6, "truthfulqa:mc": 45 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.2 |
gpt2-large | main | 97935fc1a406f447320c3db70fe9e9875dca2595 | { "arc:challenge": 25.9, "hellaswag": 45.6, "hendrycksTest": 26.1, "truthfulqa:mc": 38.7 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 34.1 |
layoric/llama-2-13b-code-alpaca | main | aa1d543fe3391fe9f0e6143ef785fffe9c871225 | { "arc:challenge": 60.8, "hellaswag": 82.1, "hendrycksTest": 55.9, "truthfulqa:mc": 38.3 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.3 |
MrNJK/gpt2-xl-sft | main | 53250831436460254b7ee9afc4014d4d3156b372 | { "arc:challenge": 30, "hellaswag": 49.2, "hendrycksTest": 25.6, "truthfulqa:mc": 38.8 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 35.9 |
microsoft/DialoGPT-medium | main | 9d5c5fadcc072b693fb5a5e29416bbf3f503c26c | { "arc:challenge": 24.5, "hellaswag": 26.2, "hendrycksTest": 25.8, "truthfulqa:mc": 47.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 30.9 |
microsoft/Orca-2-7b | main | 60e31e6bdcf582ad103b807cb74b73ee1d2c4b17 | { "arc:challenge": 54.2, "hellaswag": 76.2, "hendrycksTest": 56.4, "truthfulqa:mc": 52.5 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.8 |
microsoft/CodeGPT-small-py | main | e5f31df92bfb7b7a808ea8d1c7557488e1bdff7f | { "arc:challenge": 22.7, "hellaswag": 27.3, "hendrycksTest": 25, "truthfulqa:mc": 51.2 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.6 |
microsoft/Orca-2-13b | main | 2539ff53e6baa4cc603774ad5a2d646f4041ea4e | { "arc:challenge": 60.7, "hellaswag": 79.8, "hendrycksTest": 60.4, "truthfulqa:mc": 56.4 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 64.3 |
iGenius-AI-Team/LLAMA-13B-test-finetuning | main | 5bd0eb026b12c59fd198f307c0c17188af69744c | { "arc:challenge": 58, "hellaswag": 82.4, "hendrycksTest": 54.3, "truthfulqa:mc": 44.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.7 |
Corianas/Quokka_1.3b | main | 8a8d738e841a524d658897d89b9e39e7b9272ed8 | { "arc:challenge": 27.7, "hellaswag": 37.9, "hendrycksTest": 26.7, "truthfulqa:mc": 40.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 33.1 |
Corianas/1.3b | main | 9831f95df82155ef95ff46a505506bf6194b131a | { "arc:challenge": 27.3, "hellaswag": 38.3, "hendrycksTest": 26.8, "truthfulqa:mc": 39 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 32.8 |
Corianas/590m | main | ec721c97ef0e6ebfc578ab98b3ff6e2bd19b3e27 | { "arc:challenge": 24.1, "hellaswag": 31.9, "hendrycksTest": 26.6, "truthfulqa:mc": 42.2 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.2 |
Corianas/Quokka_256m | main | d4e69f714d360d39979eb7b8cbc9decdb7190c88 | { "arc:challenge": 22.9, "hellaswag": 28.8, "hendrycksTest": 26.5, "truthfulqa:mc": 39.5 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 29.4 |
heegyu/WizardVicuna-3B-0719 | main | 62d3d450b8ab2bd2fb9f82383b55d1ecae33a401 | { "arc:challenge": 40.7, "hellaswag": 65.4, "hendrycksTest": 25.4, "truthfulqa:mc": 40.7 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43 |
heegyu/LIMA2-7b-hf | main | 6a1aa59cb7624f059728840ce68b20b1070ebdcb | { "arc:challenge": 53.2, "hellaswag": 80.6, "hendrycksTest": 43.2, "truthfulqa:mc": 44.7 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 55.4 |
heegyu/LIMA-13b-hf | main | 98faa74a9b41cbd9033904cd58420705936849eb | { "arc:challenge": 57.4, "hellaswag": 81.7, "hendrycksTest": 48.7, "truthfulqa:mc": 41.8 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 57.4 |
heegyu/LIMA2-13b-hf | main | ed3535921eb24e0737f9a6cda70b1a3fd71532cd | { "arc:challenge": 60.2, "hellaswag": 83.7, "hendrycksTest": 53.2, "truthfulqa:mc": 41.8 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.7 |
Qwen/Qwen-14B | main | 5eda9482e32a8ea7ed2dc47178f3b491eb207939 | { "arc:challenge": 58.3, "hellaswag": 84, "hendrycksTest": 67.7, "truthfulqa:mc": 49.4 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 64.8 |
Aeala/Alpaca-elina-65b | main | 51ce30a69b3c3363c8cfcd6395bf1df974ba2977 | { "arc:challenge": 65.3, "hellaswag": 85.7, "hendrycksTest": 63.4, "truthfulqa:mc": 47.3 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 65.4 |
Aeala/GPT4-x-Alpasta-13b | main | 50af05b015446110a2dc52a1b4b341142c98e62b | { "arc:challenge": 58.5, "hellaswag": 79.9, "hendrycksTest": 46, "truthfulqa:mc": 53.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.4 |
Aeala/VicUnlocked-alpaca-30b | main | c63d117d1ec5794766dd6dc5e1469769df8aba1d | { "arc:challenge": 61.9, "hellaswag": 83.8, "hendrycksTest": 57.6, "truthfulqa:mc": 51 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 63.6 |
AtAndDev/ShortKingv0.1 | main | 6cd9b5bc13ee15b5e7e7cfb46477bc6a7c0b5d47 | { "arc:challenge": 34.2, "hellaswag": 54.6, "hendrycksTest": 25.8, "truthfulqa:mc": 41.6 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 39.1 |
nicholasKluge/Aira-2-774M | main | f43044cfe7bf0827a176f0d319c63251c2b29373 | { "arc:challenge": 28.8, "hellaswag": 40.8, "hendrycksTest": 25.1, "truthfulqa:mc": 41.3 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 34 |
nicholasKluge/Aira-1B5 | main | 4bca81e6a8fbe73956b9e3cda47fb017fd147973 | { "arc:challenge": 28.9, "hellaswag": 43.1, "hendrycksTest": 27.3, "truthfulqa:mc": 41.2 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 35.1 |
nicholasKluge/Aira-2-355M | main | 2479f5b1bb62251ec88e60182ba81390a4c19cf9 | { "arc:challenge": 27.6, "hellaswag": 38.9, "hendrycksTest": 27.3, "truthfulqa:mc": 38.5 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 33.1 |
nicholasKluge/Aira-124M | main | 0c0d509ec9ce057e7b506e15c868eecf79cc8ae5 | { "arc:challenge": 24.6, "hellaswag": 31.3, "hendrycksTest": 25.3, "truthfulqa:mc": 41 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 30.6 |
Lazycuber/Janemalion-6B | main | e72ae3ec110121115b1ae6c2e5fb3995997a2d96 | { "arc:challenge": 42.4, "hellaswag": 68.4, "hendrycksTest": 28.3, "truthfulqa:mc": 34.6 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43.4 |
Lazycuber/L2-7b-Orca-WVG-Test | main | 6073a87872eb36149404bfb7d60e0108074ee1c3 | { "arc:challenge": 54.9, "hellaswag": 78.3, "hendrycksTest": 51.1, "truthfulqa:mc": 43.7 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 57 |
Lazycuber/L2-7b-Guanaco-Random-Test | main | 9ffff7d0f58ba1de5e5fc59a61b7dc6ca571c9bf | { "arc:challenge": 50.6, "hellaswag": 77.2, "hendrycksTest": 47.7, "truthfulqa:mc": 42.3 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.4 |
Lazycuber/pyg-instruct-wizardlm | main | f00ef7a7b0cc6f02af2a11ac764270dfd61b9e2f | { "arc:challenge": 41, "hellaswag": 66.7, "hendrycksTest": 26.3, "truthfulqa:mc": 31.9 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 41.5 |
RobbeD/Orca-Platypus-3B | main | 243f51d75ed6d425addde839740f6fd5bcc4630f | { "arc:challenge": 43.1, "hellaswag": 65.3, "hendrycksTest": 26.8, "truthfulqa:mc": 41.9 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 44.3 |
RobbeD/OpenLlama-Platypus-3B | main | d3a0bf8e1181be02cc9c4c4cdfedaedacaefbfac | { "arc:challenge": 41.2, "hellaswag": 71.7, "hendrycksTest": 29.9, "truthfulqa:mc": 36.5 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 44.8 |
hakurei/lotus-12B | main | f212b695aabf5dafb5dccf5013ddb765ba1e47d7 | { "arc:challenge": 30.7, "hellaswag": 52.7, "hendrycksTest": 24.5, "truthfulqa:mc": 40.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 37 |
hakurei/instruct-12b | main | ff4699b502b79c716330b6f761002588a65dcba6 | { "arc:challenge": 42.6, "hellaswag": 66.8, "hendrycksTest": 26.8, "truthfulqa:mc": 32 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 42.1 |
elyza/ELYZA-japanese-Llama-2-7b-fast-instruct | main | 89de33d1ad568855853196802aeaecd799c6586f | { "arc:challenge": 53.8, "hellaswag": 77.6, "hendrycksTest": 46.9, "truthfulqa:mc": 38.8 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.3 |
elyza/ELYZA-japanese-Llama-2-7b-instruct | main | 48fa08b3098a23d3671e09565499a4cfbaff1923 | { "arc:challenge": 53.2, "hellaswag": 78.3, "hendrycksTest": 47.1, "truthfulqa:mc": 39.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.4 |
elyza/ELYZA-japanese-Llama-2-7b-fast | main | e326078aa122fb1c4973997952d7b8630071776a | { "arc:challenge": 51.9, "hellaswag": 75.5, "hendrycksTest": 44.3, "truthfulqa:mc": 36.5 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 52 |
TheBloke/landmark-attention-llama7b-fp16 | main | bf8bdcb0c30cceb0ceda33cf5fde683807e39a58 | { "arc:challenge": 47.4, "hellaswag": 65.8, "hendrycksTest": 31.6, "truthfulqa:mc": 42.6 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 46.8 |
TheBloke/Wizard-Vicuna-7B-Uncensored-HF | main | b802f1b4401d0b2242137160c20cc11b9ffd3a4c | { "arc:challenge": 53.4, "hellaswag": 78.8, "hendrycksTest": 37.1, "truthfulqa:mc": 43.5 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 53.2 |
TheBloke/CAMEL-33B-Combined-Data-SuperHOT-8K-fp16 | main | 14744d11eab7028c5c845f89db2edc9c6fe2becb | { "arc:challenge": 25.9, "hellaswag": 31.6, "hendrycksTest": 23.7, "truthfulqa:mc": 48.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 32.3 |
TheBloke/tulu-7B-fp16 | main | 8a026683f79119643f4007da4e9155c7849792cc | { "arc:challenge": 50.2, "hellaswag": 77, "hendrycksTest": 47.6, "truthfulqa:mc": 41.6 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.1 |
TheBloke/orca_mini_13B-GPTQ | main | 8ec18e5c597da86fa123c08b6e6bef7da6ec7440 | { "arc:challenge": 27.3, "hellaswag": 25.9, "hendrycksTest": 25.3, "truthfulqa:mc": 48.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.6 |
TheBloke/VicUnlocked-30B-LoRA-HF | main | 3259cb3c2a10cfb429fb51c4a76fffa049f4c44d | { "arc:challenge": 59.7, "hellaswag": 84, "hendrycksTest": 57.8, "truthfulqa:mc": 48.5 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 62.5 |
TheBloke/tulu-30B-fp16 | main | 37c3655676c37662f60c68dacfce3f0e861be846 | { "arc:challenge": 60, "hellaswag": 83.4, "hendrycksTest": 56.1, "truthfulqa:mc": 45.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.2 |
TheBloke/OpenOrca-Platypus2-13B-GPTQ | main | 0fa9a56066656fbc94e3ec088bc900fd1d4d38e8 | { "arc:challenge": 62.5, "hellaswag": 82.7, "hendrycksTest": 58.6, "truthfulqa:mc": 51.9 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 63.9 |
TheBloke/WizardLM-30B-GPTQ | main | e2e97475a9775d2fe7afba098aee37e694b9220f | { "arc:challenge": 28.8, "hellaswag": 26.1, "hendrycksTest": 24.6, "truthfulqa:mc": 49.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 32.2 |
TheBloke/Llama-2-7b-Chat-AWQ | main | a065961fd627aa3b3e6dde21e77fd5e20f712189 | { "arc:challenge": 27.2, "hellaswag": 25.5, "hendrycksTest": 24.7, "truthfulqa:mc": 50 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.8 |
TheBloke/LongChat-13B-GPTQ | main | 8ec25a29033b7be5daeafa26f08e1ea7cf232b98 | { "arc:challenge": 28.3, "hellaswag": 26.1, "hendrycksTest": 25.6, "truthfulqa:mc": 48.3 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 32.1 |
TheBloke/gpt4-x-vicuna-13B-HF | main | a247577c882940e0c6b040fe8239d760c0d10d40 | { "arc:challenge": 53.4, "hellaswag": 80.1, "hendrycksTest": 51.2, "truthfulqa:mc": 53.6 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.6 |
TheBloke/llama-30b-supercot-SuperHOT-8K-fp16 | main | 7efdff78a90132c1c66e1d27518ad7cbadffa139 | { "arc:challenge": 25.9, "hellaswag": 30.5, "hendrycksTest": 23.5, "truthfulqa:mc": 47 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.7 |
TheBloke/EverythingLM-13B-16K-GPTQ | main | f14d3df05577f3e1ac35e2c4ec32ce0d39b97508 | { "arc:challenge": 29.3, "hellaswag": 26.2, "hendrycksTest": 25.4, "truthfulqa:mc": 48.6 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 32.4 |
TheBloke/wizard-vicuna-13B-HF | main | 12dc8aacb474522ae2a83c18cb0fdf0907987f8f | { "arc:challenge": 54.7, "hellaswag": 79.2, "hendrycksTest": 48.9, "truthfulqa:mc": 49.6 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 58.1 |
TheBloke/stable-vicuna-13B-HF | main | 2b099b2be0dafb2606ae9808c0f6183fe4bff7bc | { "arc:challenge": 53.3, "hellaswag": 78.5, "hendrycksTest": 50.3, "truthfulqa:mc": 48.4 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 57.6 |
TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ | main | ec9eb4f471b5bb6a7e5e505369628586c0c72252 | { "arc:challenge": 61.3, "hellaswag": 82.1, "hendrycksTest": 57.8, "truthfulqa:mc": 50.2 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 62.8 |
TheBloke/WizardLM-13B-V1-1-SuperHOT-8K-GPTQ | main | 085eb5cd394f30d72bf5efcf83a580e87264b3e8 | { "arc:challenge": 57, "hellaswag": 80.3, "hendrycksTest": 47.1, "truthfulqa:mc": 53.5 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.5 |
TheBloke/Project-Baize-v2-13B-GPTQ | main | 8dee7c7129aaad1ded245fce712ff5dbb2845258 | { "arc:challenge": 27.6, "hellaswag": 26.4, "hendrycksTest": 25.9, "truthfulqa:mc": 48.2 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 32 |
TheBloke/gpt4-alpaca-lora-13B-HF | main | 49678a2dd15fb4e1f1b99616ccc1ffd269912833 | { "arc:challenge": 59.6, "hellaswag": 82.1, "hendrycksTest": 47.5, "truthfulqa:mc": 49 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.6 |
TheBloke/Nous-Hermes-13B-SuperHOT-8K-fp16 | main | b407c1ece029ad5693d38e6e0931e9482962ed15 | { "arc:challenge": 55.3, "hellaswag": 81.9, "hendrycksTest": 48.2, "truthfulqa:mc": 51.2 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.1 |
TheBloke/h2ogpt-oasst1-512-30B-HF | main | 3dc93836e4b08b7b2ee43e69c1e590a36fd24687 | { "arc:challenge": 57.3, "hellaswag": 81.4, "hendrycksTest": 48.1, "truthfulqa:mc": 45.5 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 58.1 |
TheBloke/WizardLM-30B-Uncensored-GPTQ | main | 43c701ddbe0bceac26c860307e06763cc5203500 | { "arc:challenge": 29.4, "hellaswag": 26.5, "hendrycksTest": 24.4, "truthfulqa:mc": 49.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 32.4 |
TheBloke/robin-13B-v2-fp16 | main | f4dd8fc4440ed84fcf3ff1122f2b7f6024cca29d | { "arc:challenge": 56.5, "hellaswag": 80.4, "hendrycksTest": 48.8, "truthfulqa:mc": 50.6 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.1 |
TheBloke/robin-33B-v2-fp16 | main | c0ed7d40c3e52379780638dac3bd1f69597b8e18 | { "arc:challenge": 62.4, "hellaswag": 83.6, "hendrycksTest": 54.7, "truthfulqa:mc": 53.9 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 63.6 |
TheBloke/vicuna-13B-1.1-HF | main | 8c71dbe9221e83d2ec72e4dc08beccfc78b563c0 | { "arc:challenge": 52.7, "hellaswag": 80.1, "hendrycksTest": 51.9, "truthfulqa:mc": 52.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.2 |
TheBloke/Project-Baize-v2-7B-GPTQ | main | 5dc039834e1ea42ac334458b2e3090fe3705cc59 | { "arc:challenge": 46, "hellaswag": 73.4, "hendrycksTest": 35.5, "truthfulqa:mc": 39.9 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 48.7 |
TheBloke/Lemur-70B-Chat-v1-GPTQ | main | 12499165df1785f50df3e95940406032776401ea | { "arc:challenge": 65.3, "hellaswag": 84.4, "hendrycksTest": 64.7, "truthfulqa:mc": 57.1 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 67.9 |
TheBloke/guanaco-65B-HF | main | 7f83ae526f8b83705ca8434535da8fd8c692f9d0 | { "arc:challenge": 65.4, "hellaswag": 86.5, "hendrycksTest": 62.9, "truthfulqa:mc": 52.8 } | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 66.9 |
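Every `results` dict holds the same four benchmarks (`arc:challenge`, `hellaswag`, `hendrycksTest`, `truthfulqa:mc`), and the `score` column appears to be their unweighted mean rounded to one decimal: for duliadotio/dulia-13b-8k-alpha, (60.7 + 82 + 56.9 + 42.6) / 4 = 60.55 ≈ 60.6, while the one row with a `null` result (GigaML/X1-large) has a `null` score. Below is a minimal Python sketch, assuming the pipe-delimited row layout shown above, that parses one row and checks that relationship; it is an observation from these rows, not a documented guarantee.

```python
import json

# One row from the table above, as a single pipe-delimited record:
# model | revision | model_sha | results | commit | date | score
row = (
    "duliadotio/dulia-13b-8k-alpha | main | "
    "c3bcafd7f6133a7e7c069f8765a99fe84989d926 | "
    '{ "arc:challenge": 60.7, "hellaswag": 82, '
    '"hendrycksTest": 56.9, "truthfulqa:mc": 42.6 } | '
    "9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.6"
)

# The results cell is valid JSON and contains no " | ", so a plain
# split-and-parse is enough for a sketch like this.
model, revision, model_sha, results_raw, commit, date, score = (
    field.strip() for field in row.split(" | ")
)
results = json.loads(results_raw)

# Observed relationship: score is the unweighted mean of the four
# benchmark results, to one decimal place (null results give a null score).
mean = sum(results.values()) / len(results)
assert abs(mean - float(score)) < 0.1  # 60.55 vs. the recorded 60.6
print(f"{model}: mean = {mean:.2f}, recorded score = {score}")
```

The same split works for every row above except tiiuae/falcon-180B-chat, whose empty `model_sha` cell leaves two adjacent delimiters; a real loader should read the dataset in its native format rather than this rendered view.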