model | revision | model_sha | results | commit | date | score
---|---|---|---|---|---|---
Delcos/NATE-7b | main | dd844a22b3b1ec4ad1757ce1ce184b8c765ae4c9 | {"arc:challenge": 60.9, "hellaswag": 82.1, "hendrycksTest": 58.9, "truthfulqa:mc": 57.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 64.8
alibidaran/medical_transcription_generator | main | f622239151c89c2db0f1cef495d1b42afd16ce64 | {"arc:challenge": 22.8, "hellaswag": 30.6, "hendrycksTest": 23.8, "truthfulqa:mc": 46.5} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 30.9
nkpz/llama2-22b-daydreamer-v3 | main | e6c74222958328e50712aa00294dc818c24075b2 | {"arc:challenge": 56.1, "hellaswag": 80.1, "hendrycksTest": 52.5, "truthfulqa:mc": 42.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 57.8
nkpz/llama2-22b-chat-wizard-uncensored | main | 90cffebc8f530161505b84740ff6c8f646299d6c | {"arc:challenge": 56.2, "hellaswag": 80.4, "hendrycksTest": 53.6, "truthfulqa:mc": 45.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59
fangloveskari/Platypus_QLoRA_LLaMA_70b | main | b9b8560832276f60ba6bf37ac913b230a85ac19b | {"arc:challenge": 72.1, "hellaswag": 87.5, "hendrycksTest": 71, "truthfulqa:mc": 61.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 73
fangloveskari/ORCA_LLaMA_70B_QLoRA | main | ef9b04ef02ccc4d96f1181467da92bb6b5baf835 | {"arc:challenge": 72.3, "hellaswag": 87.7, "hendrycksTest": 70.2, "truthfulqa:mc": 63.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 73.4
kajdun/viwaai-30b_v4 | main | 9519ed20f06dd5a88e280ba6a8c5c9956213f10a | {"arc:challenge": 63.5, "hellaswag": 84.2, "hendrycksTest": 57.5, "truthfulqa:mc": 53.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 64.6
kajdun/iubaris-13b-v3 | main | eafacff141e7714da7e58625cc779e07c0034263 | {"arc:challenge": 59.1, "hellaswag": 81.8, "hendrycksTest": 54.4, "truthfulqa:mc": 48.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61
TehVenom/PPO_Shygmalion-6b | main | 573e4546fdccc5c8a52b9d7cb23a2e10f0f2ef51 | {"arc:challenge": 40.3, "hellaswag": 66.9, "hendrycksTest": 27.5, "truthfulqa:mc": 34.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 42.2
TehVenom/GPT-J-Pyg_PPO-6B | main | cde5bab3ae16e1704c5fec54a6a7ff1169c935e6 | {"arc:challenge": 42.1, "hellaswag": 67.5, "hendrycksTest": 28.5, "truthfulqa:mc": 32} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 42.5
TehVenom/Dolly_Shygmalion-6b | main | 108fabf8a916900525492c294c50998d7c09f10b | {"arc:challenge": 41.9, "hellaswag": 68.5, "hendrycksTest": 27.6, "truthfulqa:mc": 33.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43
TehVenom/Metharme-13b-Merged | main | 90c02cc338afcdd890a948af06432674743363ad | {"arc:challenge": 59.9, "hellaswag": 81.1, "hendrycksTest": 47.2, "truthfulqa:mc": 51.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.8
TehVenom/Moderator-Chan_GPT-JT-6b | main | f2b7cda25f6965c1551fa78e9e38676994bc6638 | {"arc:challenge": 43.7, "hellaswag": 70.8, "hendrycksTest": 35.6, "truthfulqa:mc": 36} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 46.5
TehVenom/Dolly_Shygmalion-6b-Dev_V8P2 | main | 6413b1d9e8b58df9d3aac91a862e8d505d8c6716 | {"arc:challenge": 41.4, "hellaswag": 67.7, "hendrycksTest": 28.5, "truthfulqa:mc": 36.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43.6
psyche/kollama2-7b-v2 | main | d5b6e9d5b882d4f6ba322396e027925ed915f848 | {"arc:challenge": 53.3, "hellaswag": 78.5, "hendrycksTest": 43.6, "truthfulqa:mc": 46.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 55.4
psyche/kogpt | main | 4c02d48f548103ba53a5e481b8aa81bf7a259287 | {"arc:challenge": 21.2, "hellaswag": 28.1, "hendrycksTest": 26.6, "truthfulqa:mc": 42.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 29.5
psyche/kollama2-7b-v3 | main | b211c57902035342b6a92d61cd5f3afd306ddc9b | {"arc:challenge": 49.7, "hellaswag": 78.5, "hendrycksTest": 40.4, "truthfulqa:mc": 42.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 52.9
AGI-inc/lora_moe_7b | main | 3a528bdd73a12adc73f841a6d46bd363fe690023 | {"arc:challenge": 50.9, "hellaswag": 77.8, "hendrycksTest": 35.7, "truthfulqa:mc": 34.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 49.7
AGI-inc/lora_moe_7b_baseline | main | ad8065c8357945e6c07569033f5eba82c67c72ed | {"arc:challenge": 50.9, "hellaswag": 77.8, "hendrycksTest": 35.7, "truthfulqa:mc": 34.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 49.7
ValiantLabs/ShiningValiantXS | main | 8c1f86bd2e646408eed2ed3a2634b38ea4e5c599 | {"arc:challenge": 63.5, "hellaswag": 83.6, "hendrycksTest": 59.8, "truthfulqa:mc": 56} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 65.7
ValiantLabs/ShiningValiant | main | 28bf6be8cac2c2e33df7d4d4fb4f7370c680e23f | {"arc:challenge": 73, "hellaswag": 87.9, "hendrycksTest": 71, "truthfulqa:mc": 64.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 74.2
GeneZC/MiniChat-3B | main | 123d23bd291bb2d5fdb3b91dc1570d0b11654a78 | {"arc:challenge": 44, "hellaswag": 67.2, "hendrycksTest": 39.2, "truthfulqa:mc": 45.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 49
GeneZC/MiniMA-3B | main | 0a2f9d6bbb3959d68fe52e07ee6f54e8242f91ec | {"arc:challenge": 43.4, "hellaswag": 68.1, "hendrycksTest": 28.7, "truthfulqa:mc": 39.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 45
Devio/test-1400 | main | 95194d494effb691edae0d596bc5df9856ee92d7 | {"arc:challenge": 38.1, "hellaswag": 66.2, "hendrycksTest": 28.6, "truthfulqa:mc": 36.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 42.4
Devio/test100 | main | 6bd139260f60232328b05b2cd973c3d8f07c0c02 | {"arc:challenge": 37.4, "hellaswag": 58.5, "hendrycksTest": 27.3, "truthfulqa:mc": 34} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 39.3
Devio/test-9k-fn | main | b2fc754748ee94428298de3528e549b296d51c1e | {"arc:challenge": 40.9, "hellaswag": 69.4, "hendrycksTest": 29.5, "truthfulqa:mc": 39.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 44.7
Devio/testC | main | 318159010931164dcacb5dc2a7a54d48990fb969 | {"arc:challenge": 39.6, "hellaswag": 62.9, "hendrycksTest": 27.8, "truthfulqa:mc": 35.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 41.5
Devio/test-22B | main | cd72f5954ab5801dd2c1b499e59265f7504f9ee6 | {"arc:challenge": 39.4, "hellaswag": 64.5, "hendrycksTest": 27.1, "truthfulqa:mc": 37.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 42
Kunhao/pile-7b | main | aa1c2fff615235b007e15ce191b35816959ace99 | {"arc:challenge": 26.8, "hellaswag": 38.8, "hendrycksTest": 26.5, "truthfulqa:mc": 42.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 33.6
42dot/42dot_LLM-PLM-1.3B | main | a72bf57eb02cd4ea4388a344b4a5893aa95698da | {"arc:challenge": 32.4, "hellaswag": 56.4, "hendrycksTest": 27.1, "truthfulqa:mc": 38.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 38.7
42dot/42dot_LLM-SFT-1.3B | main | 7474cafe5dc60549c19f89f7c49392a8a32b9199 | {"arc:challenge": 36.1, "hellaswag": 59, "hendrycksTest": 25.5, "truthfulqa:mc": 40} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 40.2
winglian/basilisk-4b | main | b91c2e5389f4f0ce2d6042fdce5927343d8dcb06 | {"arc:challenge": 25.9, "hellaswag": 39.6, "hendrycksTest": 24.6, "truthfulqa:mc": 43.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 33.4
winglian/Llama-2-3b-hf | main | 293f071b223efd7959f9e1fac66285369aaa959d | {"arc:challenge": 27, "hellaswag": 26.5, "hendrycksTest": 23.3, "truthfulqa:mc": 50.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.9
JosephusCheung/Guanaco | main | bed6f3bd18f07a4a379525645cbd86d622b12836 | {"arc:challenge": 50.2, "hellaswag": 72.7, "hendrycksTest": 30.3, "truthfulqa:mc": 37.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 47.7
JosephusCheung/Pwen-7B-Chat-20_30 | main | e6c38a7d2f4ba7b867fff421c08c02ba1908224e | {"arc:challenge": 51.5, "hellaswag": 74, "hendrycksTest": 62.1, "truthfulqa:mc": 47} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 58.6
roneneldan/TinyStories-1M | main | 8cd14d5339178f1b285f55baee14a0deff7103ac | {"arc:challenge": 23.5, "hellaswag": 25.2, "hendrycksTest": 24.6, "truthfulqa:mc": 49.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 30.7
roneneldan/TinyStories-28M | main | 52dabea9997faf578489d619249616926e54ed18 | {"arc:challenge": 22.8, "hellaswag": 25.8, "hendrycksTest": 23.5, "truthfulqa:mc": 48.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 30
AlekseyKorshuk/pygmalion-6b-vicuna-chatml | main | ee3ada91a69a194cedfabbfeab98f1499b75cb44 | {"arc:challenge": 40.6, "hellaswag": 67.7, "hendrycksTest": 33.9, "truthfulqa:mc": 42.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 46.2
AlekseyKorshuk/chatml-pyg-v1 | main | 79d5a4d53953ca1c26bc2155f168b7e2108f377f | {"arc:challenge": 37.9, "hellaswag": 63.3, "hendrycksTest": 32.8, "truthfulqa:mc": 42.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 44.2
nnpy/Nape-0 | main | 47e07bd518b989890a7f694d39e2772e703384c9 | {"arc:challenge": 32.7, "hellaswag": 58.7, "hendrycksTest": 24.9, "truthfulqa:mc": 39} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 38.8
ajibawa-2023/Uncensored-Jordan-7B | main | 96a9fbe5aaef8410a8d0dad25f3cc97b408c4efb | {"arc:challenge": 51.3, "hellaswag": 77.4, "hendrycksTest": 45.7, "truthfulqa:mc": 47.5} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 55.5
ajibawa-2023/scarlett-33b | main | 305eea72fb9fe2ac5929a62483ea51f152bcc060 | {"arc:challenge": 67.7, "hellaswag": 85.5, "hendrycksTest": 59, "truthfulqa:mc": 61.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 68.3
ajibawa-2023/Uncensored-Frank-13B | main | 73a27445e5e5a72857626e551c70542ec607f60c | {"arc:challenge": 61.6, "hellaswag": 82.6, "hendrycksTest": 54.6, "truthfulqa:mc": 48.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.8
ajibawa-2023/Python-Code-13B | main | 981454b6a2275f787592589609df7f2bf558706d | {"arc:challenge": 58.8, "hellaswag": 81.7, "hendrycksTest": 54.8, "truthfulqa:mc": 42.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59.5
ajibawa-2023/Uncensored-Jordan-13B | main | c56a396342133bbd75ab3f79622c85cb55be49a4 | {"arc:challenge": 57.4, "hellaswag": 82.7, "hendrycksTest": 55.7, "truthfulqa:mc": 50.5} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.6
ajibawa-2023/carl-7b | main | de4c7af9598bebc47dd43253c972be719f3195d6 | {"arc:challenge": 53.5, "hellaswag": 78.3, "hendrycksTest": 34, "truthfulqa:mc": 40.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 51.5
ICBU-NPU/FashionGPT-70B-V1.1 | main | 05941a3eaacff0dead79b09d2175b5d7b98c525b | {"arc:challenge": 71.8, "hellaswag": 88.2, "hendrycksTest": 71, "truthfulqa:mc": 65.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 74.1
beaugogh/pythia-1.4b-deduped-sharegpt | main | 03dfdc25c111a6a4a16d3da12190697611936426 | {"arc:challenge": 34.3, "hellaswag": 54.5, "hendrycksTest": 24, "truthfulqa:mc": 41.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 38.6
beaugogh/Llama2-7b-sharegpt4 | main | 922d1d963ad1b042c30b774a818d9f6180c28075 | {"arc:challenge": 55.9, "hellaswag": 80.8, "hendrycksTest": 47.2, "truthfulqa:mc": 45.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 57.4
beaugogh/Llama2-13b-sharegpt4 | main | 294c40349bf0c5377f71d92e7539bf5de3176a74 | {"arc:challenge": 61.8, "hellaswag": 84.5, "hendrycksTest": 55.2, "truthfulqa:mc": 45.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.8
L-R/LLmRa-2.7B | main | 93201b7d778272fb3252481c1cbd56f726d43e6b | {"arc:challenge": 37, "hellaswag": 60.6, "hendrycksTest": 25.6, "truthfulqa:mc": 35.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 39.6
ausboss/llama7b-wizardlm-unfiltered | main | 2123beec77083c414b2ae51dd25b7a870b0b936c | {"arc:challenge": 53, "hellaswag": 77.9, "hendrycksTest": 36.4, "truthfulqa:mc": 37.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 51.3
Expert68/llama2_13b_instructed_version2 | main | ea321257d81e0f41c985f5155297b7fbd6ac375a | {"arc:challenge": 60.1, "hellaswag": 84, "hendrycksTest": 55.6, "truthfulqa:mc": 46.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.4
garage-bAInd/Stable-Platypus2-13B | main | 0e54aa49c24617e30a23a20c0c5da61419b9fe68 | {"arc:challenge": 62.7, "hellaswag": 82.3, "hendrycksTest": 58.3, "truthfulqa:mc": 52.5} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 64
garage-bAInd/Platypus2-70B | main | 16b6583ad58313331f86be18e531ab03f1857695 | {"arc:challenge": 70.6, "hellaswag": 87.2, "hendrycksTest": 70.1, "truthfulqa:mc": 52.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 70.1
garage-bAInd/Platypus2-13B | main | b5e926e3d6c03e83c7983e87eb71098b5e80a62e | {"arc:challenge": 61.3, "hellaswag": 82.6, "hendrycksTest": 56.7, "truthfulqa:mc": 44.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.4
garage-bAInd/Platypus2-7B | main | f784afa7887b0738d92ea470797582756f02e630 | {"arc:challenge": 55.2, "hellaswag": 78.8, "hendrycksTest": 49.8, "truthfulqa:mc": 40.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 56.1
garage-bAInd/Platypus-30B | main | c5d21054f8dd71099696bd7790df07ac54990f29 | {"arc:challenge": 64.6, "hellaswag": 84.3, "hendrycksTest": 64.2, "truthfulqa:mc": 45.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 64.6
Panchovix/WizardLM-33B-V1.0-Uncensored-SuperHOT-8k | main | b6d0002b10d43ab48aa14e365d9e7b40655ec160 | {"arc:challenge": 25.4, "hellaswag": 32, "hendrycksTest": 23.4, "truthfulqa:mc": 47} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 32
FPHam/Karen_TheEditor_V2_STRICT_Mistral_7B | main | 0935960b2765aa23d7a63c49873361b09dd12f60 | {"arc:challenge": 59.6, "hellaswag": 81.8, "hendrycksTest": 59.6, "truthfulqa:mc": 49.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 62.6
922-CA/monika-ddlc-7b-v1 | main | 4157d696bb0015da3ba26a58c1d24925515e4125 | {"arc:challenge": 54.9, "hellaswag": 76.8, "hendrycksTest": 45.6, "truthfulqa:mc": 43.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 55.3
ToolBench/ToolLLaMA-7b-LoRA | main | 67f2e8af850049a86fb9ee8ef581deb0f51e58e6 | {"arc:challenge": 53, "hellaswag": 78.6, "hendrycksTest": 46.9, "truthfulqa:mc": 38.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.3
yeontaek/llama-2-13B-ensemble-v4 | main | 2c8efc96563bfb80b4a9b9141b8c0bf64eb5056c | {"arc:challenge": 63, "hellaswag": 82.4, "hendrycksTest": 56.5, "truthfulqa:mc": 51.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 63.4
yeontaek/llama-2-13b-Beluga-QLoRA | main | c0d3c0a5d4e9001ea933c6b71ca3adc99d1f71a2 | {"arc:challenge": 59.2, "hellaswag": 81.9, "hendrycksTest": 56.7, "truthfulqa:mc": 48.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.5
yeontaek/Platypus2xOpenOrca-13B-LoRa-v2 | main | 568ac6a5f1a9f5eb6bc09efb2188740d771ed0e9 | {"arc:challenge": 58.6, "hellaswag": 81.2, "hendrycksTest": 50.2, "truthfulqa:mc": 43.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 58.4
yeontaek/Platypus2xOpenOrca-13B-IA3-ensemble | main | 39d43c517b2847048111b971a600ce9998cdfddc | {"arc:challenge": 62.1, "hellaswag": 82.3, "hendrycksTest": 59.1, "truthfulqa:mc": 47.5} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 62.8
yeontaek/llama-2-13b-Guanaco-QLoRA | main | 67e68284234538d3851d5c0c334383daffec57a2 | {"arc:challenge": 61.1, "hellaswag": 83, "hendrycksTest": 55.5, "truthfulqa:mc": 44.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.9
yeontaek/Platypus2xOpenOrca-13B-LoRa | main | 8b2f5d65c03d415b7c43530def622e133e1ef014 | {"arc:challenge": 60.8, "hellaswag": 82.1, "hendrycksTest": 58.8, "truthfulqa:mc": 45.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.7
yeontaek/llama-2-70B-ensemble-v7 | main | 38f0f101ba06039bdc9677c686d9502ba942362a | {"arc:challenge": 70.3, "hellaswag": 87.3, "hendrycksTest": 68.3, "truthfulqa:mc": 63.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 72.2
yeontaek/airoboros-2.1-llama-2-13B-QLoRa | main | ebf991c8d34314caab6ccc6b078c681d20bac39a | {"arc:challenge": 59.7, "hellaswag": 82.9, "hendrycksTest": 54.8, "truthfulqa:mc": 45.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.6
yeontaek/Platypus2xOpenOrca-13B-IA3-v3 | main | 17493c1f2e4620a44d7947edad0386d338e805ce | {"arc:challenge": 62.5, "hellaswag": 82.1, "hendrycksTest": 58.7, "truthfulqa:mc": 47} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 62.6
yeontaek/llama-2-70B-ensemble-v4 | main | fed7f766c2977bb13ba372ba63c1bb9f8af263d8 | {"arc:challenge": 70.9, "hellaswag": 87.3, "hendrycksTest": 69.7, "truthfulqa:mc": 62.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 72.6
yeontaek/WizardCoder-Python-13B-LoRa | main | 32ffc44ffdf1adfe2d8ef219327fbd534f3d5955 | {"arc:challenge": 47.8, "hellaswag": 69.6, "hendrycksTest": 38.8, "truthfulqa:mc": 44} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 50
yeontaek/llama-2-70B-ensemble-v8 | main | 81c4b32ec8062b3e9af4492fc6590f2efa6451d9 | {"arc:challenge": 67.2, "hellaswag": 84.6, "hendrycksTest": 63.6, "truthfulqa:mc": 62.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 69.4
yeontaek/Platypus2xOpenOrcaxGuanaco-13B-LoRa | main | 2f33c341f9308b3d851f3d04f2f078f86972b5a0 | {"arc:challenge": 61.3, "hellaswag": 80.5, "hendrycksTest": 57.8, "truthfulqa:mc": 41.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.4
yeontaek/llama-2-13B-ensemble-v1 | main | 95c37a1fcf3a2f8ef2f410550c2a8002e4fe24f9 | {"arc:challenge": 62.3, "hellaswag": 82.4, "hendrycksTest": 57.6, "truthfulqa:mc": 50.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 63.1
yeontaek/llama-2-13B-ensemble-v6 | main | 8134f0296812055a7008c9e2f31f68d59f962908 | {"arc:challenge": 52.2, "hellaswag": 81, "hendrycksTest": 57.4, "truthfulqa:mc": 52.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.8
yeontaek/Platypus2-13B-LoRa | main | 1450c541cf9e378e81862fabeb234b8e0a2bdf5a | {"arc:challenge": 60.7, "hellaswag": 82.5, "hendrycksTest": 56.3, "truthfulqa:mc": 43.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.8
yeontaek/Platypus2xOpenOrca-13B-IA3-v2 | main | 6c8129720b3909afea42f3e38516f4f531063b17 | {"arc:challenge": 62.3, "hellaswag": 82.1, "hendrycksTest": 57.9, "truthfulqa:mc": 47} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 62.3
yeontaek/llama-2-13B-ensemble-v5 | main | 177a954f7362bf6d1b9e06dbf17e9afccd774f3e | {"arc:challenge": 62.6, "hellaswag": 83.1, "hendrycksTest": 59.5, "truthfulqa:mc": 53.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 64.6
yeontaek/llama-2-70B-ensemble-v3 | main | 4fff2168beb2f7d4ec18320138f0ef0209bb0c40 | {"arc:challenge": 68.5, "hellaswag": 87.2, "hendrycksTest": 68.2, "truthfulqa:mc": 64.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 72
yeontaek/Platypus2-13B-LoRa-v2 | main | e85fb36125224af68581b2e2583532f3314b8b29 | {"arc:challenge": 59.5, "hellaswag": 82.4, "hendrycksTest": 57.1, "truthfulqa:mc": 41.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.2
yeontaek/llama-2-13B-ensemble-v3 | main | 738f5b53fecaf2b51789c77c4c28fe5b77fbd7d8 | {"arc:challenge": 62.4, "hellaswag": 82.3, "hendrycksTest": 57.7, "truthfulqa:mc": 49.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 63
yeontaek/llama-2-70B-ensemble-v6 | main | 2edcbe2de75f565c5ed0f8055fbd13aa09e8bef6 | {"arc:challenge": 71, "hellaswag": 87.2, "hendrycksTest": 68.1, "truthfulqa:mc": 62.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 72.2
yeontaek/llama-2-70B-ensemble-v5 | main | cf4292aae32e62d27baf59b0d3db1be38f817631 | {"arc:challenge": 71.2, "hellaswag": 87.2, "hendrycksTest": 69.6, "truthfulqa:mc": 63.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 72.8
yeontaek/llama-2-70B-ensemble-v2 | main | 7ee1ce68410f18e2bff12925fcc8354f4a7410cd | {"arc:challenge": 68.8, "hellaswag": 85.4, "hendrycksTest": 68, "truthfulqa:mc": 64.5} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 71.7
bhenrym14/airophin-v2-13b-PI-8k-fp16 | main | 26b7edfd282af223d86d5e539451357bb114247b | {"arc:challenge": 60.6, "hellaswag": 83, "hendrycksTest": 56.7, "truthfulqa:mc": 40.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.1
bhenrym14/airophin-13b-pntk-16k-fp16 | main | 6b5418b69e8270df659eacb192f469e7c3af70b3 | {"arc:challenge": 61.4, "hellaswag": 82.8, "hendrycksTest": 55.4, "truthfulqa:mc": 43.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.7
bhenrym14/mistral-7b-platypus-fp16 | main | d836a261afa0871d3734a7dfd1a28dc23c173ea7 | {"arc:challenge": 63.1, "hellaswag": 84.1, "hendrycksTest": 64.1, "truthfulqa:mc": 45.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 64.1
cmarkea/bloomz-7b1-mt-sft-chat | main | 8c2dc302780fe320ee3428f3db2ee7ff3684dcef | {"arc:challenge": 44, "hellaswag": 62.6, "hendrycksTest": 38.6, "truthfulqa:mc": 44.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 47.4
khoantap/wizard-limarp | main | 7301565c37edfe74296dbb280c69aab05e82d39a | {"arc:challenge": 58.6, "hellaswag": 81.9, "hendrycksTest": 55, "truthfulqa:mc": 48.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61
digitous/Adventien-GPTJ | main | 4fbfe9eae03a1d6ecf60fda8cf39c4123f0438bd | {"arc:challenge": 42.5, "hellaswag": 69.2, "hendrycksTest": 25.4, "truthfulqa:mc": 36.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43.5
digitous/Janin-GPTJ | main | a6773861798f2abea3849514aa6f60961518af9c | {"arc:challenge": 40.9, "hellaswag": 67.3, "hendrycksTest": 27.4, "truthfulqa:mc": 36.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43
digitous/Janin-R | main | f6963f77098d8421ff4a1cf4d36f1e94c6c8f44b | {"arc:challenge": 40.4, "hellaswag": 67.4, "hendrycksTest": 31.2, "truthfulqa:mc": 34.5} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43.4
digitous/13B-Chimera | main | 85cfe8e6db2bee804873cfdb48955696cc5b0689 | {"arc:challenge": 57.6, "hellaswag": 81.5, "hendrycksTest": 49.9, "truthfulqa:mc": 52.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.4
digitous/Alpacino13b | main | 7092a5c8dec649694dd66ff8cfe5452ce52e6a40 | {"arc:challenge": 58.5, "hellaswag": 81.3, "hendrycksTest": 47.9, "truthfulqa:mc": 41.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 57.4
digitous/Javelin-GPTJ | main | bee7068ab002784420a1a30170db3906185359f2 | {"arc:challenge": 42.7, "hellaswag": 70.5, "hendrycksTest": 26.2, "truthfulqa:mc": 36.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43.9
digitous/Skegma-GPTJ | main | 4dff006b2ea7e8d9b067dfe8af8ca1a16bc44dce | {"arc:challenge": 43.8, "hellaswag": 69.2, "hendrycksTest": 25.4, "truthfulqa:mc": 34.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43.3
digitous/Javalion-R | main | b881231ab6ea85da2a9a139f282df85d1d18b002 | {"arc:challenge": 41.7, "hellaswag": 68, "hendrycksTest": 30.8, "truthfulqa:mc": 34.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43.7
rameshm/llama-2-13b-mathgpt-v4 | main | c5072a762070c6b3756385c63805348c155004b5 | {"arc:challenge": 50.9, "hellaswag": 75.6, "hendrycksTest": 43.8, "truthfulqa:mc": 42} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 53.1
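The table itself does not state how the `score` column relates to the per-benchmark results, but on every row spot-checked it matches the unweighted mean of the four benchmark values: for Delcos/NATE-7b, (60.9 + 82.1 + 58.9 + 57.2) / 4 = 64.775, which rounds to the listed 64.8. A minimal Python sketch of that assumed averaging rule, using a row copied from the table:

```python
# Assumption: `score` is the unweighted mean of the four benchmark scores,
# rounded to one decimal place. Small +/-0.1 discrepancies are possible, since
# the per-benchmark values shown in the table are themselves rounded.

def overall_score(results: dict) -> float:
    """Unweighted mean of the per-benchmark scores, rounded to one decimal."""
    return round(sum(results.values()) / len(results), 1)

# Row copied verbatim from the table above.
row = {
    "model": "Delcos/NATE-7b",
    "results": {
        "arc:challenge": 60.9,
        "hellaswag": 82.1,
        "hendrycksTest": 58.9,
        "truthfulqa:mc": 57.2,
    },
    "score": 64.8,
}

print(overall_score(row["results"]))  # 64.8, matching the `score` column
```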