Dataset columns:

| column | type | notes |
|---|---|---|
| model | string | length 4–89 |
| revision | string | 1 distinct value (`main` in this section) |
| model_sha | string | length 0–40 |
| results | dict | per-benchmark scores |
| commit | string | length 40 |
| date | unknown | timestamp string |
| score | float64 | range 21.8–83 |
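As a minimal sketch of how a single row of this dataset might be handled in code, the snippet below defines a plain dataclass mirroring the columns above and parses the first row of the table that follows. The `EvalRecord` name and the assumption that rows arrive as JSON objects are illustrative choices, not part of the dataset itself.

```python
import json
from dataclasses import dataclass


@dataclass
class EvalRecord:
    model: str      # repo id of the evaluated model
    revision: str   # always "main" in this section
    model_sha: str  # commit hash of the model repository
    results: dict   # per-benchmark scores
    commit: str     # commit hash recorded with the result
    date: str       # evaluation timestamp
    score: float    # aggregate score


# First row of the table below, reproduced verbatim as a JSON object.
raw = """{
  "model": "TheBloke/Llama-2-70B-chat-GPTQ",
  "revision": "main",
  "model_sha": "054fbf6f65e7ab7691ec07ec9ad366acf2dd90bf",
  "results": {"arc:challenge": 62.6, "hellaswag": 84.8,
              "hendrycksTest": 62.7, "truthfulqa:mc": 51},
  "commit": "9ba100d35ce48d3d4c132947464c93c861932caa",
  "date": "2023-11-23T17:28:23",
  "score": 65.3
}"""

record = EvalRecord(**json.loads(raw))
print(record.model, record.results["hellaswag"], record.score)
```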
Every row below shares revision `main`, commit `9ba100d35ce48d3d4c132947464c93c861932caa`, and date `2023-11-23T17:28:23`; those constant columns are stated once here rather than repeated per row. The `results` dict is expanded into one column per benchmark.

| model | model_sha | arc:challenge | hellaswag | hendrycksTest | truthfulqa:mc | score |
|---|---|---|---|---|---|---|
| TheBloke/Llama-2-70B-chat-GPTQ | 054fbf6f65e7ab7691ec07ec9ad366acf2dd90bf | 62.6 | 84.8 | 62.7 | 51 | 65.3 |
| TheBloke/guanaco-13B-HF | bd59c700815124df616a17f5b49a0bc51590b231 | 57.8 | 83.8 | 48.3 | 46.7 | 59.1 |
| TheBloke/Wizard-Vicuna-30B-Uncensored-GPTQ | 56a82ece7a9309189561a590e8f4d2fe0d4be92b | 61.1 | 82.4 | 56.5 | 49.9 | 62.5 |
| TheBloke/robin-65b-v2-fp16 | 40edb31ba93045d673735361bc98f56125bbc77b | 61.9 | 84.6 | 62.5 | 52.3 | 65.3 |
| TheBloke/koala-13B-HF | b20f96a0171ce4c0fa27d6048215ebe710521587 | 53 | 77.6 | 45.3 | 50.2 | 56.5 |
| TheBloke/Kimiko-v2-13B-fp16 | 0fed305667508e50330e71a2d43e9cee5ea73783 | 61 | 83.3 | 55.2 | 40.7 | 60 |
| TheBloke/GPlatty-30B-SuperHOT-8K-fp16 | e2103a424c1700756df1c0c0b334195f37efe17b | 28.3 | 33.5 | 24.9 | 46.3 | 33.2 |
| TheBloke/Platypus-30B-SuperHOT-8K-fp16 | e8ac508308911475125252dcf2677fe355dd3700 | 25.7 | 30.8 | 23.6 | 47.1 | 31.8 |
| TheBloke/WizardLM-7B-uncensored-GPTQ | cc30c031fd795ee3d3a50312ab4549415bfbdb46 | 28.5 | 25.4 | 24.9 | 50.9 | 32.4 |
| TheBloke/Genz-70b-GPTQ | 7d38987a43d2445b193db99a029a264b39dc6c8e | 71.1 | 87.6 | 70.3 | 62.3 | 72.8 |
| TheBloke/CodeLlama-34B-Python-fp16 | 875f9d97fb6c9619d8867887dd1d80918ff0f593 | 38.1 | 34.8 | 32.9 | 43.6 | 37.4 |
| TheBloke/orca_mini_v3_7B-GPTQ | 06ddd48cd904907e3c73d2dfe47d28626053598b | 30.1 | 26 | 24.3 | 48.4 | 32.2 |
| TheBloke/wizard-vicuna-13B-GPTQ | 936a51c0219744d7a9598d0c65a7d18e01660601 | 28.7 | 25.9 | 25.8 | 48.5 | 32.2 |
| TheBloke/fiction.live-Kimiko-V2-70B-fp16 | 6b0c2cb654133cad2d4920e7da2e3f6cb1c4f7fd | 67.7 | 87.7 | 69.8 | 49.3 | 68.6 |
| TheBloke/manticore-13b-chat-pyg-GPTQ | 923f27245d13058c9c1b3ab0eab6c6c93ffc162e | 57.8 | 81.1 | 47.6 | 47.8 | 58.6 |
| TheBloke/Vicuna-33B-1-3-SuperHOT-8K-fp16 | 0b6484697d5cca5baa534b882dcad8101add8cda | 25.4 | 34.6 | 23.6 | 46.9 | 32.6 |
| TheBloke/tulu-13B-fp16 | 532aeb363b0ceee155b3cf9479ef635b797cee7c | 53.9 | 80.7 | 53.2 | 43.8 | 57.9 |
| TheBloke/Planner-7B-fp16 | afb4604a06c8541960fb51240259777764c4ce7e | 51 | 77.8 | 35.7 | 34.3 | 49.7 |
| TheBloke/Platypus2-70B-Instruct-GPTQ | 4a44568aadd8a4babfa5549cf33e6e84cbae7ab8 | 71.2 | 87.6 | 69.9 | 62.5 | 72.8 |
| TheBloke/wizardLM-7B-HF | a8e22531a48cece989e670f539eb18ebd2dbd0cf | 50.3 | 75.3 | 38.1 | 45.6 | 52.3 |
| TheBloke/Wizard-Vicuna-30B-Superhot-8K-fp16 | 062fe5409861d7386279fb534b435be39c88ceaf | 26.2 | 33 | 23.5 | 47.5 | 32.6 |
| TheBloke/Wizard-Vicuna-13B-Uncensored-HF | fff9ac7f0e2e7b340f2301f5f089d989fc03be67 | 59 | 81.9 | 47.9 | 51.7 | 60.1 |
| TheBloke/airoboros-33B-gpt4-1-4-SuperHOT-8K-fp16 | 53fdac1cdb8a37647e5dbe4199bc3fb70e617fce | 26 | 30.7 | 23.6 | 47.9 | 32.1 |
| TheBloke/Chinese-Alpaca-33B-SuperHOT-8K-fp16 | a55ce761bace8be6d17c357c57ef927751afd40c | 26.8 | 29.6 | 24.1 | 47.7 | 32 |
| TheBloke/WizardLM-33B-V1.0-Uncensored-GPTQ | 1c65902c620fcdf6b9c8e36ce17f21360e186a1e | 27.4 | 26 | 25.8 | 48.9 | 32 |
| TheBloke/Guanaco-3B-Uncensored-v2-GPTQ | c80e2f01377d551ad17c8c9bac3f52578c38d653 | 41.6 | 64.8 | 26.2 | 36.6 | 42.3 |
| TheBloke/CodeLlama-13B-Python-fp16 | 442282f4207442b828953a72c51a919c332cba5c | 33.2 | 44.5 | 25.9 | 44 | 36.9 |
| TheBloke/airoboros-13B-HF | 9219b61a0e8bc880e4cd0f8bebc48a97ee0950c7 | 58.3 | 81 | 50 | 51.6 | 60.2 |
| TheBloke/guanaco-7B-HF | 293c24105fa15afa127a2ec3905fdc2a0a3a6dac | 53 | 80.1 | 35.3 | 39.2 | 51.9 |
| TheBloke/alpaca-lora-65B-HF | 113b61b37a2862b950ada68620e57acafbcefe13 | 64.8 | 85.6 | 63.1 | 45.1 | 64.6 |
| breadlicker45/dough-instruct-base-001 | 3e1b0bf0a887feeb342982eee4f6d8041772a7dd | 23.9 | 24.8 | 23.1 | 53.4 | 31.3 |
| gaodrew/gaodrew-llama-30b-instruct-2048-Open-Platypus-100steps | 1114ff08ed15ef417502da58f0237d2f6650c9ce | 61.5 | 84.1 | 60.2 | 51 | 64.2 |
| gaodrew/OpenOrca-Platypus2-13B-thera-1250 | b1c2ebcda387211732e87911e39edca503502a33 | 59.2 | 81 | 57 | 48.4 | 61.4 |
| openbmb/UltraRM-13b | 4b231ae58c15244e6e15f0d2f4e26ec37b846229 | 28.2 | 26.1 | 26 | 47.9 | 32 |
| openbmb/UltraLM-65b | 38c0a06d99d4e2b32acffa79b011094b27118935 | 67.1 | 85 | 63.5 | 53.5 | 67.3 |
| TurkuNLP/gpt3-finnish-large | b9a3dd97387fc70d07010d469888a918842d3449 | 21.8 | 32.9 | 24.1 | 44.3 | 30.8 |
| TurkuNLP/gpt3-finnish-13B | ade35fd78ac2c29f7a56ffd3087321d297bb97a9 | 24.7 | 46.8 | 23.5 | 44.5 | 34.9 |
| TurkuNLP/gpt3-finnish-small | 20a19af481bf59f38610a2977b2b513e9df51e3a | 20.5 | 28.1 | 24.5 | 46.5 | 29.9 |
| Phind/Phind-CodeLlama-34B-v2 | 949f61e203f91b412efe8f679c798f09f0ff4b0c | 24.6 | 27.6 | 25.8 | 48.4 | 31.6 |
| gradientputri/MegaMix-A1-13B | 14e0756c210bcf420fbf825e6b8087ee5c716e7f | 61.6 | 83.5 | 58.3 | 47.5 | 62.7 |
| gradientputri/MegaMix-S1-13B | afca2c9488cf8738faec4db6721f6a4c755a5d81 | 62.5 | 83.6 | 57.9 | 44.5 | 62.1 |
| Gryphe/MythoLogic-L2-13b | 665948fc79acc2bcce3e9e7d2b0689ca43ae62d4 | 61 | 83.9 | 55.7 | 48.6 | 62.3 |
| BEE-spoke-data/smol_llama-101M-GQA | cac68b3377fd0a1eb1aca92a2e661d81f59d8b08 | 23.5 | 28.7 | 24.3 | 45.8 | 30.6 |
| BEE-spoke-data/TinyLlama-1.1bee | 5889ec467cf80a83c4092b55686f8121e81bf001 | 30.5 | 51.8 | 24.3 | 39 | 36.4 |
| BEE-spoke-data/smol_llama-81M-tied | 096e543bd36d067a819ea867c66f14d946849053 | 22.2 | 29.3 | 24.1 | 44 | 29.9 |
| BEE-spoke-data/verysmol_llama-v11-KIx2 | 1cd271d3d62a9e1dc4b7c2978e54806d74705439 | 22.7 | 27.6 | 25.3 | 44.7 | 30.1 |
| aqweteddy/Tulpar-tv_marcoroni-7b | 89f76fc1520fdf54dab892c63196e3a871b7d1ad | 41.6 | 67.1 | 32.7 | 49.4 | 47.7 |
| aqweteddy/llama_chat-tv_en_luban-tv_stable_platypus2 | 8c7858640053c11058906b0e3c73f3d3d1bf08ba | 44.5 | 61 | 49.6 | 51.9 | 51.8 |
| bertin-project/bertin-gpt-j-6B-alpaca | 636b17d6044189343475d1889f076aba73036905 | 36 | 54.3 | 27.7 | 43.4 | 40.4 |
| victor123/WizardLM-13B-1.0 | 2ea86d3c02ca0c2abb086a2145e1e85eaea4a23e | 28.5 | 26 | 23.1 | 48.6 | 31.6 |
| willnguyen/lacda-2-7B-chat-v0.1 | afca346816726b83e331bb4d93246ed5146e1675 | 53.1 | 77.6 | 46 | 44.6 | 55.3 |
| aisquared/dlite-v2-355m | f51d310aebc16a9fe0d999d2a437b5faff635716 | 28.3 | 40.5 | 26.8 | 38.8 | 33.6 |
| aisquared/chopt-2_7b | 45f57352c10a1fb1ec13c4bf387a15552ca1fe65 | 36 | 63.4 | 25.4 | 37.7 | 40.6 |
| aisquared/dlite-v1-774m | d3f5401d07965fb13c2cb8b458ffaed9a5a79c2d | 28.1 | 44.4 | 25.9 | 36.1 | 33.6 |
| aisquared/dlite-v1-124m | f6fd5f3960f31881e6cee23f5a872ecc80b40283 | 24.3 | 31.2 | 25.1 | 36.4 | 29.2 |
| aisquared/chopt-1_3b | fdd3691978f557baf9d1c20d4ede900c47f7e135 | 31.5 | 56.6 | 25.4 | 40.2 | 38.4 |
| aisquared/dlite-v2-124m | bc719f990748ea72be4b6c270df34fc3d37291dc | 24 | 31.1 | 25.3 | 39 | 29.8 |
| OpenBuddy/openbuddy-llama2-13b-v11.1-bf16 | fdbd9cc550b58aed9bee58649255191c88011829 | 51.6 | 76.2 | 56.4 | 49.7 | 58.5 |
| OpenBuddy/openbuddy-openllama-7b-v12-bf16 | bb94ff691996484b1a9d899a6c0956ef6750d86a | 42.1 | 62 | 46.5 | 45.2 | 49 |
| OpenBuddy/openbuddy-llama2-13b-v11-bf16 | 4d4e72c553e9d60fdc208663b0a1c0364caa2f30 | 53 | 75.4 | 51.4 | 47.9 | 56.9 |
| OpenBuddy/openbuddy-llama2-70b-v10.1-bf16 | a6ee90d262ac729f90ed8de97127766df070074c | 61.9 | 83.1 | 67.4 | 56.2 | 67.2 |
| OpenBuddy/openbuddy-atom-13b-v9-bf16 | 35bb2c73953f6ea40be6f0c8c6b2dfa7ecbaa0df | 51.2 | 76 | 49.7 | 48.6 | 56.4 |
| OpenBuddy/openbuddy-falcon-180b-v13-preview0 | 7d7b93ffd67d1b0c39f3503050dbbcc951948120 | 65.1 | 86.2 | 64.6 | 55 | 67.7 |
| synapsoft/Llama-2-7b-hf-flan2022-1.2M | 792f946a1413a7c58378d7a350b7d75b9df80561 | 23.3 | 78.5 | 42.3 | 38 | 45.5 |
| wannaphong/openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard | c1068f859a225e50d9d9ec74c572bfaf38573051 | 51.3 | 77.5 | 33.2 | 43.3 | 51.3 |
| hyunseoki/ko-en-llama2-13b | 2768cf6f955b65868ccbb20658e2cc444b2f3be9 | 58.2 | 81.9 | 52 | 40 | 58 |
| hyunseoki/ko-ref-llama2-13b | c5d09631c88ab5012b48187ecd90ae773cd4bbd9 | 48.4 | 73.6 | 34.8 | 35.8 | 48.2 |
| OpenLemur/lemur-70b-v1 | 74432ae16ef50207fe17fb88b2f1c1d32ef3b481 | 64.3 | 85.7 | 65.8 | 44.8 | 65.2 |
| YeungNLP/firefly-llama-13b-v1.2 | c0a56d9f5a15bea07493191b5a6295f6797a9b2c | 56.7 | 80.3 | 48.9 | 51 | 59.2 |
| YeungNLP/firefly-bloom-7b1 | 6b4385dc45c47d509b6400c41a2ff3665ad1d189 | 40.4 | 61.2 | 26.8 | 40.8 | 42.3 |
| YeungNLP/firefly-llama2-13b | 6e918dc8beb1e764def5938fdb8e3f64ba40a456 | 59.1 | 82 | 55.5 | 51.6 | 62 |
| YeungNLP/firefly-llama2-13b-chat | 9497e3bd12e19e1300bc7b1980fbe232420134b9 | 57.5 | 77.9 | 52.6 | 48.2 | 59 |
| YeungNLP/firefly-llama2-7b-chat-temp | 53d99a756c790f231e20c5aec2f10b2546ce0d38 | 51.2 | 73.3 | 45.5 | 46.8 | 54.2 |
| YeungNLP/firefly-llama2-13b-pretrain | f87d66f9c4541c575a6fad3c19a31b11568e0dfb | 53.9 | 79.1 | 51.2 | 36.2 | 55.1 |
| YeungNLP/firefly-llama2-13b-v1.2 | 97279d20a8c7e2d0576c9ff4b2e15a421c40d58a | 60.7 | 80.5 | 56.5 | 51 | 62.2 |
| YeungNLP/firefly-llama-30b | 7f035eabd1d0e7b38ace395847a623f475d90da8 | 64.2 | 83.6 | 58.2 | 53.2 | 64.8 |
| YeungNLP/firefly-llama-13b | dd326f89ce885844d714d9ab33603e0d17f56cc5 | 59 | 79.7 | 49.1 | 49.6 | 59.4 |
| prithivida/Asimov-7B-v1 | 0b33ad0a6dde60156ee6008ff47f7cfa6cd27937 | 59 | 80 | 56.3 | 51.1 | 61.6 |
| klosax/open_llama_7b_400bt_preview | 4cd3a97dcc9c25b44b552ab53f0ec01ec36acc8d | 39.5 | 65.9 | 27.6 | 36 | 42.2 |
| klosax/pythia-160m-deduped-step92k-193bt | 9eac24dad1bd7194e38ce8083a0197cee456456c | 24.2 | 32.3 | 24.5 | 43.5 | 31.1 |
| klosax/openllama-3b-350bt | 4ed354d8f9537fe0a7400772eece1a93f2bd1366 | 36.5 | 60.9 | 26.8 | 35 | 39.8 |
| klosax/open_llama_3b_350bt_preview | 4ed354d8f9537fe0a7400772eece1a93f2bd1366 | 36.5 | 60.9 | 26.8 | 35 | 39.8 |
| klosax/open_llama_13b_600bt_preview | 3465eaca4d293ccc6ce66888e6c8bd9032ae7071 | 44.3 | 72.4 | 31.5 | 34.7 | 45.7 |
| wenge-research/yayi-70b-llama2 | 2799b262292f78f7c3965a1410d0ad6211438603 | 60.7 | 83.9 | 64.4 | 47.6 | 64.2 |
| wenge-research/yayi-13b-llama2 | 9fc1bc4409b9e71f54213245a91c2742fbf7b3d0 | 48.5 | 74.8 | 38.7 | 42.2 | 51 |
| deepse/CodeUp-Llama-2-13b-chat-hf | d4af0b233a5b6a214e96582e103396e99dcf5f95 | 59 | 81.9 | 54.6 | 44.1 | 59.9 |
| ikala/bloom-zh-3b-chat | 4ea0ad223a2623fc15e8824c1c4f8e6539bc40b0 | 38.8 | 54.7 | 31.6 | 41.3 | 41.6 |
| wei123602/Llama-2-13b-FINETUNE4_compare8k2 | fe1b604097aad9408ce63fa7ffc9c320cdd06e4f | 58.3 | 81.4 | 56.9 | 39.9 | 59.1 |
| wei123602/llama2-13b-FINETUNE3_TEST | 22cea7bf138eb0d6c962812df2b2235290acbee2 | 53.7 | 79.7 | 54.5 | 40.2 | 57 |
| wei123602/Llama-2-13b-FINETUNE4 | 939d06081210fa943c60210a47583f43b60901ad | 58.7 | 81.9 | 57.2 | 43.3 | 60.3 |
| wei123602/FINETUNE3_TEST4 | 5195e87bb34317c5aaf201faa476aae78ecc9f1b | 55.6 | 81.3 | 52.1 | 41.1 | 57.5 |
| wei123602/Llama-2-13b-FINETUNE4_TEST | 0ed198a814192b06e60715112d2a4b6bfd630806 | 54.8 | 81.5 | 56 | 39.1 | 57.8 |
| Mikael110/llama-2-7b-guanaco-fp16 | f769fed10874af73ad12115efd044cb4a64506b0 | 54.9 | 79.6 | 46.4 | 43.8 | 56.2 |
| xxyyy123/10k_v1_lora_qkvo_rank14_v3 | 3267b35a0215d937884a6228fdbb91f2fa23d935 | 56 | 79.2 | 50.7 | 53.4 | 59.8 |
| xxyyy123/test_qkvo_adptor | eeb9a04e95c03dd03f0d664e34c56099cabbc402 | 55.4 | 79 | 51.6 | 53.5 | 59.9 |
| xxyyy123/test_merge_p_ov1_w0.66_w0.5_n1 | e2349e81b46839bc8bedfc2c944ab35c640a5b51 | 62.5 | 82.4 | 58 | 56.2 | 64.8 |
| xxyyy123/20k_v1_lora_qkvo_rank14_v2 | fb849cfd4fd7c856e032d0576e3685ee54e68200 | 55.4 | 79.1 | 50.7 | 51.6 | 59.2 |
| xxyyy123/10k_v1_lora_qkvo_rank28_v2 | 70e38a7424544193f0ad6a93ae26a5bfd15e4e90 | 55.4 | 79.2 | 50.5 | 52.8 | 59.5 |
| Weyaxi/ChatAYT-Lora-Assamble-Marcoroni | 51c9b600023cd26c4eb3754b9a89c60dde959ccc | 62.5 | 83.1 | 58.7 | 56.1 | 65.1 |
| Weyaxi/OpenOrca-Nebula-7B | dbfa63b0d89340ecad26cb64385e9dd588456819 | 58.7 | 81.8 | 57.8 | 53.2 | 62.9 |
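In the rows above, the aggregate `score` tracks the unweighted mean of the four benchmark results; for example, for TheBloke/Llama-2-70B-chat-GPTQ, (62.6 + 84.8 + 62.7 + 51) / 4 ≈ 65.3. This is an observation about the numbers shown here, not a documented definition of the column, and the per-benchmark values are themselves rounded to one decimal, so the recomputed mean can differ from the listed score by a few hundredths. A minimal sketch of that check, using two rows from the table:

```python
# Recompute the mean of the four benchmark results for a couple of rows and
# compare it with the listed aggregate score, allowing a small tolerance for
# the rounding already baked into the displayed per-benchmark values.
rows = [
    # (model, arc:challenge, hellaswag, hendrycksTest, truthfulqa:mc, listed score)
    ("TheBloke/Llama-2-70B-chat-GPTQ", 62.6, 84.8, 62.7, 51.0, 65.3),
    ("Weyaxi/OpenOrca-Nebula-7B", 58.7, 81.8, 57.8, 53.2, 62.9),
]

for model, arc, hella, mmlu, tqa, listed in rows:
    mean = (arc + hella + mmlu + tqa) / 4
    assert abs(mean - listed) <= 0.1, (model, mean, listed)
    print(f"{model}: recomputed {mean:.2f}, listed {listed}")
```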