IlyasMoutawwakil (HF Staff) committed on
Commit 1df1094 · verified · 1 Parent(s): 196cc21

Upload llm-df.csv with huggingface_hub
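
For reference, a commit like this is typically produced with the `huggingface_hub` `upload_file` API. A minimal sketch follows; the repo id, repo type, and token handling are illustrative assumptions, not taken from this commit:

```python
# Sketch: push a local CSV to the Hub the way this commit message suggests.
# repo_id and repo_type below are assumptions for illustration only.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from the local HF cache / HF_TOKEN env var
api.upload_file(
    path_or_fileobj="llm-df.csv",         # local file to upload
    path_in_repo="llm-df.csv",            # destination path inside the repo
    repo_id="IlyasMoutawwakil/llm-df",    # assumed repo id (illustrative)
    repo_type="dataset",                  # assumed: the file is tabular leaderboard data
    commit_message="Upload llm-df.csv with huggingface_hub",
)
```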

Files changed (1)
  1. llm-df.csv +30 -28
llm-df.csv CHANGED
@@ -44,6 +44,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
44
  πŸ’¬,MTSAIR/MultiVerse_70B,31.73,52.49,0.52,46.14,0.62,16.16,0.16,13.87,0.35,18.82,0.47,42.89,0.49,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,other,72,38,True,063430cdc4d972a0884e3e3e3d45ea4afbdf71a2,True,True,2024-06-29,2024-03-25,False,False,MTSAIR/MultiVerse_70B,0
45
  🀝,paloalma/Le_Triomphant-ECE-TW3,31.66,54.02,0.54,44.96,0.61,17.45,0.17,13.2,0.35,18.5,0.47,41.81,0.48,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,72,3,True,f72399253bb3e65c0f55e50461488c098f658a49,True,True,2024-07-25,2024-04-01,False,False,paloalma/Le_Triomphant-ECE-TW3,0
46
  πŸ”Ά,failspy/Phi-3-medium-4k-instruct-abliterated-v3,31.55,63.19,0.63,46.73,0.63,14.12,0.14,8.95,0.32,18.52,0.46,37.78,0.44,πŸ”Ά fine-tuned on domain-specific datasets,Phi3ForCausalLM,Original,bfloat16,True,mit,13,22,True,959b09eacf6cae85a8eb21b25e998addc89a367b,True,True,2024-07-29,2024-05-22,True,False,failspy/Phi-3-medium-4k-instruct-abliterated-v3,0
 
47
  πŸ’¬,Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO,31.42,47.99,0.48,51.03,0.65,17.45,0.17,10.18,0.33,20.53,0.48,41.37,0.47,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,float16,True,mit,13,3,True,b749dbcb19901b8fd0e9f38c923a24533569f895,True,True,2024-08-13,2024-06-15,True,False,Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO,0
48
  πŸ’¬,CohereForAI/c4ai-command-r-plus,30.86,76.64,0.77,39.92,0.58,7.55,0.08,7.38,0.31,20.42,0.48,33.24,0.4,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",CohereForCausalLM,Original,float16,True,cc-by-nc-4.0,103,1640,True,fa1bd7fb1572ceb861bbbbecfa8af83b29fa8cca,True,True,2024-06-13,2024-04-03,True,True,CohereForAI/c4ai-command-r-plus,0
49
  πŸ’¬,internlm/internlm2_5-7b-chat,30.46,61.4,0.61,57.67,0.71,8.31,0.08,10.63,0.33,14.35,0.44,30.42,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",InternLM2ForCausalLM,Original,float16,True,other,7,148,True,bebb00121ee105b823647c3ba2b1e152652edc33,True,True,2024-07-03,2024-06-27,True,True,internlm/internlm2_5-7b-chat,0
@@ -51,6 +52,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
51
  🀝,altomek/YiSM-34B-0rn (Merge),30.15,42.84,0.43,45.38,0.61,20.62,0.21,16.22,0.37,14.76,0.44,41.06,0.47,🀝 base merges and moerges,LlamaForCausalLM,Original,float16,False,apache-2.0,34,1,True,7a481c67cbdd5c846d6aaab5ef9f1eebfad812c2,True,True,2024-06-27,2024-05-26,True,False,altomek/YiSM-34B-0rn,1
52
  🀝,paloalma/ECE-TW3-JRGL-V1,30.02,55.35,0.55,46.7,0.63,11.86,0.12,12.98,0.35,17.46,0.46,35.79,0.42,🀝 base merges and moerges,LlamaForCausalLM,Original,float16,False,apache-2.0,68,1,True,2f08c7ab9db03b1b9f455c7beee6a41e99aa910e,True,True,2024-08-04,2024-04-03,False,False,paloalma/ECE-TW3-JRGL-V1,0
53
  πŸ”Ά,jpacifico/Chocolatine-14B-Instruct-4k-DPO,29.83,46.89,0.47,48.02,0.63,14.88,0.15,12.19,0.34,15.15,0.44,41.82,0.48,πŸ”Ά fine-tuned on domain-specific datasets,Phi3ForCausalLM,Original,float16,True,mit,13,1,True,30677e58010979af26b70240846fdf7ff38cbbf2,True,True,2024-08-08,2024-08-01,False,False,jpacifico/Chocolatine-14B-Instruct-4k-DPO,0
 
54
  πŸ’¬,Qwen/Qwen2-57B-A14B,29.6,63.38,0.63,41.79,0.59,7.7,0.08,10.85,0.33,14.18,0.44,39.73,0.46,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2MoeForCausalLM,Original,bfloat16,True,apache-2.0,57,71,True,5ea455a449e61a92a5b194ee06be807647d3e8b5,True,True,2024-08-14,2024-06-04,True,True,Qwen/Qwen2-57B-A14B-Instruct,1
55
  🟒,Qwen/Qwen1.5-110B,29.56,34.22,0.34,44.28,0.61,23.04,0.23,13.65,0.35,13.71,0.44,48.45,0.54,🟒 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,other,111,88,True,16659038ecdcc771c1293cf47020fa7cc2750ee8,True,True,2024-06-13,2024-04-25,False,True,Qwen/Qwen1.5-110B,0
56
  πŸ”Ά,moreh/MoMo-72B-lora-1.8.7-DPO,29.35,51.67,0.52,43.13,0.6,16.77,0.17,9.84,0.32,14.42,0.45,40.26,0.46,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,72,463,True,a1d657156f82c24b670158406378648233487011,True,True,2024-06-12,2024-02-02,False,True,abacusai/Smaug-72B-v0.1,1
@@ -66,14 +68,14 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
66
  🟒,dnhkng/RYS-Phi-3-medium-4k-instruct,28.38,43.91,0.44,46.75,0.62,11.78,0.12,13.98,0.35,11.09,0.43,42.74,0.48,🟒 pretrained,Phi3ForCausalLM,Original,bfloat16,True,mit,17,1,True,1009e916b1ff8c9a53bc9d8ff48bea2a15ccde26,True,True,2024-08-07,2024-08-06,False,False,dnhkng/RYS-Phi-3-medium-4k-instruct,0
67
  πŸ”Ά,NLPark/AnFeng_v3.1-Avocet,28.05,50.96,0.51,40.31,0.58,13.9,0.14,9.96,0.32,14.98,0.45,38.2,0.44,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,cc-by-nc-nd-4.0,34,0,True,5170739731033323e6e66a0f68d34790042a3b2a,True,True,2024-08-07,2024-08-03,False,False,NLPark/AnFeng_v3.1-Avocet,0
68
  🀝,OpenBuddy/openbuddy-zero-56b-v21.2-32k,27.99,50.57,0.51,44.8,0.61,12.99,0.13,9.06,0.32,12.78,0.43,37.77,0.44,🀝 base merges and moerges,LlamaForCausalLM,Original,float16,True,other,56,0,True,c7a1a4a6e798f75d1d3219ab9ff9f2692e29f7d5,True,True,2024-06-26,2024-06-10,True,False,OpenBuddy/openbuddy-zero-56b-v21.2-32k,0
69
- πŸ’¬,meta-llama/Meta-Llama-3.1-8B,27.91,78.56,0.79,29.89,0.51,17.6,0.18,2.35,0.27,8.41,0.39,30.68,0.38,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,2165,True,df34336b42332c6d360959e259cd6271c6a09fd4,True,True,2024-08-15,2024-07-18,True,True,meta-llama/Meta-Llama-3.1-8B-Instruct,1
70
  πŸ’¬,vicgalle/Configurable-Llama-3.1-8B-Instruct,27.77,83.12,0.83,29.66,0.5,15.86,0.16,3.24,0.27,5.93,0.38,28.8,0.36,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,apache-2.0,8,8,True,133b3ab1a5385ff9b3d17da2addfe3fc1fd6f733,True,True,2024-08-05,2024-07-24,True,False,vicgalle/Configurable-Llama-3.1-8B-Instruct,0
71
  πŸ”Ά,BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B,27.74,51.86,0.52,35.38,0.55,13.97,0.14,13.87,0.35,16.72,0.46,34.65,0.41,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,2,True,a42c86c61b98ca4fdf238d688fe6ea11cf414d29,True,True,2024-08-05,2024-07-09,True,False,BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B,0
72
  πŸ”Ά,01-ai/Yi-1.5-34B,27.73,38.53,0.39,44.17,0.61,15.18,0.15,12.42,0.34,16.97,0.46,39.1,0.45,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,34,34,True,1ec522298a6935c881df6dc29d3669833bd8672d,True,True,2024-07-27,2024-05-18,True,True,cognitivecomputations/dolphin-2.9.1-yi-1.5-34b,1
73
  πŸ’¬,01-ai/Yi-1.5-9B-Chat,27.71,60.46,0.6,36.95,0.56,11.63,0.12,11.3,0.33,12.84,0.43,33.06,0.4,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,126,True,bc87d8557c98dc1e5fdef6ec23ed31088c4d3f35,True,True,2024-06-12,2024-05-10,True,True,01-ai/Yi-1.5-9B-Chat,0
74
  πŸ’¬,jpacifico/Chocolatine-3B-Instruct-DPO-Revised,27.63,56.23,0.56,37.16,0.55,14.5,0.15,9.62,0.32,15.1,0.45,33.21,0.4,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,float16,True,mit,3,10,True,c403df6c0f78148cfb477972455cbd859149311a,True,True,2024-07-19,2024-07-17,True,False,jpacifico/Chocolatine-3B-Instruct-DPO-Revised,0
75
- πŸ’¬,microsoft/Phi-3.5-mini-instruct,27.4,57.75,0.58,36.75,0.55,14.95,0.15,11.97,0.34,10.1,0.4,32.91,0.4,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,bfloat16,True,mit,3,405,True,64963004ad95869fa73a30279371c8778509ac84,True,True,2024-08-21,2024-08-16,True,True,microsoft/Phi-3.5-mini-instruct,0
76
- πŸ’¬,microsoft/Phi-3-mini-4k-instruct,27.2,54.77,0.55,36.56,0.55,14.2,0.14,10.96,0.33,13.12,0.43,33.58,0.4,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,float16,True,mit,3,995,True,c1358f8a35e6d2af81890deffbbfa575b978c62f,True,True,2024-07-02,2024-04-22,True,True,microsoft/Phi-3-mini-4k-instruct,0
77
  πŸ’¬,mistralai/Mixtral-8x7B-v0.1,27.13,58.97,0.59,37.11,0.55,10.88,0.11,9.51,0.32,16.68,0.46,29.62,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MixtralForCausalLM,Original,bfloat16,True,apache-2.0,46,408,True,286ae6737d048ad1d965c2e830864df02db50f2f,True,False,2024-07-27,2024-01-11,True,True,NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO,1
78
  πŸ’¬,Qwen/Qwen1.5-32B-Chat,27.1,55.32,0.55,44.55,0.61,6.65,0.07,7.49,0.31,10.2,0.42,38.41,0.45,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,32,106,True,0997b012af6ddd5465d40465a8415535b2f06cfc,True,True,2024-06-12,2024-04-03,True,True,Qwen/Qwen1.5-32B-Chat,0
79
  🀝,mlabonne/NeuralDaredevil-8B-abliterated,27.01,75.61,0.76,30.31,0.51,8.01,0.08,7.49,0.31,9.08,0.4,31.57,0.38,🀝 base merges and moerges,LlamaForCausalLM,Original,float16,True,llama3,8,127,True,2f4a5e8a8522f19dff345c7189b7891468763061,True,True,2024-07-25,2024-05-27,True,True,mlabonne/NeuralDaredevil-8B-abliterated,0
@@ -94,13 +96,13 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
94
  🟒,meta-llama/Meta-Llama-3-70B,26.37,16.03,0.16,48.71,0.65,16.54,0.17,19.69,0.4,16.01,0.45,41.21,0.47,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3,70,795,True,b4d08b7db49d488da3ac49adf25a6b9ac01ae338,True,True,2024-06-12,2024-04-17,False,True,meta-llama/Meta-Llama-3-70B,0
95
  🀝,xxx777xxxASD/L3.1-ClaudeMaid-4x8B,26.19,66.96,0.67,29.44,0.51,12.84,0.13,5.48,0.29,13.75,0.43,28.67,0.36,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,True,llama3.1,24,7,True,2a98d9cb91c7aa775acbf5bfe7bb91beb2faf682,True,False,2024-07-28,2024-07-27,True,False,xxx777xxxASD/L3.1-ClaudeMaid-4x8B,0
96
  🀝,AbacusResearch/Jallabi-34B,25.97,35.29,0.35,43.62,0.6,3.93,0.04,11.86,0.34,20.24,0.48,40.91,0.47,🀝 base merges and moerges,LlamaForCausalLM,Original,float16,True,apache-2.0,34,2,True,f65696da4ed82c9a20e94b200d9dccffa07af682,True,True,2024-06-27,2024-03-01,False,False,AbacusResearch/Jallabi-34B,0
97
- πŸ’¬,microsoft/Phi-3-mini-4k-instruct,25.97,56.13,0.56,39.27,0.57,11.63,0.12,9.28,0.32,7.64,0.4,31.85,0.39,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,bfloat16,True,mit,3,995,True,ff07dc01615f8113924aed013115ab2abd32115b,True,True,2024-06-12,2024-04-22,True,True,microsoft/Phi-3-mini-4k-instruct,0
98
  🟩,dnhkng/RYS-Medium,25.94,44.06,0.44,47.73,0.63,7.78,0.08,10.4,0.33,8.73,0.41,36.96,0.43,🟩 continuously pretrained,Phi3ForCausalLM,Original,bfloat16,True,mit,18,3,True,de09a79e6b2efdcc97490a37b770764e62749fd0,True,True,2024-07-17,2024-07-17,False,False,dnhkng/RYS-Medium,0
99
  🟒,meta-llama/Meta-Llama-3.1-70B,25.91,16.84,0.17,46.4,0.63,16.69,0.17,18.34,0.39,16.58,0.46,40.6,0.47,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3.1,70,223,True,f7d3cc45ed4ff669a354baf2e0f05e65799a0bee,True,True,2024-07-23,2024-07-14,True,True,meta-llama/Meta-Llama-3.1-70B,0
100
  πŸ’¬,cloudyu/Yi-34Bx2-MoE-60B-DPO,25.91,53.19,0.53,31.26,0.52,6.19,0.06,9.62,0.32,14.32,0.44,40.85,0.47,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MixtralForCausalLM,Original,bfloat16,True,apache-2.0,60,2,True,5c2d31042229ee06246064100b781dd926cb0ffd,True,False,2024-08-06,2024-01-23,True,False,cloudyu/Yi-34Bx2-MoE-60B-DPO,0
101
  πŸ’¬,Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo,25.87,57.14,0.57,39.15,0.57,7.63,0.08,10.74,0.33,8.78,0.4,31.78,0.39,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,bfloat16,True,apache-2.0,3,0,True,2896ef357be81fd433c17801d76ce148e60a7032,True,True,2024-06-26,2024-06-24,True,False,Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo,0
102
  🀝,Casual-Autopsy/L3-Umbral-Mind-RP-v2.0-8B (Merge),25.76,71.23,0.71,32.49,0.53,10.12,0.1,4.92,0.29,5.55,0.37,30.26,0.37,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,llama3,8,11,True,b46c066ea8387264858dc3461f382e7b42fd9c48,True,True,2024-07-02,2024-06-26,True,False,Casual-Autopsy/L3-Umbral-Mind-RP-v2.0-8B,1
103
- πŸ”Ά,Sao10K/L3-8B-Stheno-v3.2,25.76,68.73,0.69,32.02,0.52,8.53,0.09,8.05,0.31,6.45,0.38,30.76,0.38,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,8,199,True,4bb828f6e1b1efd648c39b1ad682c44ff260f018,True,True,2024-06-30,2024-06-05,True,False,Sao10K/L3-8B-Stheno-v3.2,0
104
  πŸ”Ά,Nitral-AI/Hathor_Stable-v0.2-L3-8B,25.7,71.75,0.72,32.83,0.53,9.21,0.09,4.92,0.29,5.56,0.38,29.96,0.37,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,8,50,True,1c9f391c3e349f8ba51b5696290ee6db6a2b63fd,True,True,2024-07-02,2024-06-09,True,False,Nitral-AI/Hathor_Stable-v0.2-L3-8B,0
105
  πŸ’¬,cognitivecomputations/dolphin-2.9.2-Phi-3-Medium (Merge),25.66,42.48,0.42,49.72,0.65,0.53,0.01,10.29,0.33,11.41,0.42,39.5,0.46,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,mit,-1,15,True,0470c5b912b51fa6e27d87a8ea7feafacd8cb101,True,True,2024-08-05,2024-05-31,True,True,cognitivecomputations/dolphin-2.9.2-Phi-3-Medium,1
106
  πŸ”Ά,VAGOsolutions/SauerkrautLM-Nemo-12b-Instruct,25.63,61.13,0.61,32.34,0.52,8.69,0.09,7.94,0.31,17.16,0.45,26.5,0.34,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,18,True,fcb056465084ab2c71503a0760f46e4be79c985c,True,True,2024-07-22,2024-07-22,True,False,VAGOsolutions/SauerkrautLM-Nemo-12b-Instruct,0
@@ -127,7 +129,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
127
  πŸ”Ά,arcee-ai/Llama-Spark,24.9,79.11,0.79,29.77,0.51,1.06,0.01,6.6,0.3,2.62,0.36,30.23,0.37,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,22,True,6d74a617fbb17a1ada08528f2673c89f84fb062e,True,True,2024-08-08,2024-07-26,True,False,arcee-ai/Llama-Spark,0
128
  πŸ”Ά,01-ai/Yi-1.5-9B,24.85,44.65,0.45,35.78,0.55,10.42,0.1,11.74,0.34,13.52,0.43,32.97,0.4,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,24,True,91f0a521e3e2a0675a3549aa5d3f40717068de94,True,True,2024-08-02,2024-05-18,True,True,cognitivecomputations/dolphin-2.9.1-yi-1.5-9b,1
129
  πŸ”Ά,Eurdem/Defne-llama3.1-8B,24.81,50.36,0.5,32.82,0.53,14.12,0.14,6.15,0.3,13.54,0.43,31.84,0.39,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,2,True,7832ba3066636bf4dab3e7d658c0b3ded12491ae,True,True,2024-08-14,2024-07-29,False,False,Eurdem/Defne-llama3.1-8B,0
130
- πŸ’¬,Qwen/Qwen2-7B,24.76,56.79,0.57,37.81,0.55,8.61,0.09,6.38,0.3,7.37,0.39,31.64,0.38,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,535,True,41c66b0be1c3081f13defc6bdf946c2ef240d6a6,True,True,2024-06-12,2024-06-04,True,True,Qwen/Qwen2-7B-Instruct,1
131
  🀝,HiroseKoichi/Llama-Salad-4x8B-V3,24.75,66.54,0.67,31.93,0.52,8.53,0.09,7.05,0.3,6.45,0.37,27.98,0.35,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,llama3,24,4,True,a343915429779efbd1478f01ba1f7fd9d8d226c0,True,False,2024-06-26,2024-06-17,True,False,HiroseKoichi/Llama-Salad-4x8B-V3,0
132
  πŸ’¬,meta-llama/Meta-Llama-3-8B-Instruct,24.71,73.47,0.73,28.23,0.5,7.1,0.07,5.37,0.29,3.74,0.36,30.37,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,1,True,8346770280fa169d41d737785dd63a66e9d94501,True,True,2024-07-28,2024-06-07,True,False,haoranxu/Llama-3-Instruct-8B-SimPO,1
133
  🀝,invisietch/Nimbus-Miqu-v0.1-70B,24.71,46.47,0.46,43.45,0.6,5.44,0.05,11.86,0.34,9.33,0.41,31.7,0.39,🀝 base merges and moerges,LlamaForCausalLM,Original,float16,False,unknown,68,5,True,3209583a0849383daf8faa7b819f29726b8806cf,True,True,2024-07-03,2024-06-30,False,False,invisietch/Nimbus-Miqu-v0.1-70B,0
@@ -157,7 +159,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
157
  πŸ”Ά,BAAI/Infinity-Instruct-3M-0625-Qwen2-7B,24.01,55.54,0.56,34.66,0.53,6.12,0.06,8.39,0.31,6.46,0.39,32.89,0.4,πŸ”Ά fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,7,True,503c24156d7682458686a7b5324f7f886e63470d,True,True,2024-08-05,2024-07-09,True,False,BAAI/Infinity-Instruct-3M-0625-Qwen2-7B,0
158
  πŸ”Ά,meta-llama/Meta-Llama-3.1-8B,24.0,64.74,0.65,26.26,0.48,10.73,0.11,8.95,0.32,6.91,0.39,26.4,0.34,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama3.1,8,2,True,6b2b5694a192cb29ad0e4314138affa25b630c0e,True,True,2024-08-07,2024-08-06,True,False,ValiantLabs/Llama3.1-8B-ShiningValiant2,2
159
  πŸ’¬,vicgalle/Roleplay-Llama-3-8B,23.94,73.2,0.73,28.55,0.5,8.69,0.09,1.45,0.26,1.68,0.35,30.09,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,apache-2.0,8,36,True,57297eb57dcc2c116f061d9dda341094203da01b,True,True,2024-06-26,2024-04-19,True,False,vicgalle/Roleplay-Llama-3-8B,0
160
- πŸ’¬,meta-llama/Meta-Llama-3-8B-Instruct,23.91,74.08,0.74,28.24,0.5,8.69,0.09,1.23,0.26,1.6,0.36,29.6,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,3360,True,e1945c40cd546c78e41f1151f4db032b271faeaa,True,True,2024-06-12,2024-04-17,True,True,meta-llama/Meta-Llama-3-8B-Instruct,0
161
  πŸ’¬,01-ai/Yi-34B-Chat,23.9,46.99,0.47,37.62,0.56,4.31,0.04,11.74,0.34,8.36,0.4,34.37,0.41,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,34,340,True,2e528b6a80fb064a0a746c5ca43114b135e30464,True,True,2024-06-12,2023-11-22,True,True,01-ai/Yi-34B-Chat,0
162
  πŸ’¬,UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter2,23.78,69.89,0.7,29.87,0.51,8.76,0.09,2.24,0.27,2.0,0.36,29.91,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,0,True,730c7207d4b538feeb3c2e6d6f6a6ba8615a9be3,True,True,2024-08-07,2024-06-25,True,False,UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter2,0
163
  πŸ’¬,vicgalle/Configurable-Yi-1.5-9B-Chat,23.77,43.23,0.43,35.33,0.55,6.12,0.06,12.42,0.34,12.02,0.43,33.5,0.4,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,apache-2.0,8,2,True,992cb2232caae78eff6a836b2e0642f7cbf6018e,True,True,2024-06-26,2024-05-12,True,False,vicgalle/Configurable-Yi-1.5-9B-Chat,0
@@ -170,7 +172,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
170
  πŸ’¬,SeaLLMs/SeaLLMs-v3-7B-Chat,23.63,43.77,0.44,33.8,0.53,15.11,0.15,6.49,0.3,10.47,0.42,32.16,0.39,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,7,38,True,67ef6dfd0a5df7af4be7a325786105a2ba4cbaf7,True,True,2024-07-29,2024-07-03,True,False,SeaLLMs/SeaLLMs-v3-7B-Chat,0
171
  πŸ”Ά,meta-llama/Meta-Llama-3-8B-Instruct,23.56,69.03,0.69,29.08,0.5,5.74,0.06,1.12,0.26,5.5,0.38,30.92,0.38,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,mit,8,4,True,9c95ccdeceed14a3c2881bc495101a1acca1385f,True,True,2024-07-02,2024-05-25,True,False,ZhangShenao/SELM-Llama-3-8B-Instruct-iter-3,3
172
  πŸ’¬,lordjia/Qwen2-Cantonese-7B-Instruct,23.5,54.35,0.54,32.45,0.52,8.76,0.09,6.04,0.3,7.81,0.4,31.59,0.38,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,eb8b0faee749d167fd70e74f5e579094c4cfe7fb,True,True,2024-08-03,2024-07-13,True,False,lordjia/Qwen2-Cantonese-7B-Instruct,0
173
- πŸ’¬,meta-llama/Meta-Llama-3.1-8B,23.49,61.7,0.62,30.72,0.52,4.76,0.05,6.38,0.3,13.62,0.44,23.77,0.31,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,143,True,aabb745a717e133b74dcae23195d2635cf5f38cc,True,True,2024-08-28,2024-07-28,True,True,NousResearch/Hermes-3-Llama-3.1-8B,1
174
  πŸ’¬,saltlux/luxia-21.4b-alignment-v1.2,23.44,41.15,0.41,47.77,0.64,1.59,0.02,7.72,0.31,14.9,0.45,27.48,0.35,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,21,7,True,eed12b5574fa49cc81e57a88aff24c08c13721c0,True,True,2024-07-30,2024-05-27,True,False,saltlux/luxia-21.4b-alignment-v1.2,0
175
  πŸ’¬,meta-llama/Meta-Llama-3-8B-Instruct,23.43,66.87,0.67,28.06,0.48,6.57,0.07,3.02,0.27,5.31,0.38,30.77,0.38,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,8,1,True,555f4a0092f239557e1aa34f9d489e8156b907bb,True,True,2024-06-29,2024-04-26,True,False,lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top75,2
176
  πŸ’¬,meta-llama/Meta-Llama-3-8B-Instruct,23.37,66.37,0.66,27.67,0.49,8.53,0.09,3.02,0.27,4.81,0.36,29.83,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,8,2,True,5a2f17238cc83932e00613d285f8bf6b8f4a0c3a,True,True,2024-06-29,2024-04-26,True,False,lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top25,2
@@ -210,7 +212,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
210
  πŸ”Ά,WizardLMTeam/WizardLM-70B-V1.0,22.32,49.51,0.5,37.54,0.56,3.47,0.03,2.13,0.27,14.09,0.44,27.18,0.34,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama2,70,234,True,54aaecaff7d0790eb9f0ecea1cc267a94cc66949,True,True,2024-06-12,2023-08-09,False,True,WizardLMTeam/WizardLM-70B-V1.0,0
211
  🀝,johnsutor/Llama-3-8B-Instruct_breadcrumbs-density-0.1-gamma-0.01 (Merge),22.3,42.71,0.43,29.55,0.5,3.7,0.04,9.62,0.32,17.8,0.46,30.44,0.37,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,f4ebbf27d586e94c63f0a7293f565cbd947b824f,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_breadcrumbs-density-0.1-gamma-0.01,1
212
  πŸ”Ά,NousResearch/Meta-Llama-3-8B,22.29,57.63,0.58,30.51,0.51,5.97,0.06,6.26,0.3,10.06,0.42,23.31,0.31,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,8,5,True,3cb5792509966a963645be24fdbeb2e7dc6cac15,True,True,2024-07-24,2024-05-02,True,False,vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B,2
213
- πŸ’¬,mistralai/Mistral-Nemo-Base-2407,22.27,62.61,0.63,27.11,0.49,0.3,0.0,8.72,0.32,8.48,0.39,26.37,0.34,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,1006,True,4d14c1db68fe20dbf80b8eca85d39b909c5fe1d5,True,True,2024-08-29,2024-07-17,True,True,mistralai/Mistral-Nemo-Instruct-2407,1
214
  🟒,01-ai/Yi-34B,22.26,30.46,0.3,35.54,0.55,4.46,0.04,15.55,0.37,9.65,0.41,37.91,0.44,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,34,1278,True,e1e7da8c75cfd5c44522228599fd4d2990cedd1c,True,True,2024-06-12,2023-11-01,False,True,01-ai/Yi-34B,0
215
  🀝,johnsutor/Llama-3-8B-Instruct_breadcrumbs-density-0.5-gamma-0.1 (Merge),22.18,43.96,0.44,30.85,0.51,6.87,0.07,7.61,0.31,13.84,0.44,29.96,0.37,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,a481edaceeaab34f4dc0e90c4d8ec0f72658bbdd,True,True,2024-06-26,2024-06-08,False,False,johnsutor/Llama-3-8B-Instruct_breadcrumbs-density-0.5-gamma-0.1,1
216
  πŸ”Ά,meta-llama/Meta-Llama-3.1-8B,22.14,64.05,0.64,24.8,0.47,10.8,0.11,4.7,0.29,2.29,0.36,26.22,0.34,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama3.1,8,5,True,332c99d80f378c77b090745a5aac10f8ab339519,True,True,2024-08-14,2024-08-11,True,False,ValiantLabs/Llama3.1-8B-Enigma,2
@@ -223,7 +225,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
223
  🀝,waqasali1707/Beast-Soul-new (Merge),22.01,50.3,0.5,33.04,0.52,6.42,0.06,4.36,0.28,14.5,0.45,23.42,0.31,🀝 base merges and moerges,MistralForCausalLM,Original,bfloat16,True,,7,0,False,a23d68c4556d91a129de3f8fd8b9e0ff0890f4cc,True,True,2024-08-07,2024-08-07,False,False,waqasali1707/Beast-Soul-new,1
224
  πŸ”Ά,chujiezheng/Llama-3-Instruct-8B-SimPO-ExPO,21.97,64.34,0.64,25.87,0.48,0.53,0.01,4.92,0.29,9.5,0.39,26.68,0.34,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,14,True,3fcaa9fe99691659eb197487e9a343f601bf63f2,True,True,2024-06-26,2024-05-26,True,False,chujiezheng/Llama-3-Instruct-8B-SimPO-ExPO,0
225
  πŸ”Ά,VAGOsolutions/SauerkrautLM-7b-LaserChat,21.97,59.88,0.6,22.99,0.45,6.72,0.07,6.71,0.3,9.92,0.41,25.61,0.33,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,10,True,cb759636a3d5b0768df2f43a3d3da9b17e10e7b9,True,True,2024-06-26,2024-02-05,True,False,VAGOsolutions/SauerkrautLM-7b-LaserChat,0
226
- 🟒,01-ai/Yi-1.5-9B,21.95,29.36,0.29,30.5,0.51,10.2,0.1,17.23,0.38,12.03,0.43,32.4,0.39,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,42,True,8cfde9604384c50137bee480b8cef8a08e5ae81d,True,True,2024-06-12,2024-05-11,False,True,01-ai/Yi-1.5-9B,0
227
  πŸ”Ά,4season/final_model_test_v2,21.92,31.91,0.32,47.41,0.63,1.36,0.01,10.29,0.33,12.43,0.43,28.09,0.35,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,21,0,True,cf690c35d9cf0b0b6bf034fa16dbf88c56fe861c,True,True,2024-06-27,2024-05-20,False,False,4season/final_model_test_v2,0
228
  🀝,FallenMerick/Chewy-Lemon-Cookie-11B (Merge),21.91,48.75,0.49,33.01,0.53,4.61,0.05,3.91,0.28,15.95,0.45,25.19,0.33,🀝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,cc-by-4.0,10,0,True,0f5d0d6d218b3ef034f58eba32d6fe7ac4c237ae,True,True,2024-06-27,2024-06-06,False,False,FallenMerick/Chewy-Lemon-Cookie-11B,1
229
  πŸ’¬,OpenBuddy/openbuddy-llama3-8b-v21.2-32k,21.84,61.92,0.62,27.25,0.49,6.5,0.06,3.91,0.28,5.93,0.38,25.54,0.33,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,other,8,0,True,f3ea2dec2533a3dd97df32db2376b17875cafda2,True,True,2024-06-26,2024-06-18,True,False,OpenBuddy/openbuddy-llama3-8b-v21.2-32k,0
@@ -247,7 +249,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
247
  🀝,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.7-gamma-0.1 (Merge),21.46,41.99,0.42,31.01,0.51,7.1,0.07,6.49,0.3,13.14,0.44,29.06,0.36,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,cd52bafe64e82d466d0bc590da5399f2299d24e1,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.7-gamma-0.1,1
248
  πŸ”Ά,flammenai/flammen15-gutenberg-DPO-v1-7B (Merge),21.46,47.98,0.48,32.67,0.52,6.72,0.07,4.59,0.28,12.53,0.43,24.29,0.32,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,0,True,550cd9548cba1265cb1771c85ebe498789fdecb5,True,True,2024-07-10,2024-04-05,False,False,flammenai/flammen15-gutenberg-DPO-v1-7B,1
249
  πŸ”Ά,Intel/neural-chat-7b-v3-2,21.43,49.88,0.5,30.24,0.5,4.53,0.05,5.37,0.29,20.06,0.49,18.52,0.27,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,float16,True,apache-2.0,7,56,True,0d8f77647810d21d935ea90c66d6339b85e65a75,True,True,2024-06-12,2023-11-21,False,True,Intel/neural-chat-7b-v3-2,0
250
- πŸ’¬,LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct,21.4,71.93,0.72,17.98,0.42,4.46,0.04,2.13,0.27,3.3,0.37,28.63,0.36,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",ExaoneForCausalLM,Original,bfloat16,True,other,7,329,True,7f15baedd46858153d817445aff032f4d6cf4939,True,True,2024-08-18,2024-07-31,True,False,LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct,0
251
  πŸ’¬,Columbia-NLP/LION-LLaMA-3-8b-dpo-v1.0,21.34,49.57,0.5,30.36,0.5,9.06,0.09,4.14,0.28,10.28,0.41,24.65,0.32,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,2,False,3cddd4a6f5939a0a4db1092a0275342b7b9912f3,True,True,2024-07-04,2024-06-28,True,False,Columbia-NLP/LION-LLaMA-3-8b-dpo-v1.0,0
252
  πŸ’¬,mistralai/Mistral-7B-v0.1,21.33,52.86,0.53,29.25,0.49,3.85,0.04,4.47,0.28,16.06,0.45,21.46,0.29,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,254,True,4c6e34123b140ce773a8433cae5410949289102c,True,True,2024-06-12,2023-10-12,True,True,teknium/OpenHermes-2-Mistral-7B,1
253
  πŸ’¬,vicgalle/CarbonBeagle-11B-truthy,21.29,52.12,0.52,33.99,0.53,4.76,0.05,6.6,0.3,4.11,0.37,26.19,0.34,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,float16,True,apache-2.0,10,9,True,476cd2a6d938bddb38dfbeb4cb21e3e34303413d,True,True,2024-07-13,2024-02-10,True,False,vicgalle/CarbonBeagle-11B-truthy,0
@@ -279,7 +281,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
279
  πŸ’¬,Qwen/Qwen2-7B,20.96,35.35,0.35,27.91,0.49,11.56,0.12,5.37,0.29,11.66,0.42,33.9,0.41,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,59,True,c443c4eb5138ed746ac49ed98bf3c183dc5380ac,True,True,2024-07-10,2024-05-24,True,True,cognitivecomputations/dolphin-2.9.2-qwen2-7b,1
280
  🀝,allknowingroger/MultiMash8-13B-slerp (Merge),20.95,43.21,0.43,32.27,0.52,6.95,0.07,5.15,0.29,14.5,0.44,23.62,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,apache-2.0,12,0,True,5590ccd99f74301951f450f9d0271a99e97728c8,True,True,2024-06-26,2024-05-26,False,False,allknowingroger/MultiMash8-13B-slerp,1
281
  πŸ”Ά,maldv/badger-writer-llama-3-8b (Merge),20.93,53.03,0.53,26.88,0.49,6.57,0.07,5.26,0.29,3.2,0.36,30.67,0.38,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,False,cc-by-nc-4.0,8,7,True,1d8134d01af87e994571ae16ccd7b31cce42418f,True,True,2024-06-26,2024-06-17,True,False,maldv/badger-writer-llama-3-8b,1
282
- 🟒,google/gemma-2-9b,20.93,20.4,0.2,34.1,0.54,11.78,0.12,10.51,0.33,14.3,0.45,34.48,0.41,🟒 pretrained,Gemma2ForCausalLM,Original,bfloat16,True,gemma,9,543,True,beb0c08e9eeb0548f3aca2ac870792825c357b7d,True,True,2024-07-11,2024-06-24,False,True,google/gemma-2-9b,0
283
  🀝,icefog72/IceCocoaRP-7b (Merge),20.87,49.62,0.5,29.64,0.49,5.44,0.05,6.04,0.3,11.17,0.42,23.32,0.31,🀝 base merges and moerges,MistralForCausalLM,Original,float16,False,cc-by-nc-4.0,7,3,True,001beaf88932f7e010af21bbdeff0079bda73b1d,True,True,2024-06-26,2024-06-07,False,False,icefog72/IceCocoaRP-7b,1
284
  πŸ”Ά,fblgit/juanako-7b-UNA,20.77,48.37,0.48,30.42,0.51,2.87,0.03,6.15,0.3,17.16,0.46,19.68,0.28,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,23,True,b8ac85b603d5ee1ac619b2e1d0b3bb86c4eecb0c,True,True,2024-06-30,2023-11-27,False,False,fblgit/juanako-7b-UNA,0
285
  🀝,maldv/badger-lambda-llama-3-8b,20.76,48.61,0.49,28.1,0.5,8.31,0.08,4.25,0.28,4.52,0.38,30.74,0.38,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,8,9,True,8ef157d0d3c12212ca5e70d354869aed90e03f22,True,True,2024-06-26,2024-06-10,True,False,maldv/badger-lambda-llama-3-8b,0
@@ -298,7 +300,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
298
  🀝,allknowingroger/MixTAO-19B-pass (Merge),20.54,38.14,0.38,31.58,0.51,5.59,0.06,4.59,0.28,19.95,0.48,23.39,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,apache-2.0,19,1,True,a41369cfcfbada9d5387051ba616bf1432b31d31,True,True,2024-06-26,2024-06-02,False,False,allknowingroger/MixTAO-19B-pass,1
299
  🀝,allknowingroger/MultiMash9-13B-slerp (Merge),20.53,41.88,0.42,32.55,0.52,7.18,0.07,4.03,0.28,14.21,0.44,23.33,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,apache-2.0,12,0,True,56dac45f387669baa04a8997ebb9ea63c65fbbd1,True,True,2024-06-26,2024-05-26,False,False,allknowingroger/MultiMash9-13B-slerp,1
300
  🀝,shadowml/BeagSake-7B (Merge),20.5,40.19,0.4,32.53,0.52,6.27,0.06,4.03,0.28,16.38,0.46,23.61,0.31,🀝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,cc-by-nc-4.0,7,1,True,b7a3b25a188a4608fd05fc4247ddd504c1f529d1,True,True,2024-06-29,2024-01-31,False,False,shadowml/BeagSake-7B,1
301
- πŸ’¬,meta-llama/Meta-Llama-3-8B-Instruct,20.48,47.82,0.48,26.8,0.49,8.38,0.08,5.7,0.29,5.4,0.38,28.79,0.36,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,llama3,8,3360,True,e1945c40cd546c78e41f1151f4db032b271faeaa,True,True,2024-07-08,2024-04-17,False,True,meta-llama/Meta-Llama-3-8B-Instruct,0
302
  πŸ”Ά,SanjiWatsuki/Kunoichi-DPO-v2-7B,20.41,54.31,0.54,20.9,0.44,6.57,0.07,6.15,0.3,11.09,0.42,23.41,0.31,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,float16,True,cc-by-nc-4.0,7,78,True,5278247beb482c4fceff2294570236d68b74d132,True,True,2024-06-28,2024-01-13,True,False,SanjiWatsuki/Kunoichi-DPO-v2-7B,0
303
  🀝,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.3-gamma-0.01 (Merge),20.39,35.18,0.35,29.14,0.5,1.13,0.01,7.49,0.31,20.35,0.49,29.01,0.36,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,fa77530fe3723d7b15b06b88c3ca6110a8421742,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.3-gamma-0.01,1
304
  🀝,allknowingroger/Multimash3-12B-slerp (Merge),20.38,44.37,0.44,32.15,0.52,5.74,0.06,4.03,0.28,13.03,0.43,22.97,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,apache-2.0,12,0,True,0b90bf0b5230d02b4ba63879fc3bf0b85d46c3ce,True,True,2024-06-26,2024-05-21,False,False,allknowingroger/Multimash3-12B-slerp,1
@@ -324,7 +326,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
324
  🀝,allknowingroger/MixTaoTruthful-13B-slerp (Merge),20.13,41.39,0.41,32.71,0.52,5.89,0.06,4.59,0.28,12.86,0.43,23.33,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,apache-2.0,12,0,True,3324d37e138c6bf0d6891e54b6dd839c8d2f35ec,True,True,2024-06-26,2024-05-25,False,False,allknowingroger/MixTaoTruthful-13B-slerp,1
325
  🀝,johnsutor/Llama-3-8B-Instruct_breadcrumbs-density-0.3-gamma-0.01 (Merge),20.1,33.77,0.34,28.14,0.49,0.0,0.0,8.28,0.31,22.29,0.5,28.15,0.35,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,4a432be239528ffc654955338982f1f32eb12901,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_breadcrumbs-density-0.3-gamma-0.01,1
326
  πŸ’¬,cat-searcher/gemma-2-9b-it-sppo-iter-0,20.1,29.42,0.29,41.1,0.59,0.0,0.0,12.08,0.34,6.9,0.39,31.11,0.38,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Gemma2ForCausalLM,Original,bfloat16,True,,9,0,False,c2d7b76786151aecfa5972a2a3e937feb2d2c48b,True,True,2024-08-09,2024-08-05,True,False,cat-searcher/gemma-2-9b-it-sppo-iter-1-evol-1,2
327
- πŸ’¬,MLP-KTLim/llama-3-Korean-Bllossom-8B (Merge),20.09,51.13,0.51,26.93,0.49,8.38,0.08,1.68,0.26,3.63,0.37,28.82,0.36,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,242,True,8a738f9f622ffc2b0a4a6b81dabbca80406248bf,True,True,2024-07-09,2024-04-25,True,False,MLP-KTLim/llama-3-Korean-Bllossom-8B,1
328
  🀝,allknowingroger/MultiMash-12B-slerp (Merge),20.08,39.74,0.4,31.93,0.51,7.48,0.07,3.58,0.28,14.77,0.44,22.97,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,apache-2.0,12,0,True,91a6d0fe6b9271000ca713ee9ab414c782ba4c50,True,True,2024-06-26,2024-05-20,False,False,allknowingroger/MultiMash-12B-slerp,1
329
  🀝,MaziyarPanahi/Calme-4x7B-MoE-v0.2,20.06,42.94,0.43,31.4,0.51,6.72,0.07,3.91,0.28,12.54,0.43,22.86,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,True,apache-2.0,24,2,True,ffef41baf94b3f88b30cf0aeb3fd72d9e4187161,True,False,2024-08-05,2024-03-17,False,False,MaziyarPanahi/Calme-4x7B-MoE-v0.2,0
330
  🀝,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.5-gamma-0.01 (Merge),20.06,34.54,0.35,29.32,0.5,1.06,0.01,6.26,0.3,21.06,0.49,28.13,0.35,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,a31f86b538ba8b2983620cc27a741bc9a81a7e2f,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.5-gamma-0.01,1
@@ -447,7 +449,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
447
  πŸ”Ά,mistralai/Mistral-7B-v0.3,17.1,39.46,0.39,24.12,0.46,3.32,0.03,2.8,0.27,10.28,0.41,22.6,0.3,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,2,True,404de3b56564dbd43cd64d97f8574b43189462f3,True,True,2024-07-20,2024-07-09,True,False,migtissera/Tess-3-7B-SFT,1
448
  πŸ”Ά,fblgit/una-cybertron-7b-v2-bf16,17.09,47.37,0.47,14.97,0.4,3.32,0.03,6.38,0.3,14.48,0.45,16.03,0.24,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,116,True,7ab101a153740aec39e95ec02831c56f4eab7910,True,True,2024-06-30,2023-12-02,True,False,fblgit/una-cybertron-7b-v2-bf16,0
449
  πŸ”Ά,zhengr/MixTAO-7Bx2-MoE-v8.1,17.06,41.88,0.42,19.18,0.42,5.97,0.06,6.49,0.3,8.3,0.4,20.52,0.28,πŸ”Ά fine-tuned on domain-specific datasets,MixtralForCausalLM,Original,bfloat16,True,apache-2.0,12,52,True,828e963abf2db0f5af9ed0d4034e538fc1cf5f40,True,False,2024-06-27,2024-02-26,True,False,zhengr/MixTAO-7Bx2-MoE-v8.1,0
450
- πŸ”Ά,google/gemma-2-2b,17.05,56.68,0.57,17.98,0.42,0.08,0.0,3.24,0.27,7.08,0.39,17.22,0.25,πŸ”Ά fine-tuned on domain-specific datasets,InternLM2ForCausalLM,Original,bfloat16,True,gemma,2,487,True,2b6ac3ff954ad896c115bbfa1b571cd93ea2c20f,True,True,2024-07-31,2024-07-16,True,True,google/gemma-2-2b-it,1
451
  πŸ”Ά,Salesforce/LLaMA-3-8B-SFR-Iterative-DPO-R,17.03,38.16,0.38,29.15,0.5,0.15,0.0,5.03,0.29,5.55,0.36,24.14,0.32,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,73,True,ad7d1aed82eb6d8ca4b3aad627ff76f72ab34f70,True,True,2024-07-02,2024-05-09,True,True,Salesforce/LLaMA-3-8B-SFR-Iterative-DPO-R,0
452
  πŸ’¬,UCLA-AGI/Mistral7B-PairRM-SPPO-Iter2,17.0,44.46,0.44,22.48,0.45,1.51,0.02,5.15,0.29,9.8,0.41,18.63,0.27,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,8201064df67b5762ff9f361ff1b98aae3747855c,True,True,2024-08-07,2024-05-04,True,False,UCLA-AGI/Mistral7B-PairRM-SPPO-Iter2,0
453
  πŸ’¬,meta-llama/Meta-Llama-3-8B,16.89,44.97,0.45,24.31,0.46,2.57,0.03,2.01,0.27,3.74,0.34,23.71,0.31,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,3,True,7e420ddd6ff48bf213dcab2a9ddb7845b80dd1aa,True,True,2024-08-06,2024-07-15,True,False,Magpie-Align/Llama-3-8B-Magpie-Align-v0.3,2
@@ -498,7 +500,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
498
  🟒,mistralai/Mistral-Nemo-Base-2407,15.08,16.3,0.16,29.37,0.5,4.98,0.05,5.82,0.29,6.52,0.39,27.46,0.35,🟒 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,11,236,True,d2efb15544d5401f761235bef327babb850887d0,True,True,2024-07-19,2024-07-18,False,True,mistralai/Mistral-Nemo-Base-2407,0
499
  πŸ”Ά,Changgil/K2S3-14b-v0.2,15.07,32.43,0.32,24.28,0.46,4.53,0.05,4.14,0.28,6.8,0.39,18.26,0.26,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,14,0,True,b4f0e1eed2640df2b75847ff37e6ebb1be217b6c,True,True,2024-06-27,2024-06-17,False,False,Changgil/K2S3-14b-v0.2,0
500
  🟩,NousResearch/Yarn-Solar-10b-64k,15.06,19.89,0.2,28.4,0.49,2.27,0.02,6.94,0.3,9.01,0.4,23.87,0.31,🟩 continuously pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,10,15,True,703818628a5e8ef637e48e8dbeb3662aa0497aff,True,True,2024-06-12,2024-01-17,False,True,NousResearch/Yarn-Solar-10b-64k,0
501
- 🟒,tiiuae/falcon-mamba-7b,15.04,33.36,0.33,19.88,0.43,3.63,0.04,8.05,0.31,10.86,0.42,14.47,0.23,🟒 pretrained,FalconMambaForCausalLM,Original,bfloat16,True,other,7,174,True,5337fd73f19847e111ba2291f3f0e1617b90c37d,True,True,2024-07-23,2024-07-17,False,True,tiiuae/falcon-mamba-7b,0
502
  πŸ”Ά,pankajmathur/orca_mini_v3_13b,15.0,28.97,0.29,25.55,0.47,1.89,0.02,2.01,0.27,17.11,0.46,14.5,0.23,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,13,32,True,7d6e567d24ce2f228beaf54e89c17b0e750bfe99,True,True,2024-06-26,2023-08-09,False,False,pankajmathur/orca_mini_v3_13b,0
503
  🟒,Deci/DeciLM-7B,14.95,28.13,0.28,21.25,0.44,2.42,0.02,6.04,0.3,13.05,0.44,18.8,0.27,🟒 pretrained,DeciLMForCausalLM,Original,bfloat16,True,apache-2.0,7,222,True,c3c9f4226801dc0433f32aebffe0aac68ee2f051,True,True,2024-06-12,2023-12-10,False,True,Deci/DeciLM-7B,0
504
  πŸ’¬,meta-llama/Meta-Llama-3-8B,14.87,36.53,0.37,21.95,0.44,3.85,0.04,3.91,0.28,4.01,0.36,18.95,0.27,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,other,8,53,True,7f200e4c84ad0daa3ff6bc414012d8d0bacbf90e,True,True,2024-06-12,2024-04-18,True,True,mlabonne/OrpoLlama-3-8B,1
@@ -520,8 +522,8 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
520
  πŸ”Ά,microsoft/Orca-2-7b,14.22,21.83,0.22,22.43,0.45,0.83,0.01,1.45,0.26,24.09,0.5,14.65,0.23,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,7,213,True,60e31e6bdcf582ad103b807cb74b73ee1d2c4b17,True,True,2024-06-12,2023-11-14,False,True,microsoft/Orca-2-7b,0
521
  πŸ”Ά,TencentARC/Mistral_Pro_8B_v0.1,14.2,21.15,0.21,22.89,0.45,5.66,0.06,4.03,0.28,11.83,0.42,19.61,0.28,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,8,66,True,366f159fc5b314ba2a955209d2bca4600f84dac0,True,True,2024-06-12,2024-02-22,False,True,TencentARC/Mistral_Pro_8B_v0.1,0
522
  🟒,tklohj/WindyFloLLM (Merge),14.17,26.69,0.27,24.4,0.46,1.13,0.01,3.36,0.28,11.86,0.43,17.57,0.26,🟒 pretrained,LlamaForCausalLM,Original,float16,True,,13,0,False,21f4241ab3f091d1d309e9076a8d8e3f014908a8,True,True,2024-07-10,2024-06-30,False,False,tklohj/WindyFloLLM,1
523
- 🟒,mistralai/Mistral-7B-v0.3,14.15,22.66,0.23,23.95,0.45,2.64,0.03,5.59,0.29,8.36,0.4,21.7,0.3,🟒 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,349,True,b67d6a03ca097c5122fa65904fce0413500bf8c8,True,True,2024-06-12,2024-05-22,False,True,mistralai/Mistral-7B-v0.3,0
524
  🟒,mistral-community/Mistral-7B-v0.2,14.15,22.66,0.23,23.95,0.45,2.64,0.03,5.59,0.29,8.36,0.4,21.7,0.3,🟒 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,230,True,2c3e624962b1a3f3fbf52e15969565caa7bc064a,True,True,2024-06-12,2024-03-23,False,True,mistral-community/Mistral-7B-v0.2,0
 
525
  🟒,awnr/Mistral-7B-v0.1-signtensors-7-over-16,14.15,22.94,0.23,21.04,0.43,3.25,0.03,7.16,0.3,7.93,0.4,22.56,0.3,🟒 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,0e1f2cb0a81c38fc6c567d9c007883ab62fae266,True,True,2024-07-29,2024-07-29,False,False,awnr/Mistral-7B-v0.1-signtensors-7-over-16,0
526
  πŸ”Ά,netcat420/MFANNv0.19,14.14,30.57,0.31,24.92,0.47,2.64,0.03,7.61,0.31,2.72,0.35,16.36,0.25,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama3.1,8,0,True,af26a25549b7ad291766c479bebda58f15fbff42,True,True,2024-07-27,2024-07-27,False,False,netcat420/MFANNv0.19,0
527
  🀝,johnsutor/Llama-3-8B-Instruct_dare_linear (Merge),14.12,21.45,0.21,19.61,0.43,0.0,0.0,6.15,0.3,21.81,0.5,15.72,0.24,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,abb81fd8fdc2ad32f65befcb7ae369c9837cd563,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_dare_linear,1
@@ -534,7 +536,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
534
  πŸ”Ά,xinchen9/llama3-b8-ft-dis,13.85,15.46,0.15,24.73,0.46,3.17,0.03,8.39,0.31,6.41,0.37,24.93,0.32,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,8,0,True,e4da730f28f79543262de37908943c35f8df81fe,True,True,2024-07-11,2024-06-28,False,False,xinchen9/llama3-b8-ft-dis,0
535
  πŸ”Ά,openchat/openchat_v3.2,13.81,29.81,0.3,20.32,0.43,1.13,0.01,2.68,0.27,13.1,0.43,15.8,0.24,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama2,13,42,True,acc7ce92558681e749678648189812f15c1465fe,True,True,2024-06-12,2023-07-30,False,True,openchat/openchat_v3.2,0
536
  πŸ’¬,yam-peleg/Hebrew-Gemma-11B-Instruct,13.81,30.21,0.3,16.86,0.4,5.06,0.05,3.47,0.28,9.97,0.41,17.27,0.26,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",GemmaForCausalLM,Original,float16,True,other,10,21,True,a40259d1efbcac4829ed44d3b589716f615ed362,True,True,2024-07-31,2024-03-06,True,False,yam-peleg/Hebrew-Gemma-11B-Instruct,0
537
- 🟒,meta-llama/Meta-Llama-3.1-8B,13.78,12.7,0.13,25.29,0.47,4.61,0.05,6.15,0.3,8.98,0.38,24.95,0.32,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,676,True,e5c39e551424c763dbc3e58e32ef2999d33a6d8d,True,True,2024-07-23,2024-07-14,True,True,meta-llama/Meta-Llama-3.1-8B,0
538
  πŸ”Ά,meta-llama/Meta-Llama-3-8B,13.78,24.01,0.24,18.39,0.42,0.45,0.0,2.13,0.27,19.94,0.48,17.75,0.26,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,9,True,4e1c955228bdb4d69c1c4560e8d5872312a8f033,True,True,2024-06-27,2024-06-01,True,False,AI-Sweden-Models/Llama-3-8B-instruct,2
539
  🟒,tiiuae/falcon-11B,13.78,32.61,0.33,21.94,0.44,2.34,0.02,2.8,0.27,7.53,0.4,15.44,0.24,🟒 pretrained,FalconForCausalLM,Original,bfloat16,True,unknown,11,204,True,066e3bf4e2d9aaeefa129af0a6d39727d27816b3,True,True,2024-06-09,2024-05-09,False,True,tiiuae/falcon-11B,0
540
  🟒,awnr/Mistral-7B-v0.1-signtensors-3-over-8,13.73,23.94,0.24,20.44,0.43,2.79,0.03,7.16,0.3,5.79,0.38,22.24,0.3,🟒 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,fa368f705ace05da2fef25c030fe740cf1fef176,True,True,2024-07-29,2024-07-29,False,False,awnr/Mistral-7B-v0.1-signtensors-3-over-8,0
@@ -546,8 +548,8 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
546
  🟒,google/flan-t5-xxl,13.49,22.0,0.22,30.12,0.51,0.0,0.0,2.68,0.27,11.19,0.42,14.92,0.23,🟒 pretrained,T5ForConditionalGeneration,Original,float16,True,apache-2.0,11,1161,True,ae7c9136adc7555eeccc78cdd960dfd60fb346ce,True,True,2024-08-12,2022-10-21,False,True,google/flan-t5-xxl,0
547
  πŸ”Ά,LeroyDyer/Mixtral_AI_CyberTron_Ultra,13.47,15.56,0.16,27.75,0.48,0.76,0.01,5.7,0.29,10.3,0.41,20.73,0.29,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,float16,True,apache-2.0,7,4,True,50c69e539578ab5384eb018a60cc1268637becae,True,True,2024-07-12,2024-04-14,False,False,LeroyDyer/SpydazWeb_AI_CyberTron_Ultra_7b,1
548
  🟩,NousResearch/Yarn-Mistral-7b-64k,13.43,20.8,0.21,20.23,0.43,3.02,0.03,5.37,0.29,9.88,0.41,21.27,0.29,🟩 continuously pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,49,True,0273c624561fcecc8e8f4030492a9307aa60f945,True,True,2024-06-12,2023-10-31,False,True,NousResearch/Yarn-Mistral-7b-64k,0
549
- 🟒,meta-llama/Meta-Llama-3-8B,13.41,14.55,0.15,24.5,0.46,3.25,0.03,7.38,0.31,6.24,0.36,24.55,0.32,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3,8,5568,True,62bd457b6fe961a42a631306577e622c83876cb6,True,True,2024-06-12,2024-04-17,False,True,meta-llama/Meta-Llama-3-8B,0
550
- πŸ”Ά,Alibaba-NLP/gte-Qwen2-7B-instruct,13.34,22.55,0.23,21.93,0.45,3.47,0.03,0.0,0.24,6.32,0.36,25.79,0.33,πŸ”Ά fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,148,True,e26182b2122f4435e8b3ebecbf363990f409b45b,True,True,2024-08-05,2024-06-15,True,False,Alibaba-NLP/gte-Qwen2-7B-instruct,0
551
  πŸ”Ά,yam-peleg/Hebrew-Mistral-7B,13.23,23.28,0.23,20.18,0.43,4.53,0.05,3.91,0.28,7.67,0.4,19.78,0.28,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,59,True,3d32134b5959492fd7efbbf16395352594bc89f7,True,True,2024-07-11,2024-04-26,False,False,yam-peleg/Hebrew-Mistral-7B,0
552
  πŸ”Ά,kevin009/llamaRAGdrama,13.2,25.98,0.26,16.64,0.4,3.4,0.03,1.9,0.26,12.11,0.43,19.15,0.27,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,7,True,8c103ca8fa6dd9a8d3dab81b319408095e9a1ad8,True,True,2024-06-26,2024-02-04,True,False,kevin009/llamaRAGdrama,0
553
  🟩,NousResearch/Yarn-Mistral-7b-128k,13.16,19.34,0.19,20.63,0.43,2.49,0.02,6.49,0.3,8.95,0.41,21.03,0.29,🟩 continuously pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,569,True,d09f1f8ed437d61c1aff94c1beabee554843dcdd,True,True,2024-06-12,2023-10-31,False,True,NousResearch/Yarn-Mistral-7b-128k,0
@@ -557,7 +559,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
557
  πŸ”Ά,openchat/openchat_v3.2_super,12.84,28.62,0.29,19.15,0.42,1.59,0.02,1.9,0.26,9.92,0.42,15.84,0.24,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama2,13,36,True,9479cc37d43234a57a33628637d1aca0293d745a,True,True,2024-06-12,2023-09-04,False,True,openchat/openchat_v3.2_super,0
558
  πŸ’¬,google/gemma-7b,12.83,38.68,0.39,11.88,0.36,1.59,0.02,4.59,0.28,12.53,0.43,7.72,0.17,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",GemmaForCausalLM,Original,bfloat16,True,gemma,8,1120,True,18329f019fb74ca4b24f97371785268543d687d2,True,True,2024-06-12,2024-02-13,True,True,google/gemma-7b-it,1
559
  πŸ’¬,meta-llama/Llama-2-70b-chat-hf,12.73,49.58,0.5,4.61,0.3,0.91,0.01,1.9,0.26,3.48,0.37,15.92,0.24,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,llama2,68,2129,True,e9149a12809580e8602995856f8098ce973d1080,True,True,2024-06-12,2023-07-14,True,True,meta-llama/Llama-2-70b-chat-hf,0
560
- πŸ’¬,mistralai/Mistral-7B-v0.1,12.67,44.87,0.45,7.65,0.34,1.66,0.02,0.0,0.25,6.13,0.38,15.72,0.24,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1493,True,73068f3702d050a2fd5aa2ca1e612e5036429398,True,True,2024-06-27,2023-09-27,True,True,mistralai/Mistral-7B-Instruct-v0.1,1
561
  πŸ”Ά,Sao10K/L3-8B-Stheno-v3.3-32K,12.57,46.04,0.46,13.51,0.38,0.98,0.01,0.89,0.26,4.07,0.37,9.95,0.19,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,8,47,True,1a59d163e079c7e7f1542553d085853119960f0c,True,True,2024-06-26,2024-06-22,True,False,Sao10K/L3-8B-Stheno-v3.3-32K,0
562
  πŸ”Ά,netcat420/MFANN3bv0.18,12.55,22.06,0.22,23.07,0.45,1.89,0.02,1.01,0.26,10.6,0.4,16.67,0.25,πŸ”Ά fine-tuned on domain-specific datasets,PhiForCausalLM,Original,float16,True,mit,2,0,True,3e792e3413217b63ea9caa0e8b8595fbeb236a69,True,True,2024-07-25,2024-07-25,False,False,netcat420/MFANN3bv0.18,0
563
  πŸ”Ά,netcat420/MFANN3bv0.19,12.45,22.58,0.23,22.91,0.45,1.44,0.01,1.01,0.26,9.9,0.4,16.89,0.25,πŸ”Ά fine-tuned on domain-specific datasets,PhiForCausalLM,Original,float16,True,,2,0,False,073d42274686f5cb6ef6ff9f6ade24eab198e1f2,True,True,2024-08-08,2024-08-04,False,False,netcat420/MFANN3bv0.19,0
@@ -572,7 +574,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
572
  🟒,awnr/Mistral-7B-v0.1-signtensors-5-over-16,12.16,21.18,0.21,17.54,0.41,2.19,0.02,4.14,0.28,6.14,0.37,21.75,0.3,🟒 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,5ea13b3d0723237889e1512bc70dae72f71884d1,True,True,2024-07-29,2024-07-29,False,False,awnr/Mistral-7B-v0.1-signtensors-5-over-16,0
573
  πŸ”Ά,NousResearch/Llama-2-13b-hf,12.12,26.68,0.27,18.21,0.42,0.83,0.01,3.02,0.27,8.53,0.4,15.44,0.24,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,mit,13,53,True,bcad6fff9f8591e091d2d57356a3f102197e8c5f,True,True,2024-06-12,2023-09-06,False,True,teknium/OpenHermes-13B,1
574
  πŸ’¬,internlm/internlm2_5-1_8b-chat,12.11,38.49,0.38,21.03,0.45,0.0,0.0,5.37,0.29,4.42,0.36,3.32,0.13,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",InternLM2ForCausalLM,Original,bfloat16,True,other,1,19,True,4426f00b854561fa60d555d2b628064b56bcb758,True,True,2024-08-07,2024-07-30,True,True,internlm/internlm2_5-1_8b-chat,0
575
- πŸ’¬,unsloth/mistral-7b-v0.3-bnb-4bit,12.08,37.7,0.38,14.86,0.4,0.53,0.01,2.24,0.27,2.97,0.36,14.2,0.23,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,868d8a51e8deb6fd948eabe5bc296c53bcf41073,True,True,2024-08-08,2024-08-04,True,False,llmat/Mistral-v0.3-7B-ORPO,1
576
  πŸ’¬,unsloth/mistral-7b-v0.3-bnb-4bit,12.02,36.4,0.36,15.59,0.4,0.15,0.0,2.57,0.27,2.97,0.35,14.46,0.23,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,float16,True,apache-2.0,7,1,True,868d8a51e8deb6fd948eabe5bc296c53bcf41073,True,True,2024-08-06,2024-08-04,True,False,llmat/Mistral-v0.3-7B-ORPO,1
577
  πŸ”Ά,TencentARC/MetaMath-Mistral-Pro,12.01,21.19,0.21,22.37,0.44,4.61,0.05,2.57,0.27,4.99,0.35,16.35,0.25,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,8,5,True,3835d38de15ed2a04c32aca879b782fc50e390bf,True,True,2024-06-12,2024-02-26,False,True,TencentARC/MetaMath-Mistral-Pro,0
578
  🟒,01-ai/Yi-6B-200K,11.9,8.43,0.08,20.15,0.43,1.21,0.01,4.25,0.28,16.84,0.46,20.49,0.28,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,6,173,True,4a74338e778a599f313e9fa8f5bc08c717604420,True,True,2024-06-12,2023-11-06,False,True,01-ai/Yi-6B-200K,0
@@ -619,15 +621,15 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
619
  πŸ’¬,google/gemma-2-2b,9.71,8.96,0.09,17.37,0.41,4.15,0.04,4.59,0.28,10.91,0.42,12.28,0.21,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Gemma2ForCausalLM,Original,bfloat16,True,gemma,2,17,True,5c0854beb88a6711221771d1b13d51f733e6ca06,True,True,2024-08-25,2024-08-24,True,True,cognitivecomputations/dolphin-2.9.4-gemma2-2b,1
620
  πŸ”Ά,uukuguy/speechless-coder-ds-6.7b,9.64,25.05,0.25,15.9,0.4,1.66,0.02,1.9,0.26,5.34,0.38,7.99,0.17,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,6,5,True,c813a5268c6dfe267a720ad3b51773f1ab0feb59,True,True,2024-06-26,2023-12-30,False,False,uukuguy/speechless-coder-ds-6.7b,0
621
  πŸ”Ά,NousResearch/Llama-2-7b-hf,9.48,18.13,0.18,12.08,0.36,1.06,0.01,2.57,0.27,12.68,0.43,10.37,0.19,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,mit,7,13,True,9f55d6eb15f1edd52ee1fd863a220aa682e78a00,True,True,2024-06-12,2023-09-14,False,True,teknium/OpenHermes-7B,1
622
- 🟒,google/flan-t5-large,9.42,22.01,0.22,17.51,0.42,0.0,0.0,0.11,0.25,9.01,0.41,7.88,0.17,🟒 pretrained,T5ForConditionalGeneration,Original,float16,True,apache-2.0,0,525,True,0613663d0d48ea86ba8cb3d7a44f0f65dc596a2a,True,True,2024-08-14,2022-10-21,False,True,google/flan-t5-large,0
623
- 🟒,meta-llama/Llama-2-7b-chat-hf,9.4,39.65,0.4,4.49,0.31,0.68,0.01,0.56,0.25,3.48,0.37,7.52,0.17,🟒 pretrained,LlamaForCausalLM,Original,float16,True,llama2,6,3802,True,f5db02db724555f92da89c216ac04704f23d4590,True,True,2024-08-30,2023-07-13,True,True,meta-llama/Llama-2-7b-chat-hf,0
624
  πŸ”Ά,iRyanBell/ARC1-II,9.32,17.08,0.17,7.25,0.34,0.76,0.01,2.91,0.27,20.31,0.49,7.62,0.17,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,1,True,c81076b9bdaac0722b33e411a49b07a296e8fae8,True,True,2024-06-26,2024-06-12,False,False,iRyanBell/ARC1-II,0
625
- πŸ”Ά,google/gemma-2-27b,9.3,24.07,0.24,15.31,0.39,0.0,0.0,4.03,0.28,1.6,0.35,10.79,0.2,πŸ”Ά fine-tuned on domain-specific datasets,Gemma2ForCausalLM,Original,bfloat16,True,gemma,27,9,True,27f15219df2000a16955c9403c3f38b5f3413b3d,True,True,2024-08-27,2024-08-13,True,False,AALF/gemma-2-27b-it-SimPO-37K,2
626
  πŸ”Ά,NousResearch/Nous-Hermes-llama-2-7b,9.28,17.29,0.17,13.79,0.38,0.68,0.01,1.79,0.26,11.68,0.43,10.44,0.19,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,mit,6,68,True,b7c3ec54b754175e006ef75696a2ba3802697078,True,True,2024-06-12,2023-07-25,False,True,NousResearch/Nous-Hermes-llama-2-7b,0
627
  πŸ’¬,stabilityai/stablelm-2-zephyr-1_6b,9.26,32.79,0.33,6.71,0.34,2.11,0.02,0.0,0.24,5.99,0.35,7.93,0.17,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",StableLmForCausalLM,Original,float16,True,other,1,176,True,2f275b1127d59fc31e4f7c7426d528768ada9ea4,True,True,2024-06-12,2024-01-19,True,True,stabilityai/stablelm-2-zephyr-1_6b,0
628
  πŸ”Ά,huggyllama/llama-13b,9.25,24.11,0.24,16.15,0.4,1.21,0.01,0.67,0.26,2.81,0.35,10.58,0.2,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,other,13,137,True,bf57045473f207bb1de1ed035ace226f4d9f9bba,True,True,2024-07-04,2023-04-03,False,False,huggyllama/llama-13b,0
629
  🟒,Qwen/Qwen1.5-1.8B,9.12,21.54,0.22,9.76,0.35,2.27,0.02,7.38,0.31,3.96,0.36,9.8,0.19,🟒 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,other,1,43,True,7846de7ed421727b318d6605a0bfab659da2c067,True,True,2024-06-13,2024-01-22,False,True,Qwen/Qwen1.5-1.8B,0
630
- 🟒,ai21labs/Jamba-v0.1,9.1,20.26,0.2,10.72,0.36,0.98,0.01,2.46,0.27,3.71,0.36,16.45,0.25,🟒 pretrained,JambaForCausalLM,Original,bfloat16,True,apache-2.0,51,1170,True,ce13f3fe99555a2606d1892665bb67649032ff2d,True,False,2024-06-27,2024-03-28,False,True,ai21labs/Jamba-v0.1,0
631
  πŸ’¬,Qwen/Qwen1.5-1.8B-Chat,9.01,20.19,0.2,5.91,0.33,0.45,0.0,6.38,0.3,12.18,0.43,8.93,0.18,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,1,44,True,e482ee3f73c375a627a16fdf66fd0c8279743ca6,True,True,2024-06-12,2024-01-30,True,True,Qwen/Qwen1.5-1.8B-Chat,0
632
  🟒,CortexLM/btlm-7b-base-v0.2,8.84,14.83,0.15,16.19,0.4,1.06,0.01,0.45,0.25,5.54,0.38,15.0,0.23,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,mit,6,0,True,eda8b4298365a26c8981316e09427c237b11217f,True,True,2024-06-26,2024-06-13,False,False,CortexLM/btlm-7b-base-v0.2,0
633
  πŸ’¬,0-hero/Matter-0.2-7B-DPO,8.81,33.03,0.33,10.06,0.36,0.83,0.01,1.23,0.26,5.87,0.38,1.82,0.12,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,3,True,26a66f0d862e2024ce4ad0a09c37052ac36e8af6,True,True,2024-08-05,2024-04-13,True,False,0-hero/Matter-0.2-7B-DPO,0
@@ -661,7 +663,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
661
  πŸ’¬,google/gemma-2b,7.17,24.78,0.25,7.95,0.34,1.21,0.01,1.57,0.26,4.13,0.37,3.4,0.13,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",GemmaForCausalLM,Original,bfloat16,True,other,2,27,True,bf6bfe30c31c18620767ad60d0bff89343804230,True,True,2024-07-06,2024-03-24,True,False,anakin87/gemma-2b-orpo,1
662
  🟩,NousResearch/Yarn-Llama-2-7b-64k,7.12,17.0,0.17,7.04,0.33,0.98,0.01,1.9,0.26,6.93,0.39,8.87,0.18,🟩 continuously pretrained,LlamaForCausalLM,Original,bfloat16,True,,7,23,True,08491431ac3b50add7443f5d4c02850801d877be,True,True,2024-06-13,2023-08-30,False,True,NousResearch/Yarn-Llama-2-7b-64k,0
663
  🟒,Qwen/Qwen2-0.5B,7.06,18.67,0.19,7.99,0.33,2.57,0.03,0.78,0.26,4.6,0.38,7.76,0.17,🟒 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,0,84,True,ff3a49fac17555b8dfc4db6709f480cc8f16a9fe,True,True,2024-06-09,2024-05-31,False,True,Qwen/Qwen2-0.5B,0
664
- 🟒,microsoft/phi-1_5,7.06,20.33,0.2,7.47,0.34,1.13,0.01,2.35,0.27,3.39,0.34,7.68,0.17,🟒 pretrained,PhiForCausalLM,Original,float16,True,mit,1,1303,True,675aa382d814580b22651a30acb1a585d7c25963,True,True,2024-06-09,2023-09-10,False,True,microsoft/phi-1_5,0
665
  🟒,google/codegemma-1.1-2b,7.02,22.94,0.23,7.55,0.34,0.6,0.01,2.01,0.27,5.93,0.39,3.09,0.13,🟒 pretrained,GemmaForCausalLM,Original,bfloat16,True,gemma,2,17,True,9d69e500da236427eab5867552ffc87108964f4d,True,True,2024-08-12,2024-04-30,False,True,google/codegemma-1.1-2b,0
666
  🟒,google/recurrentgemma-2b,6.94,30.17,0.3,4.82,0.32,1.59,0.02,0.0,0.25,3.1,0.34,1.96,0.12,🟒 pretrained,RecurrentGemmaForCausalLM,Original,bfloat16,True,gemma,2,92,True,195f13c55b371fc721eda0662c00c64642c70e17,True,True,2024-06-13,2024-04-06,False,True,google/recurrentgemma-2b,0
667
  🟒,databricks/dolly-v1-6b,6.89,22.24,0.22,4.78,0.32,1.36,0.01,1.9,0.26,8.12,0.4,2.95,0.13,🟒 pretrained,GPTJForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,6,310,True,c9a85b3a322b402e20c839c702c725afe0cb454d,True,True,2024-06-12,2023-03-23,False,True,databricks/dolly-v1-6b,0
@@ -759,6 +761,6 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G
759
  πŸ”Ά,Replete-AI/Replete-LLM-Qwen2-7b (Merge),3.33,9.05,0.09,2.84,0.3,0.0,0.0,0.45,0.25,5.86,0.38,1.75,0.12,πŸ”Ά fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,float16,True,apache-2.0,7,11,True,e3569433b23fde853683ad61f342d2c1bd01d60a,True,True,2024-08-13,2024-08-09,True,False,Replete-AI/Replete-LLM-Qwen2-7b,1
760
  πŸ”Ά,pankajmathur/orca_mini_3b,3.07,7.42,0.07,4.69,0.32,0.53,0.01,0.0,0.25,4.2,0.33,1.61,0.11,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-sa-4.0,3,158,True,31e1a7bc3f7ea2f247b432d60036d975b8d590e9,True,True,2024-06-26,2023-06-22,False,False,pankajmathur/orca_mini_3b,0
761
  🟒,instruction-pretrain/InstructLM-500M,2.85,10.28,0.1,2.32,0.29,0.0,0.0,0.89,0.26,2.07,0.35,1.57,0.11,🟒 pretrained,MistralForCausalLM,Original,float16,True,apache-2.0,0,34,True,e9d33823c76303dfaff6a8397a8b70d0118ea350,True,True,2024-06-27,2024-06-18,False,False,instruction-pretrain/InstructLM-500M,0
762
- πŸ”Ά,TinyLlama/TinyLlama-1.1B-Chat-v1.0,2.71,5.96,0.06,4.01,0.31,0.83,0.01,0.0,0.25,4.31,0.35,1.12,0.11,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,1,1046,True,fe8a4ea1ffedaf415f4da2f062534de366a451e6,True,True,2024-08-04,2023-12-30,False,True,TinyLlama/TinyLlama-1.1B-Chat-v1.0,0
763
  🟒,NucleusAI/nucleus-22B-token-500B,1.63,2.57,0.03,1.89,0.29,0.0,0.0,0.0,0.25,3.55,0.35,1.8,0.12,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,mit,21,25,True,49bb1a47c0d32b4bfa6630a4eff04a857adcd4ca,True,True,2024-06-26,2023-10-06,False,False,NucleusAI/nucleus-22B-token-500B,0
764
  πŸ”Ά,pankajmathur/orca_mini_v6_8b,1.41,1.11,0.01,3.22,0.3,0.0,0.0,0.0,0.24,2.77,0.36,1.38,0.11,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,1,True,e95dc8e4c6b6ca5957b657cc2d905683142eaf3e,True,True,2024-06-26,2024-06-02,True,False,pankajmathur/orca_mini_v6_8b,0
 
44
  πŸ’¬,MTSAIR/MultiVerse_70B,31.73,52.49,0.52,46.14,0.62,16.16,0.16,13.87,0.35,18.82,0.47,42.89,0.49,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,other,72,38,True,063430cdc4d972a0884e3e3e3d45ea4afbdf71a2,True,True,2024-06-29,2024-03-25,False,False,MTSAIR/MultiVerse_70B,0
45
  🀝,paloalma/Le_Triomphant-ECE-TW3,31.66,54.02,0.54,44.96,0.61,17.45,0.17,13.2,0.35,18.5,0.47,41.81,0.48,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,72,3,True,f72399253bb3e65c0f55e50461488c098f658a49,True,True,2024-07-25,2024-04-01,False,False,paloalma/Le_Triomphant-ECE-TW3,0
46
  πŸ”Ά,failspy/Phi-3-medium-4k-instruct-abliterated-v3,31.55,63.19,0.63,46.73,0.63,14.12,0.14,8.95,0.32,18.52,0.46,37.78,0.44,πŸ”Ά fine-tuned on domain-specific datasets,Phi3ForCausalLM,Original,bfloat16,True,mit,13,22,True,959b09eacf6cae85a8eb21b25e998addc89a367b,True,True,2024-07-29,2024-05-22,True,False,failspy/Phi-3-medium-4k-instruct-abliterated-v3,0
47
+ πŸ’¬,microsoft/Phi-3-medium-128k-instruct,31.52,60.4,0.6,48.46,0.64,16.16,0.16,11.52,0.34,11.35,0.41,41.24,0.47,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,bfloat16,True,mit,13,360,True,fa7d2aa4f5ea69b2e36b20d050cdae79c9bfbb3f,True,True,2024-08-21,2024-05-07,True,True,microsoft/Phi-3-medium-128k-instruct,0
48
  πŸ’¬,Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO,31.42,47.99,0.48,51.03,0.65,17.45,0.17,10.18,0.33,20.53,0.48,41.37,0.47,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,float16,True,mit,13,3,True,b749dbcb19901b8fd0e9f38c923a24533569f895,True,True,2024-08-13,2024-06-15,True,False,Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO,0
49
  πŸ’¬,CohereForAI/c4ai-command-r-plus,30.86,76.64,0.77,39.92,0.58,7.55,0.08,7.38,0.31,20.42,0.48,33.24,0.4,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",CohereForCausalLM,Original,float16,True,cc-by-nc-4.0,103,1640,True,fa1bd7fb1572ceb861bbbbecfa8af83b29fa8cca,True,True,2024-06-13,2024-04-03,True,True,CohereForAI/c4ai-command-r-plus,0
50
  πŸ’¬,internlm/internlm2_5-7b-chat,30.46,61.4,0.61,57.67,0.71,8.31,0.08,10.63,0.33,14.35,0.44,30.42,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",InternLM2ForCausalLM,Original,float16,True,other,7,148,True,bebb00121ee105b823647c3ba2b1e152652edc33,True,True,2024-07-03,2024-06-27,True,True,internlm/internlm2_5-7b-chat,0
 
52
  🀝,altomek/YiSM-34B-0rn (Merge),30.15,42.84,0.43,45.38,0.61,20.62,0.21,16.22,0.37,14.76,0.44,41.06,0.47,🀝 base merges and moerges,LlamaForCausalLM,Original,float16,False,apache-2.0,34,1,True,7a481c67cbdd5c846d6aaab5ef9f1eebfad812c2,True,True,2024-06-27,2024-05-26,True,False,altomek/YiSM-34B-0rn,1
53
  🀝,paloalma/ECE-TW3-JRGL-V1,30.02,55.35,0.55,46.7,0.63,11.86,0.12,12.98,0.35,17.46,0.46,35.79,0.42,🀝 base merges and moerges,LlamaForCausalLM,Original,float16,False,apache-2.0,68,1,True,2f08c7ab9db03b1b9f455c7beee6a41e99aa910e,True,True,2024-08-04,2024-04-03,False,False,paloalma/ECE-TW3-JRGL-V1,0
54
  πŸ”Ά,jpacifico/Chocolatine-14B-Instruct-4k-DPO,29.83,46.89,0.47,48.02,0.63,14.88,0.15,12.19,0.34,15.15,0.44,41.82,0.48,πŸ”Ά fine-tuned on domain-specific datasets,Phi3ForCausalLM,Original,float16,True,mit,13,1,True,30677e58010979af26b70240846fdf7ff38cbbf2,True,True,2024-08-08,2024-08-01,False,False,jpacifico/Chocolatine-14B-Instruct-4k-DPO,0
55
+ πŸ’¬,microsoft/Phi-3-small-8k-instruct,29.64,64.97,0.65,46.21,0.62,2.64,0.03,8.28,0.31,16.77,0.46,38.96,0.45,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Phi3SmallForCausalLM,Original,bfloat16,True,mit,7,148,True,1535ae26fb4faada95c6950e8bc6e867cdad6b00,True,True,2024-06-13,2024-05-07,True,True,microsoft/Phi-3-small-8k-instruct,0
56
  πŸ’¬,Qwen/Qwen2-57B-A14B,29.6,63.38,0.63,41.79,0.59,7.7,0.08,10.85,0.33,14.18,0.44,39.73,0.46,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2MoeForCausalLM,Original,bfloat16,True,apache-2.0,57,71,True,5ea455a449e61a92a5b194ee06be807647d3e8b5,True,True,2024-08-14,2024-06-04,True,True,Qwen/Qwen2-57B-A14B-Instruct,1
57
  🟒,Qwen/Qwen1.5-110B,29.56,34.22,0.34,44.28,0.61,23.04,0.23,13.65,0.35,13.71,0.44,48.45,0.54,🟒 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,other,111,88,True,16659038ecdcc771c1293cf47020fa7cc2750ee8,True,True,2024-06-13,2024-04-25,False,True,Qwen/Qwen1.5-110B,0
58
  πŸ”Ά,moreh/MoMo-72B-lora-1.8.7-DPO,29.35,51.67,0.52,43.13,0.6,16.77,0.17,9.84,0.32,14.42,0.45,40.26,0.46,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,72,463,True,a1d657156f82c24b670158406378648233487011,True,True,2024-06-12,2024-02-02,False,True,abacusai/Smaug-72B-v0.1,1
 
68
  🟒,dnhkng/RYS-Phi-3-medium-4k-instruct,28.38,43.91,0.44,46.75,0.62,11.78,0.12,13.98,0.35,11.09,0.43,42.74,0.48,🟒 pretrained,Phi3ForCausalLM,Original,bfloat16,True,mit,17,1,True,1009e916b1ff8c9a53bc9d8ff48bea2a15ccde26,True,True,2024-08-07,2024-08-06,False,False,dnhkng/RYS-Phi-3-medium-4k-instruct,0
69
  πŸ”Ά,NLPark/AnFeng_v3.1-Avocet,28.05,50.96,0.51,40.31,0.58,13.9,0.14,9.96,0.32,14.98,0.45,38.2,0.44,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,cc-by-nc-nd-4.0,34,0,True,5170739731033323e6e66a0f68d34790042a3b2a,True,True,2024-08-07,2024-08-03,False,False,NLPark/AnFeng_v3.1-Avocet,0
70
  🀝,OpenBuddy/openbuddy-zero-56b-v21.2-32k,27.99,50.57,0.51,44.8,0.61,12.99,0.13,9.06,0.32,12.78,0.43,37.77,0.44,🀝 base merges and moerges,LlamaForCausalLM,Original,float16,True,other,56,0,True,c7a1a4a6e798f75d1d3219ab9ff9f2692e29f7d5,True,True,2024-06-26,2024-06-10,True,False,OpenBuddy/openbuddy-zero-56b-v21.2-32k,0
71
+ πŸ’¬,meta-llama/Meta-Llama-3.1-8B,27.91,78.56,0.79,29.89,0.51,17.6,0.18,2.35,0.27,8.41,0.39,30.68,0.38,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,2176,True,df34336b42332c6d360959e259cd6271c6a09fd4,True,True,2024-08-15,2024-07-18,True,True,meta-llama/Meta-Llama-3.1-8B-Instruct,1
72
  πŸ’¬,vicgalle/Configurable-Llama-3.1-8B-Instruct,27.77,83.12,0.83,29.66,0.5,15.86,0.16,3.24,0.27,5.93,0.38,28.8,0.36,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,apache-2.0,8,8,True,133b3ab1a5385ff9b3d17da2addfe3fc1fd6f733,True,True,2024-08-05,2024-07-24,True,False,vicgalle/Configurable-Llama-3.1-8B-Instruct,0
73
  πŸ”Ά,BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B,27.74,51.86,0.52,35.38,0.55,13.97,0.14,13.87,0.35,16.72,0.46,34.65,0.41,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,2,True,a42c86c61b98ca4fdf238d688fe6ea11cf414d29,True,True,2024-08-05,2024-07-09,True,False,BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B,0
74
  πŸ”Ά,01-ai/Yi-1.5-34B,27.73,38.53,0.39,44.17,0.61,15.18,0.15,12.42,0.34,16.97,0.46,39.1,0.45,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,34,34,True,1ec522298a6935c881df6dc29d3669833bd8672d,True,True,2024-07-27,2024-05-18,True,True,cognitivecomputations/dolphin-2.9.1-yi-1.5-34b,1
75
  πŸ’¬,01-ai/Yi-1.5-9B-Chat,27.71,60.46,0.6,36.95,0.56,11.63,0.12,11.3,0.33,12.84,0.43,33.06,0.4,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,126,True,bc87d8557c98dc1e5fdef6ec23ed31088c4d3f35,True,True,2024-06-12,2024-05-10,True,True,01-ai/Yi-1.5-9B-Chat,0
76
  πŸ’¬,jpacifico/Chocolatine-3B-Instruct-DPO-Revised,27.63,56.23,0.56,37.16,0.55,14.5,0.15,9.62,0.32,15.1,0.45,33.21,0.4,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,float16,True,mit,3,10,True,c403df6c0f78148cfb477972455cbd859149311a,True,True,2024-07-19,2024-07-17,True,False,jpacifico/Chocolatine-3B-Instruct-DPO-Revised,0
77
+ πŸ’¬,microsoft/Phi-3.5-mini-instruct,27.4,57.75,0.58,36.75,0.55,14.95,0.15,11.97,0.34,10.1,0.4,32.91,0.4,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,bfloat16,True,mit,3,407,True,64963004ad95869fa73a30279371c8778509ac84,True,True,2024-08-21,2024-08-16,True,True,microsoft/Phi-3.5-mini-instruct,0
78
+ πŸ’¬,microsoft/Phi-3-mini-4k-instruct,27.2,54.77,0.55,36.56,0.55,14.2,0.14,10.96,0.33,13.12,0.43,33.58,0.4,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,float16,True,mit,3,996,True,c1358f8a35e6d2af81890deffbbfa575b978c62f,True,True,2024-07-02,2024-04-22,True,True,microsoft/Phi-3-mini-4k-instruct,0
79
  πŸ’¬,mistralai/Mixtral-8x7B-v0.1,27.13,58.97,0.59,37.11,0.55,10.88,0.11,9.51,0.32,16.68,0.46,29.62,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MixtralForCausalLM,Original,bfloat16,True,apache-2.0,46,408,True,286ae6737d048ad1d965c2e830864df02db50f2f,True,False,2024-07-27,2024-01-11,True,True,NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO,1
80
  πŸ’¬,Qwen/Qwen1.5-32B-Chat,27.1,55.32,0.55,44.55,0.61,6.65,0.07,7.49,0.31,10.2,0.42,38.41,0.45,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,32,106,True,0997b012af6ddd5465d40465a8415535b2f06cfc,True,True,2024-06-12,2024-04-03,True,True,Qwen/Qwen1.5-32B-Chat,0
81
  🀝,mlabonne/NeuralDaredevil-8B-abliterated,27.01,75.61,0.76,30.31,0.51,8.01,0.08,7.49,0.31,9.08,0.4,31.57,0.38,🀝 base merges and moerges,LlamaForCausalLM,Original,float16,True,llama3,8,127,True,2f4a5e8a8522f19dff345c7189b7891468763061,True,True,2024-07-25,2024-05-27,True,True,mlabonne/NeuralDaredevil-8B-abliterated,0
 
96
  🟒,meta-llama/Meta-Llama-3-70B,26.37,16.03,0.16,48.71,0.65,16.54,0.17,19.69,0.4,16.01,0.45,41.21,0.47,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3,70,795,True,b4d08b7db49d488da3ac49adf25a6b9ac01ae338,True,True,2024-06-12,2024-04-17,False,True,meta-llama/Meta-Llama-3-70B,0
97
  🀝,xxx777xxxASD/L3.1-ClaudeMaid-4x8B,26.19,66.96,0.67,29.44,0.51,12.84,0.13,5.48,0.29,13.75,0.43,28.67,0.36,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,True,llama3.1,24,7,True,2a98d9cb91c7aa775acbf5bfe7bb91beb2faf682,True,False,2024-07-28,2024-07-27,True,False,xxx777xxxASD/L3.1-ClaudeMaid-4x8B,0
98
  🀝,AbacusResearch/Jallabi-34B,25.97,35.29,0.35,43.62,0.6,3.93,0.04,11.86,0.34,20.24,0.48,40.91,0.47,🀝 base merges and moerges,LlamaForCausalLM,Original,float16,True,apache-2.0,34,2,True,f65696da4ed82c9a20e94b200d9dccffa07af682,True,True,2024-06-27,2024-03-01,False,False,AbacusResearch/Jallabi-34B,0
99
+ πŸ’¬,microsoft/Phi-3-mini-4k-instruct,25.97,56.13,0.56,39.27,0.57,11.63,0.12,9.28,0.32,7.64,0.4,31.85,0.39,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,bfloat16,True,mit,3,996,True,ff07dc01615f8113924aed013115ab2abd32115b,True,True,2024-06-12,2024-04-22,True,True,microsoft/Phi-3-mini-4k-instruct,0
100
  🟩,dnhkng/RYS-Medium,25.94,44.06,0.44,47.73,0.63,7.78,0.08,10.4,0.33,8.73,0.41,36.96,0.43,🟩 continuously pretrained,Phi3ForCausalLM,Original,bfloat16,True,mit,18,3,True,de09a79e6b2efdcc97490a37b770764e62749fd0,True,True,2024-07-17,2024-07-17,False,False,dnhkng/RYS-Medium,0
101
  🟒,meta-llama/Meta-Llama-3.1-70B,25.91,16.84,0.17,46.4,0.63,16.69,0.17,18.34,0.39,16.58,0.46,40.6,0.47,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3.1,70,223,True,f7d3cc45ed4ff669a354baf2e0f05e65799a0bee,True,True,2024-07-23,2024-07-14,True,True,meta-llama/Meta-Llama-3.1-70B,0
102
  πŸ’¬,cloudyu/Yi-34Bx2-MoE-60B-DPO,25.91,53.19,0.53,31.26,0.52,6.19,0.06,9.62,0.32,14.32,0.44,40.85,0.47,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MixtralForCausalLM,Original,bfloat16,True,apache-2.0,60,2,True,5c2d31042229ee06246064100b781dd926cb0ffd,True,False,2024-08-06,2024-01-23,True,False,cloudyu/Yi-34Bx2-MoE-60B-DPO,0
103
  πŸ’¬,Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo,25.87,57.14,0.57,39.15,0.57,7.63,0.08,10.74,0.33,8.78,0.4,31.78,0.39,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,bfloat16,True,apache-2.0,3,0,True,2896ef357be81fd433c17801d76ce148e60a7032,True,True,2024-06-26,2024-06-24,True,False,Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo,0
104
  🀝,Casual-Autopsy/L3-Umbral-Mind-RP-v2.0-8B (Merge),25.76,71.23,0.71,32.49,0.53,10.12,0.1,4.92,0.29,5.55,0.37,30.26,0.37,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,llama3,8,11,True,b46c066ea8387264858dc3461f382e7b42fd9c48,True,True,2024-07-02,2024-06-26,True,False,Casual-Autopsy/L3-Umbral-Mind-RP-v2.0-8B,1
105
+ πŸ”Ά,Sao10K/L3-8B-Stheno-v3.2,25.76,68.73,0.69,32.02,0.52,8.53,0.09,8.05,0.31,6.45,0.38,30.76,0.38,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,8,200,True,4bb828f6e1b1efd648c39b1ad682c44ff260f018,True,True,2024-06-30,2024-06-05,True,False,Sao10K/L3-8B-Stheno-v3.2,0
106
  πŸ”Ά,Nitral-AI/Hathor_Stable-v0.2-L3-8B,25.7,71.75,0.72,32.83,0.53,9.21,0.09,4.92,0.29,5.56,0.38,29.96,0.37,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,8,50,True,1c9f391c3e349f8ba51b5696290ee6db6a2b63fd,True,True,2024-07-02,2024-06-09,True,False,Nitral-AI/Hathor_Stable-v0.2-L3-8B,0
107
  πŸ’¬,cognitivecomputations/dolphin-2.9.2-Phi-3-Medium (Merge),25.66,42.48,0.42,49.72,0.65,0.53,0.01,10.29,0.33,11.41,0.42,39.5,0.46,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,mit,-1,15,True,0470c5b912b51fa6e27d87a8ea7feafacd8cb101,True,True,2024-08-05,2024-05-31,True,True,cognitivecomputations/dolphin-2.9.2-Phi-3-Medium,1
108
  πŸ”Ά,VAGOsolutions/SauerkrautLM-Nemo-12b-Instruct,25.63,61.13,0.61,32.34,0.52,8.69,0.09,7.94,0.31,17.16,0.45,26.5,0.34,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,18,True,fcb056465084ab2c71503a0760f46e4be79c985c,True,True,2024-07-22,2024-07-22,True,False,VAGOsolutions/SauerkrautLM-Nemo-12b-Instruct,0
 
129
  πŸ”Ά,arcee-ai/Llama-Spark,24.9,79.11,0.79,29.77,0.51,1.06,0.01,6.6,0.3,2.62,0.36,30.23,0.37,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,22,True,6d74a617fbb17a1ada08528f2673c89f84fb062e,True,True,2024-08-08,2024-07-26,True,False,arcee-ai/Llama-Spark,0
130
  πŸ”Ά,01-ai/Yi-1.5-9B,24.85,44.65,0.45,35.78,0.55,10.42,0.1,11.74,0.34,13.52,0.43,32.97,0.4,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,24,True,91f0a521e3e2a0675a3549aa5d3f40717068de94,True,True,2024-08-02,2024-05-18,True,True,cognitivecomputations/dolphin-2.9.1-yi-1.5-9b,1
131
  πŸ”Ά,Eurdem/Defne-llama3.1-8B,24.81,50.36,0.5,32.82,0.53,14.12,0.14,6.15,0.3,13.54,0.43,31.84,0.39,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,2,True,7832ba3066636bf4dab3e7d658c0b3ded12491ae,True,True,2024-08-14,2024-07-29,False,False,Eurdem/Defne-llama3.1-8B,0
132
+ πŸ’¬,Qwen/Qwen2-7B,24.76,56.79,0.57,37.81,0.55,8.61,0.09,6.38,0.3,7.37,0.39,31.64,0.38,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,536,True,41c66b0be1c3081f13defc6bdf946c2ef240d6a6,True,True,2024-06-12,2024-06-04,True,True,Qwen/Qwen2-7B-Instruct,1
133
  🀝,HiroseKoichi/Llama-Salad-4x8B-V3,24.75,66.54,0.67,31.93,0.52,8.53,0.09,7.05,0.3,6.45,0.37,27.98,0.35,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,llama3,24,4,True,a343915429779efbd1478f01ba1f7fd9d8d226c0,True,False,2024-06-26,2024-06-17,True,False,HiroseKoichi/Llama-Salad-4x8B-V3,0
134
  πŸ’¬,meta-llama/Meta-Llama-3-8B-Instruct,24.71,73.47,0.73,28.23,0.5,7.1,0.07,5.37,0.29,3.74,0.36,30.37,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,1,True,8346770280fa169d41d737785dd63a66e9d94501,True,True,2024-07-28,2024-06-07,True,False,haoranxu/Llama-3-Instruct-8B-SimPO,1
135
  🀝,invisietch/Nimbus-Miqu-v0.1-70B,24.71,46.47,0.46,43.45,0.6,5.44,0.05,11.86,0.34,9.33,0.41,31.7,0.39,🀝 base merges and moerges,LlamaForCausalLM,Original,float16,False,unknown,68,5,True,3209583a0849383daf8faa7b819f29726b8806cf,True,True,2024-07-03,2024-06-30,False,False,invisietch/Nimbus-Miqu-v0.1-70B,0
 
159
  πŸ”Ά,BAAI/Infinity-Instruct-3M-0625-Qwen2-7B,24.01,55.54,0.56,34.66,0.53,6.12,0.06,8.39,0.31,6.46,0.39,32.89,0.4,πŸ”Ά fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,7,True,503c24156d7682458686a7b5324f7f886e63470d,True,True,2024-08-05,2024-07-09,True,False,BAAI/Infinity-Instruct-3M-0625-Qwen2-7B,0
160
  πŸ”Ά,meta-llama/Meta-Llama-3.1-8B,24.0,64.74,0.65,26.26,0.48,10.73,0.11,8.95,0.32,6.91,0.39,26.4,0.34,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama3.1,8,2,True,6b2b5694a192cb29ad0e4314138affa25b630c0e,True,True,2024-08-07,2024-08-06,True,False,ValiantLabs/Llama3.1-8B-ShiningValiant2,2
161
  πŸ’¬,vicgalle/Roleplay-Llama-3-8B,23.94,73.2,0.73,28.55,0.5,8.69,0.09,1.45,0.26,1.68,0.35,30.09,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,apache-2.0,8,36,True,57297eb57dcc2c116f061d9dda341094203da01b,True,True,2024-06-26,2024-04-19,True,False,vicgalle/Roleplay-Llama-3-8B,0
162
+ πŸ’¬,meta-llama/Meta-Llama-3-8B-Instruct,23.91,74.08,0.74,28.24,0.5,8.69,0.09,1.23,0.26,1.6,0.36,29.6,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,3361,True,e1945c40cd546c78e41f1151f4db032b271faeaa,True,True,2024-06-12,2024-04-17,True,True,meta-llama/Meta-Llama-3-8B-Instruct,0
163
  πŸ’¬,01-ai/Yi-34B-Chat,23.9,46.99,0.47,37.62,0.56,4.31,0.04,11.74,0.34,8.36,0.4,34.37,0.41,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,34,340,True,2e528b6a80fb064a0a746c5ca43114b135e30464,True,True,2024-06-12,2023-11-22,True,True,01-ai/Yi-34B-Chat,0
164
  πŸ’¬,UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter2,23.78,69.89,0.7,29.87,0.51,8.76,0.09,2.24,0.27,2.0,0.36,29.91,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,0,True,730c7207d4b538feeb3c2e6d6f6a6ba8615a9be3,True,True,2024-08-07,2024-06-25,True,False,UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter2,0
165
  πŸ’¬,vicgalle/Configurable-Yi-1.5-9B-Chat,23.77,43.23,0.43,35.33,0.55,6.12,0.06,12.42,0.34,12.02,0.43,33.5,0.4,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,apache-2.0,8,2,True,992cb2232caae78eff6a836b2e0642f7cbf6018e,True,True,2024-06-26,2024-05-12,True,False,vicgalle/Configurable-Yi-1.5-9B-Chat,0
 
172
  πŸ’¬,SeaLLMs/SeaLLMs-v3-7B-Chat,23.63,43.77,0.44,33.8,0.53,15.11,0.15,6.49,0.3,10.47,0.42,32.16,0.39,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,7,38,True,67ef6dfd0a5df7af4be7a325786105a2ba4cbaf7,True,True,2024-07-29,2024-07-03,True,False,SeaLLMs/SeaLLMs-v3-7B-Chat,0
173
  πŸ”Ά,meta-llama/Meta-Llama-3-8B-Instruct,23.56,69.03,0.69,29.08,0.5,5.74,0.06,1.12,0.26,5.5,0.38,30.92,0.38,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,mit,8,4,True,9c95ccdeceed14a3c2881bc495101a1acca1385f,True,True,2024-07-02,2024-05-25,True,False,ZhangShenao/SELM-Llama-3-8B-Instruct-iter-3,3
174
  πŸ’¬,lordjia/Qwen2-Cantonese-7B-Instruct,23.5,54.35,0.54,32.45,0.52,8.76,0.09,6.04,0.3,7.81,0.4,31.59,0.38,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,eb8b0faee749d167fd70e74f5e579094c4cfe7fb,True,True,2024-08-03,2024-07-13,True,False,lordjia/Qwen2-Cantonese-7B-Instruct,0
175
+ πŸ’¬,meta-llama/Meta-Llama-3.1-8B,23.49,61.7,0.62,30.72,0.52,4.76,0.05,6.38,0.3,13.62,0.44,23.77,0.31,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,144,True,aabb745a717e133b74dcae23195d2635cf5f38cc,True,True,2024-08-28,2024-07-28,True,True,NousResearch/Hermes-3-Llama-3.1-8B,1
176
  πŸ’¬,saltlux/luxia-21.4b-alignment-v1.2,23.44,41.15,0.41,47.77,0.64,1.59,0.02,7.72,0.31,14.9,0.45,27.48,0.35,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,21,7,True,eed12b5574fa49cc81e57a88aff24c08c13721c0,True,True,2024-07-30,2024-05-27,True,False,saltlux/luxia-21.4b-alignment-v1.2,0
177
  πŸ’¬,meta-llama/Meta-Llama-3-8B-Instruct,23.43,66.87,0.67,28.06,0.48,6.57,0.07,3.02,0.27,5.31,0.38,30.77,0.38,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,8,1,True,555f4a0092f239557e1aa34f9d489e8156b907bb,True,True,2024-06-29,2024-04-26,True,False,lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top75,2
178
  πŸ’¬,meta-llama/Meta-Llama-3-8B-Instruct,23.37,66.37,0.66,27.67,0.49,8.53,0.09,3.02,0.27,4.81,0.36,29.83,0.37,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,8,2,True,5a2f17238cc83932e00613d285f8bf6b8f4a0c3a,True,True,2024-06-29,2024-04-26,True,False,lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top25,2
 
212
  πŸ”Ά,WizardLMTeam/WizardLM-70B-V1.0,22.32,49.51,0.5,37.54,0.56,3.47,0.03,2.13,0.27,14.09,0.44,27.18,0.34,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama2,70,234,True,54aaecaff7d0790eb9f0ecea1cc267a94cc66949,True,True,2024-06-12,2023-08-09,False,True,WizardLMTeam/WizardLM-70B-V1.0,0
213
  🀝,johnsutor/Llama-3-8B-Instruct_breadcrumbs-density-0.1-gamma-0.01 (Merge),22.3,42.71,0.43,29.55,0.5,3.7,0.04,9.62,0.32,17.8,0.46,30.44,0.37,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,f4ebbf27d586e94c63f0a7293f565cbd947b824f,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_breadcrumbs-density-0.1-gamma-0.01,1
214
  πŸ”Ά,NousResearch/Meta-Llama-3-8B,22.29,57.63,0.58,30.51,0.51,5.97,0.06,6.26,0.3,10.06,0.42,23.31,0.31,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,8,5,True,3cb5792509966a963645be24fdbeb2e7dc6cac15,True,True,2024-07-24,2024-05-02,True,False,vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B,2
215
+ πŸ’¬,mistralai/Mistral-Nemo-Base-2407,22.27,62.61,0.63,27.11,0.49,0.3,0.0,8.72,0.32,8.48,0.39,26.37,0.34,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,1007,True,4d14c1db68fe20dbf80b8eca85d39b909c5fe1d5,True,True,2024-08-29,2024-07-17,True,True,mistralai/Mistral-Nemo-Instruct-2407,1
216
  🟒,01-ai/Yi-34B,22.26,30.46,0.3,35.54,0.55,4.46,0.04,15.55,0.37,9.65,0.41,37.91,0.44,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,34,1278,True,e1e7da8c75cfd5c44522228599fd4d2990cedd1c,True,True,2024-06-12,2023-11-01,False,True,01-ai/Yi-34B,0
217
  🀝,johnsutor/Llama-3-8B-Instruct_breadcrumbs-density-0.5-gamma-0.1 (Merge),22.18,43.96,0.44,30.85,0.51,6.87,0.07,7.61,0.31,13.84,0.44,29.96,0.37,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,a481edaceeaab34f4dc0e90c4d8ec0f72658bbdd,True,True,2024-06-26,2024-06-08,False,False,johnsutor/Llama-3-8B-Instruct_breadcrumbs-density-0.5-gamma-0.1,1
218
  πŸ”Ά,meta-llama/Meta-Llama-3.1-8B,22.14,64.05,0.64,24.8,0.47,10.8,0.11,4.7,0.29,2.29,0.36,26.22,0.34,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama3.1,8,5,True,332c99d80f378c77b090745a5aac10f8ab339519,True,True,2024-08-14,2024-08-11,True,False,ValiantLabs/Llama3.1-8B-Enigma,2
 
225
  🀝,waqasali1707/Beast-Soul-new (Merge),22.01,50.3,0.5,33.04,0.52,6.42,0.06,4.36,0.28,14.5,0.45,23.42,0.31,🀝 base merges and moerges,MistralForCausalLM,Original,bfloat16,True,,7,0,False,a23d68c4556d91a129de3f8fd8b9e0ff0890f4cc,True,True,2024-08-07,2024-08-07,False,False,waqasali1707/Beast-Soul-new,1
226
  πŸ”Ά,chujiezheng/Llama-3-Instruct-8B-SimPO-ExPO,21.97,64.34,0.64,25.87,0.48,0.53,0.01,4.92,0.29,9.5,0.39,26.68,0.34,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,14,True,3fcaa9fe99691659eb197487e9a343f601bf63f2,True,True,2024-06-26,2024-05-26,True,False,chujiezheng/Llama-3-Instruct-8B-SimPO-ExPO,0
227
  πŸ”Ά,VAGOsolutions/SauerkrautLM-7b-LaserChat,21.97,59.88,0.6,22.99,0.45,6.72,0.07,6.71,0.3,9.92,0.41,25.61,0.33,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,10,True,cb759636a3d5b0768df2f43a3d3da9b17e10e7b9,True,True,2024-06-26,2024-02-05,True,False,VAGOsolutions/SauerkrautLM-7b-LaserChat,0
228
+ 🟒,01-ai/Yi-1.5-9B,21.95,29.36,0.29,30.5,0.51,10.2,0.1,17.23,0.38,12.03,0.43,32.4,0.39,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,43,True,8cfde9604384c50137bee480b8cef8a08e5ae81d,True,True,2024-06-12,2024-05-11,False,True,01-ai/Yi-1.5-9B,0
229
  πŸ”Ά,4season/final_model_test_v2,21.92,31.91,0.32,47.41,0.63,1.36,0.01,10.29,0.33,12.43,0.43,28.09,0.35,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,21,0,True,cf690c35d9cf0b0b6bf034fa16dbf88c56fe861c,True,True,2024-06-27,2024-05-20,False,False,4season/final_model_test_v2,0
230
  🀝,FallenMerick/Chewy-Lemon-Cookie-11B (Merge),21.91,48.75,0.49,33.01,0.53,4.61,0.05,3.91,0.28,15.95,0.45,25.19,0.33,🀝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,cc-by-4.0,10,0,True,0f5d0d6d218b3ef034f58eba32d6fe7ac4c237ae,True,True,2024-06-27,2024-06-06,False,False,FallenMerick/Chewy-Lemon-Cookie-11B,1
231
  πŸ’¬,OpenBuddy/openbuddy-llama3-8b-v21.2-32k,21.84,61.92,0.62,27.25,0.49,6.5,0.06,3.91,0.28,5.93,0.38,25.54,0.33,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,other,8,0,True,f3ea2dec2533a3dd97df32db2376b17875cafda2,True,True,2024-06-26,2024-06-18,True,False,OpenBuddy/openbuddy-llama3-8b-v21.2-32k,0
 
249
  🀝,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.7-gamma-0.1 (Merge),21.46,41.99,0.42,31.01,0.51,7.1,0.07,6.49,0.3,13.14,0.44,29.06,0.36,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,cd52bafe64e82d466d0bc590da5399f2299d24e1,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.7-gamma-0.1,1
250
  πŸ”Ά,flammenai/flammen15-gutenberg-DPO-v1-7B (Merge),21.46,47.98,0.48,32.67,0.52,6.72,0.07,4.59,0.28,12.53,0.43,24.29,0.32,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,0,True,550cd9548cba1265cb1771c85ebe498789fdecb5,True,True,2024-07-10,2024-04-05,False,False,flammenai/flammen15-gutenberg-DPO-v1-7B,1
251
  πŸ”Ά,Intel/neural-chat-7b-v3-2,21.43,49.88,0.5,30.24,0.5,4.53,0.05,5.37,0.29,20.06,0.49,18.52,0.27,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,float16,True,apache-2.0,7,56,True,0d8f77647810d21d935ea90c66d6339b85e65a75,True,True,2024-06-12,2023-11-21,False,True,Intel/neural-chat-7b-v3-2,0
252
+ πŸ’¬,LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct,21.4,71.93,0.72,17.98,0.42,4.46,0.04,2.13,0.27,3.3,0.37,28.63,0.36,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",ExaoneForCausalLM,Original,bfloat16,True,other,7,330,True,7f15baedd46858153d817445aff032f4d6cf4939,True,True,2024-08-18,2024-07-31,True,False,LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct,0
253
  πŸ’¬,Columbia-NLP/LION-LLaMA-3-8b-dpo-v1.0,21.34,49.57,0.5,30.36,0.5,9.06,0.09,4.14,0.28,10.28,0.41,24.65,0.32,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,2,False,3cddd4a6f5939a0a4db1092a0275342b7b9912f3,True,True,2024-07-04,2024-06-28,True,False,Columbia-NLP/LION-LLaMA-3-8b-dpo-v1.0,0
254
  πŸ’¬,mistralai/Mistral-7B-v0.1,21.33,52.86,0.53,29.25,0.49,3.85,0.04,4.47,0.28,16.06,0.45,21.46,0.29,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,254,True,4c6e34123b140ce773a8433cae5410949289102c,True,True,2024-06-12,2023-10-12,True,True,teknium/OpenHermes-2-Mistral-7B,1
255
  πŸ’¬,vicgalle/CarbonBeagle-11B-truthy,21.29,52.12,0.52,33.99,0.53,4.76,0.05,6.6,0.3,4.11,0.37,26.19,0.34,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,float16,True,apache-2.0,10,9,True,476cd2a6d938bddb38dfbeb4cb21e3e34303413d,True,True,2024-07-13,2024-02-10,True,False,vicgalle/CarbonBeagle-11B-truthy,0
 
281
  πŸ’¬,Qwen/Qwen2-7B,20.96,35.35,0.35,27.91,0.49,11.56,0.12,5.37,0.29,11.66,0.42,33.9,0.41,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,59,True,c443c4eb5138ed746ac49ed98bf3c183dc5380ac,True,True,2024-07-10,2024-05-24,True,True,cognitivecomputations/dolphin-2.9.2-qwen2-7b,1
282
  🀝,allknowingroger/MultiMash8-13B-slerp (Merge),20.95,43.21,0.43,32.27,0.52,6.95,0.07,5.15,0.29,14.5,0.44,23.62,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,apache-2.0,12,0,True,5590ccd99f74301951f450f9d0271a99e97728c8,True,True,2024-06-26,2024-05-26,False,False,allknowingroger/MultiMash8-13B-slerp,1
283
  πŸ”Ά,maldv/badger-writer-llama-3-8b (Merge),20.93,53.03,0.53,26.88,0.49,6.57,0.07,5.26,0.29,3.2,0.36,30.67,0.38,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,False,cc-by-nc-4.0,8,7,True,1d8134d01af87e994571ae16ccd7b31cce42418f,True,True,2024-06-26,2024-06-17,True,False,maldv/badger-writer-llama-3-8b,1
284
+ 🟒,google/gemma-2-9b,20.93,20.4,0.2,34.1,0.54,11.78,0.12,10.51,0.33,14.3,0.45,34.48,0.41,🟒 pretrained,Gemma2ForCausalLM,Original,bfloat16,True,gemma,9,544,True,beb0c08e9eeb0548f3aca2ac870792825c357b7d,True,True,2024-07-11,2024-06-24,False,True,google/gemma-2-9b,0
285
  🀝,icefog72/IceCocoaRP-7b (Merge),20.87,49.62,0.5,29.64,0.49,5.44,0.05,6.04,0.3,11.17,0.42,23.32,0.31,🀝 base merges and moerges,MistralForCausalLM,Original,float16,False,cc-by-nc-4.0,7,3,True,001beaf88932f7e010af21bbdeff0079bda73b1d,True,True,2024-06-26,2024-06-07,False,False,icefog72/IceCocoaRP-7b,1
286
  πŸ”Ά,fblgit/juanako-7b-UNA,20.77,48.37,0.48,30.42,0.51,2.87,0.03,6.15,0.3,17.16,0.46,19.68,0.28,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,23,True,b8ac85b603d5ee1ac619b2e1d0b3bb86c4eecb0c,True,True,2024-06-30,2023-11-27,False,False,fblgit/juanako-7b-UNA,0
287
  🀝,maldv/badger-lambda-llama-3-8b,20.76,48.61,0.49,28.1,0.5,8.31,0.08,4.25,0.28,4.52,0.38,30.74,0.38,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,8,9,True,8ef157d0d3c12212ca5e70d354869aed90e03f22,True,True,2024-06-26,2024-06-10,True,False,maldv/badger-lambda-llama-3-8b,0
 
300
  🀝,allknowingroger/MixTAO-19B-pass (Merge),20.54,38.14,0.38,31.58,0.51,5.59,0.06,4.59,0.28,19.95,0.48,23.39,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,apache-2.0,19,1,True,a41369cfcfbada9d5387051ba616bf1432b31d31,True,True,2024-06-26,2024-06-02,False,False,allknowingroger/MixTAO-19B-pass,1
301
  🀝,allknowingroger/MultiMash9-13B-slerp (Merge),20.53,41.88,0.42,32.55,0.52,7.18,0.07,4.03,0.28,14.21,0.44,23.33,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,apache-2.0,12,0,True,56dac45f387669baa04a8997ebb9ea63c65fbbd1,True,True,2024-06-26,2024-05-26,False,False,allknowingroger/MultiMash9-13B-slerp,1
302
  🀝,shadowml/BeagSake-7B (Merge),20.5,40.19,0.4,32.53,0.52,6.27,0.06,4.03,0.28,16.38,0.46,23.61,0.31,🀝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,cc-by-nc-4.0,7,1,True,b7a3b25a188a4608fd05fc4247ddd504c1f529d1,True,True,2024-06-29,2024-01-31,False,False,shadowml/BeagSake-7B,1
303
+ πŸ’¬,meta-llama/Meta-Llama-3-8B-Instruct,20.48,47.82,0.48,26.8,0.49,8.38,0.08,5.7,0.29,5.4,0.38,28.79,0.36,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,llama3,8,3361,True,e1945c40cd546c78e41f1151f4db032b271faeaa,True,True,2024-07-08,2024-04-17,False,True,meta-llama/Meta-Llama-3-8B-Instruct,0
304
  πŸ”Ά,SanjiWatsuki/Kunoichi-DPO-v2-7B,20.41,54.31,0.54,20.9,0.44,6.57,0.07,6.15,0.3,11.09,0.42,23.41,0.31,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,float16,True,cc-by-nc-4.0,7,78,True,5278247beb482c4fceff2294570236d68b74d132,True,True,2024-06-28,2024-01-13,True,False,SanjiWatsuki/Kunoichi-DPO-v2-7B,0
305
  🀝,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.3-gamma-0.01 (Merge),20.39,35.18,0.35,29.14,0.5,1.13,0.01,7.49,0.31,20.35,0.49,29.01,0.36,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,fa77530fe3723d7b15b06b88c3ca6110a8421742,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.3-gamma-0.01,1
306
  🀝,allknowingroger/Multimash3-12B-slerp (Merge),20.38,44.37,0.44,32.15,0.52,5.74,0.06,4.03,0.28,13.03,0.43,22.97,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,apache-2.0,12,0,True,0b90bf0b5230d02b4ba63879fc3bf0b85d46c3ce,True,True,2024-06-26,2024-05-21,False,False,allknowingroger/Multimash3-12B-slerp,1
 
326
  🀝,allknowingroger/MixTaoTruthful-13B-slerp (Merge),20.13,41.39,0.41,32.71,0.52,5.89,0.06,4.59,0.28,12.86,0.43,23.33,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,apache-2.0,12,0,True,3324d37e138c6bf0d6891e54b6dd839c8d2f35ec,True,True,2024-06-26,2024-05-25,False,False,allknowingroger/MixTaoTruthful-13B-slerp,1
327
  🀝,johnsutor/Llama-3-8B-Instruct_breadcrumbs-density-0.3-gamma-0.01 (Merge),20.1,33.77,0.34,28.14,0.49,0.0,0.0,8.28,0.31,22.29,0.5,28.15,0.35,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,4a432be239528ffc654955338982f1f32eb12901,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_breadcrumbs-density-0.3-gamma-0.01,1
328
  πŸ’¬,cat-searcher/gemma-2-9b-it-sppo-iter-0,20.1,29.42,0.29,41.1,0.59,0.0,0.0,12.08,0.34,6.9,0.39,31.11,0.38,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Gemma2ForCausalLM,Original,bfloat16,True,,9,0,False,c2d7b76786151aecfa5972a2a3e937feb2d2c48b,True,True,2024-08-09,2024-08-05,True,False,cat-searcher/gemma-2-9b-it-sppo-iter-1-evol-1,2
329
+ πŸ’¬,MLP-KTLim/llama-3-Korean-Bllossom-8B (Merge),20.09,51.13,0.51,26.93,0.49,8.38,0.08,1.68,0.26,3.63,0.37,28.82,0.36,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,243,True,8a738f9f622ffc2b0a4a6b81dabbca80406248bf,True,True,2024-07-09,2024-04-25,True,False,MLP-KTLim/llama-3-Korean-Bllossom-8B,1
330
  🀝,allknowingroger/MultiMash-12B-slerp (Merge),20.08,39.74,0.4,31.93,0.51,7.48,0.07,3.58,0.28,14.77,0.44,22.97,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,apache-2.0,12,0,True,91a6d0fe6b9271000ca713ee9ab414c782ba4c50,True,True,2024-06-26,2024-05-20,False,False,allknowingroger/MultiMash-12B-slerp,1
331
  🀝,MaziyarPanahi/Calme-4x7B-MoE-v0.2,20.06,42.94,0.43,31.4,0.51,6.72,0.07,3.91,0.28,12.54,0.43,22.86,0.31,🀝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,True,apache-2.0,24,2,True,ffef41baf94b3f88b30cf0aeb3fd72d9e4187161,True,False,2024-08-05,2024-03-17,False,False,MaziyarPanahi/Calme-4x7B-MoE-v0.2,0
332
  🀝,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.5-gamma-0.01 (Merge),20.06,34.54,0.35,29.32,0.5,1.06,0.01,6.26,0.3,21.06,0.49,28.13,0.35,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,a31f86b538ba8b2983620cc27a741bc9a81a7e2f,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.5-gamma-0.01,1
 
449
  πŸ”Ά,mistralai/Mistral-7B-v0.3,17.1,39.46,0.39,24.12,0.46,3.32,0.03,2.8,0.27,10.28,0.41,22.6,0.3,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,2,True,404de3b56564dbd43cd64d97f8574b43189462f3,True,True,2024-07-20,2024-07-09,True,False,migtissera/Tess-3-7B-SFT,1
450
  πŸ”Ά,fblgit/una-cybertron-7b-v2-bf16,17.09,47.37,0.47,14.97,0.4,3.32,0.03,6.38,0.3,14.48,0.45,16.03,0.24,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,116,True,7ab101a153740aec39e95ec02831c56f4eab7910,True,True,2024-06-30,2023-12-02,True,False,fblgit/una-cybertron-7b-v2-bf16,0
451
  πŸ”Ά,zhengr/MixTAO-7Bx2-MoE-v8.1,17.06,41.88,0.42,19.18,0.42,5.97,0.06,6.49,0.3,8.3,0.4,20.52,0.28,πŸ”Ά fine-tuned on domain-specific datasets,MixtralForCausalLM,Original,bfloat16,True,apache-2.0,12,52,True,828e963abf2db0f5af9ed0d4034e538fc1cf5f40,True,False,2024-06-27,2024-02-26,True,False,zhengr/MixTAO-7Bx2-MoE-v8.1,0
452
+ πŸ”Ά,google/gemma-2-2b,17.05,56.68,0.57,17.98,0.42,0.08,0.0,3.24,0.27,7.08,0.39,17.22,0.25,πŸ”Ά fine-tuned on domain-specific datasets,InternLM2ForCausalLM,Original,bfloat16,True,gemma,2,488,True,2b6ac3ff954ad896c115bbfa1b571cd93ea2c20f,True,True,2024-07-31,2024-07-16,True,True,google/gemma-2-2b-it,1
453
  πŸ”Ά,Salesforce/LLaMA-3-8B-SFR-Iterative-DPO-R,17.03,38.16,0.38,29.15,0.5,0.15,0.0,5.03,0.29,5.55,0.36,24.14,0.32,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,73,True,ad7d1aed82eb6d8ca4b3aad627ff76f72ab34f70,True,True,2024-07-02,2024-05-09,True,True,Salesforce/LLaMA-3-8B-SFR-Iterative-DPO-R,0
454
  πŸ’¬,UCLA-AGI/Mistral7B-PairRM-SPPO-Iter2,17.0,44.46,0.44,22.48,0.45,1.51,0.02,5.15,0.29,9.8,0.41,18.63,0.27,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,8201064df67b5762ff9f361ff1b98aae3747855c,True,True,2024-08-07,2024-05-04,True,False,UCLA-AGI/Mistral7B-PairRM-SPPO-Iter2,0
455
  πŸ’¬,meta-llama/Meta-Llama-3-8B,16.89,44.97,0.45,24.31,0.46,2.57,0.03,2.01,0.27,3.74,0.34,23.71,0.31,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,3,True,7e420ddd6ff48bf213dcab2a9ddb7845b80dd1aa,True,True,2024-08-06,2024-07-15,True,False,Magpie-Align/Llama-3-8B-Magpie-Align-v0.3,2
 
500
  🟒,mistralai/Mistral-Nemo-Base-2407,15.08,16.3,0.16,29.37,0.5,4.98,0.05,5.82,0.29,6.52,0.39,27.46,0.35,🟒 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,11,236,True,d2efb15544d5401f761235bef327babb850887d0,True,True,2024-07-19,2024-07-18,False,True,mistralai/Mistral-Nemo-Base-2407,0
501
  πŸ”Ά,Changgil/K2S3-14b-v0.2,15.07,32.43,0.32,24.28,0.46,4.53,0.05,4.14,0.28,6.8,0.39,18.26,0.26,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,14,0,True,b4f0e1eed2640df2b75847ff37e6ebb1be217b6c,True,True,2024-06-27,2024-06-17,False,False,Changgil/K2S3-14b-v0.2,0
502
  🟩,NousResearch/Yarn-Solar-10b-64k,15.06,19.89,0.2,28.4,0.49,2.27,0.02,6.94,0.3,9.01,0.4,23.87,0.31,🟩 continuously pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,10,15,True,703818628a5e8ef637e48e8dbeb3662aa0497aff,True,True,2024-06-12,2024-01-17,False,True,NousResearch/Yarn-Solar-10b-64k,0
503
+ 🟒,tiiuae/falcon-mamba-7b,15.04,33.36,0.33,19.88,0.43,3.63,0.04,8.05,0.31,10.86,0.42,14.47,0.23,🟒 pretrained,FalconMambaForCausalLM,Original,bfloat16,True,other,7,175,True,5337fd73f19847e111ba2291f3f0e1617b90c37d,True,True,2024-07-23,2024-07-17,False,True,tiiuae/falcon-mamba-7b,0
504
  πŸ”Ά,pankajmathur/orca_mini_v3_13b,15.0,28.97,0.29,25.55,0.47,1.89,0.02,2.01,0.27,17.11,0.46,14.5,0.23,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,13,32,True,7d6e567d24ce2f228beaf54e89c17b0e750bfe99,True,True,2024-06-26,2023-08-09,False,False,pankajmathur/orca_mini_v3_13b,0
505
  🟒,Deci/DeciLM-7B,14.95,28.13,0.28,21.25,0.44,2.42,0.02,6.04,0.3,13.05,0.44,18.8,0.27,🟒 pretrained,DeciLMForCausalLM,Original,bfloat16,True,apache-2.0,7,222,True,c3c9f4226801dc0433f32aebffe0aac68ee2f051,True,True,2024-06-12,2023-12-10,False,True,Deci/DeciLM-7B,0
506
  πŸ’¬,meta-llama/Meta-Llama-3-8B,14.87,36.53,0.37,21.95,0.44,3.85,0.04,3.91,0.28,4.01,0.36,18.95,0.27,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,other,8,53,True,7f200e4c84ad0daa3ff6bc414012d8d0bacbf90e,True,True,2024-06-12,2024-04-18,True,True,mlabonne/OrpoLlama-3-8B,1
 
522
  πŸ”Ά,microsoft/Orca-2-7b,14.22,21.83,0.22,22.43,0.45,0.83,0.01,1.45,0.26,24.09,0.5,14.65,0.23,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,7,213,True,60e31e6bdcf582ad103b807cb74b73ee1d2c4b17,True,True,2024-06-12,2023-11-14,False,True,microsoft/Orca-2-7b,0
523
  πŸ”Ά,TencentARC/Mistral_Pro_8B_v0.1,14.2,21.15,0.21,22.89,0.45,5.66,0.06,4.03,0.28,11.83,0.42,19.61,0.28,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,8,66,True,366f159fc5b314ba2a955209d2bca4600f84dac0,True,True,2024-06-12,2024-02-22,False,True,TencentARC/Mistral_Pro_8B_v0.1,0
524
  🟒,tklohj/WindyFloLLM (Merge),14.17,26.69,0.27,24.4,0.46,1.13,0.01,3.36,0.28,11.86,0.43,17.57,0.26,🟒 pretrained,LlamaForCausalLM,Original,float16,True,,13,0,False,21f4241ab3f091d1d309e9076a8d8e3f014908a8,True,True,2024-07-10,2024-06-30,False,False,tklohj/WindyFloLLM,1
 
525
  🟒,mistral-community/Mistral-7B-v0.2,14.15,22.66,0.23,23.95,0.45,2.64,0.03,5.59,0.29,8.36,0.4,21.7,0.3,🟒 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,230,True,2c3e624962b1a3f3fbf52e15969565caa7bc064a,True,True,2024-06-12,2024-03-23,False,True,mistral-community/Mistral-7B-v0.2,0
526
+ 🟒,mistralai/Mistral-7B-v0.3,14.15,22.66,0.23,23.95,0.45,2.64,0.03,5.59,0.29,8.36,0.4,21.7,0.3,🟒 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,349,True,b67d6a03ca097c5122fa65904fce0413500bf8c8,True,True,2024-06-12,2024-05-22,False,True,mistralai/Mistral-7B-v0.3,0
527
  🟒,awnr/Mistral-7B-v0.1-signtensors-7-over-16,14.15,22.94,0.23,21.04,0.43,3.25,0.03,7.16,0.3,7.93,0.4,22.56,0.3,🟒 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,0e1f2cb0a81c38fc6c567d9c007883ab62fae266,True,True,2024-07-29,2024-07-29,False,False,awnr/Mistral-7B-v0.1-signtensors-7-over-16,0
528
  πŸ”Ά,netcat420/MFANNv0.19,14.14,30.57,0.31,24.92,0.47,2.64,0.03,7.61,0.31,2.72,0.35,16.36,0.25,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama3.1,8,0,True,af26a25549b7ad291766c479bebda58f15fbff42,True,True,2024-07-27,2024-07-27,False,False,netcat420/MFANNv0.19,0
529
  🀝,johnsutor/Llama-3-8B-Instruct_dare_linear (Merge),14.12,21.45,0.21,19.61,0.43,0.0,0.0,6.15,0.3,21.81,0.5,15.72,0.24,🀝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,abb81fd8fdc2ad32f65befcb7ae369c9837cd563,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_dare_linear,1
 
536
  πŸ”Ά,xinchen9/llama3-b8-ft-dis,13.85,15.46,0.15,24.73,0.46,3.17,0.03,8.39,0.31,6.41,0.37,24.93,0.32,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,8,0,True,e4da730f28f79543262de37908943c35f8df81fe,True,True,2024-07-11,2024-06-28,False,False,xinchen9/llama3-b8-ft-dis,0
537
  πŸ”Ά,openchat/openchat_v3.2,13.81,29.81,0.3,20.32,0.43,1.13,0.01,2.68,0.27,13.1,0.43,15.8,0.24,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama2,13,42,True,acc7ce92558681e749678648189812f15c1465fe,True,True,2024-06-12,2023-07-30,False,True,openchat/openchat_v3.2,0
538
  πŸ’¬,yam-peleg/Hebrew-Gemma-11B-Instruct,13.81,30.21,0.3,16.86,0.4,5.06,0.05,3.47,0.28,9.97,0.41,17.27,0.26,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",GemmaForCausalLM,Original,float16,True,other,10,21,True,a40259d1efbcac4829ed44d3b589716f615ed362,True,True,2024-07-31,2024-03-06,True,False,yam-peleg/Hebrew-Gemma-11B-Instruct,0
539
+ 🟒,meta-llama/Meta-Llama-3.1-8B,13.78,12.7,0.13,25.29,0.47,4.61,0.05,6.15,0.3,8.98,0.38,24.95,0.32,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,678,True,e5c39e551424c763dbc3e58e32ef2999d33a6d8d,True,True,2024-07-23,2024-07-14,True,True,meta-llama/Meta-Llama-3.1-8B,0
540
  πŸ”Ά,meta-llama/Meta-Llama-3-8B,13.78,24.01,0.24,18.39,0.42,0.45,0.0,2.13,0.27,19.94,0.48,17.75,0.26,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,9,True,4e1c955228bdb4d69c1c4560e8d5872312a8f033,True,True,2024-06-27,2024-06-01,True,False,AI-Sweden-Models/Llama-3-8B-instruct,2
541
  🟒,tiiuae/falcon-11B,13.78,32.61,0.33,21.94,0.44,2.34,0.02,2.8,0.27,7.53,0.4,15.44,0.24,🟒 pretrained,FalconForCausalLM,Original,bfloat16,True,unknown,11,204,True,066e3bf4e2d9aaeefa129af0a6d39727d27816b3,True,True,2024-06-09,2024-05-09,False,True,tiiuae/falcon-11B,0
542
  🟒,awnr/Mistral-7B-v0.1-signtensors-3-over-8,13.73,23.94,0.24,20.44,0.43,2.79,0.03,7.16,0.3,5.79,0.38,22.24,0.3,🟒 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,fa368f705ace05da2fef25c030fe740cf1fef176,True,True,2024-07-29,2024-07-29,False,False,awnr/Mistral-7B-v0.1-signtensors-3-over-8,0
 
548
  🟒,google/flan-t5-xxl,13.49,22.0,0.22,30.12,0.51,0.0,0.0,2.68,0.27,11.19,0.42,14.92,0.23,🟒 pretrained,T5ForConditionalGeneration,Original,float16,True,apache-2.0,11,1161,True,ae7c9136adc7555eeccc78cdd960dfd60fb346ce,True,True,2024-08-12,2022-10-21,False,True,google/flan-t5-xxl,0
549
  πŸ”Ά,LeroyDyer/Mixtral_AI_CyberTron_Ultra,13.47,15.56,0.16,27.75,0.48,0.76,0.01,5.7,0.29,10.3,0.41,20.73,0.29,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,float16,True,apache-2.0,7,4,True,50c69e539578ab5384eb018a60cc1268637becae,True,True,2024-07-12,2024-04-14,False,False,LeroyDyer/SpydazWeb_AI_CyberTron_Ultra_7b,1
550
  🟩,NousResearch/Yarn-Mistral-7b-64k,13.43,20.8,0.21,20.23,0.43,3.02,0.03,5.37,0.29,9.88,0.41,21.27,0.29,🟩 continuously pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,49,True,0273c624561fcecc8e8f4030492a9307aa60f945,True,True,2024-06-12,2023-10-31,False,True,NousResearch/Yarn-Mistral-7b-64k,0
551
+ 🟒,meta-llama/Meta-Llama-3-8B,13.41,14.55,0.15,24.5,0.46,3.25,0.03,7.38,0.31,6.24,0.36,24.55,0.32,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3,8,5570,True,62bd457b6fe961a42a631306577e622c83876cb6,True,True,2024-06-12,2024-04-17,False,True,meta-llama/Meta-Llama-3-8B,0
552
+ πŸ”Ά,Alibaba-NLP/gte-Qwen2-7B-instruct,13.34,22.55,0.23,21.93,0.45,3.47,0.03,0.0,0.24,6.32,0.36,25.79,0.33,πŸ”Ά fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,149,True,e26182b2122f4435e8b3ebecbf363990f409b45b,True,True,2024-08-05,2024-06-15,True,False,Alibaba-NLP/gte-Qwen2-7B-instruct,0
553
  πŸ”Ά,yam-peleg/Hebrew-Mistral-7B,13.23,23.28,0.23,20.18,0.43,4.53,0.05,3.91,0.28,7.67,0.4,19.78,0.28,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,59,True,3d32134b5959492fd7efbbf16395352594bc89f7,True,True,2024-07-11,2024-04-26,False,False,yam-peleg/Hebrew-Mistral-7B,0
554
  πŸ”Ά,kevin009/llamaRAGdrama,13.2,25.98,0.26,16.64,0.4,3.4,0.03,1.9,0.26,12.11,0.43,19.15,0.27,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,7,True,8c103ca8fa6dd9a8d3dab81b319408095e9a1ad8,True,True,2024-06-26,2024-02-04,True,False,kevin009/llamaRAGdrama,0
555
  🟩,NousResearch/Yarn-Mistral-7b-128k,13.16,19.34,0.19,20.63,0.43,2.49,0.02,6.49,0.3,8.95,0.41,21.03,0.29,🟩 continuously pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,569,True,d09f1f8ed437d61c1aff94c1beabee554843dcdd,True,True,2024-06-12,2023-10-31,False,True,NousResearch/Yarn-Mistral-7b-128k,0
 
559
  πŸ”Ά,openchat/openchat_v3.2_super,12.84,28.62,0.29,19.15,0.42,1.59,0.02,1.9,0.26,9.92,0.42,15.84,0.24,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama2,13,36,True,9479cc37d43234a57a33628637d1aca0293d745a,True,True,2024-06-12,2023-09-04,False,True,openchat/openchat_v3.2_super,0
560
  πŸ’¬,google/gemma-7b,12.83,38.68,0.39,11.88,0.36,1.59,0.02,4.59,0.28,12.53,0.43,7.72,0.17,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",GemmaForCausalLM,Original,bfloat16,True,gemma,8,1120,True,18329f019fb74ca4b24f97371785268543d687d2,True,True,2024-06-12,2024-02-13,True,True,google/gemma-7b-it,1
561
  πŸ’¬,meta-llama/Llama-2-70b-chat-hf,12.73,49.58,0.5,4.61,0.3,0.91,0.01,1.9,0.26,3.48,0.37,15.92,0.24,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,llama2,68,2129,True,e9149a12809580e8602995856f8098ce973d1080,True,True,2024-06-12,2023-07-14,True,True,meta-llama/Llama-2-70b-chat-hf,0
562
+ πŸ’¬,mistralai/Mistral-7B-v0.1,12.67,44.87,0.45,7.65,0.34,1.66,0.02,0.0,0.25,6.13,0.38,15.72,0.24,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1494,True,73068f3702d050a2fd5aa2ca1e612e5036429398,True,True,2024-06-27,2023-09-27,True,True,mistralai/Mistral-7B-Instruct-v0.1,1
563
  πŸ”Ά,Sao10K/L3-8B-Stheno-v3.3-32K,12.57,46.04,0.46,13.51,0.38,0.98,0.01,0.89,0.26,4.07,0.37,9.95,0.19,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,8,47,True,1a59d163e079c7e7f1542553d085853119960f0c,True,True,2024-06-26,2024-06-22,True,False,Sao10K/L3-8B-Stheno-v3.3-32K,0
564
  πŸ”Ά,netcat420/MFANN3bv0.18,12.55,22.06,0.22,23.07,0.45,1.89,0.02,1.01,0.26,10.6,0.4,16.67,0.25,πŸ”Ά fine-tuned on domain-specific datasets,PhiForCausalLM,Original,float16,True,mit,2,0,True,3e792e3413217b63ea9caa0e8b8595fbeb236a69,True,True,2024-07-25,2024-07-25,False,False,netcat420/MFANN3bv0.18,0
565
  πŸ”Ά,netcat420/MFANN3bv0.19,12.45,22.58,0.23,22.91,0.45,1.44,0.01,1.01,0.26,9.9,0.4,16.89,0.25,πŸ”Ά fine-tuned on domain-specific datasets,PhiForCausalLM,Original,float16,True,,2,0,False,073d42274686f5cb6ef6ff9f6ade24eab198e1f2,True,True,2024-08-08,2024-08-04,False,False,netcat420/MFANN3bv0.19,0
 
574
  🟒,awnr/Mistral-7B-v0.1-signtensors-5-over-16,12.16,21.18,0.21,17.54,0.41,2.19,0.02,4.14,0.28,6.14,0.37,21.75,0.3,🟒 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,5ea13b3d0723237889e1512bc70dae72f71884d1,True,True,2024-07-29,2024-07-29,False,False,awnr/Mistral-7B-v0.1-signtensors-5-over-16,0
575
  πŸ”Ά,NousResearch/Llama-2-13b-hf,12.12,26.68,0.27,18.21,0.42,0.83,0.01,3.02,0.27,8.53,0.4,15.44,0.24,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,mit,13,53,True,bcad6fff9f8591e091d2d57356a3f102197e8c5f,True,True,2024-06-12,2023-09-06,False,True,teknium/OpenHermes-13B,1
576
  πŸ’¬,internlm/internlm2_5-1_8b-chat,12.11,38.49,0.38,21.03,0.45,0.0,0.0,5.37,0.29,4.42,0.36,3.32,0.13,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",InternLM2ForCausalLM,Original,bfloat16,True,other,1,19,True,4426f00b854561fa60d555d2b628064b56bcb758,True,True,2024-08-07,2024-07-30,True,True,internlm/internlm2_5-1_8b-chat,0
577
+ πŸ’¬,unsloth/mistral-7b-v0.3-bnb-4bit,12.08,37.7,0.38,14.86,0.4,0.53,0.01,2.24,0.27,2.97,0.36,14.2,0.23,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,868d8a51e8deb6fd948eabe5bc296c53bcf41073,True,True,2024-09-02,2024-08-04,True,False,llmat/Mistral-v0.3-7B-ORPO,1
578
  πŸ’¬,unsloth/mistral-7b-v0.3-bnb-4bit,12.02,36.4,0.36,15.59,0.4,0.15,0.0,2.57,0.27,2.97,0.35,14.46,0.23,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,float16,True,apache-2.0,7,1,True,868d8a51e8deb6fd948eabe5bc296c53bcf41073,True,True,2024-08-06,2024-08-04,True,False,llmat/Mistral-v0.3-7B-ORPO,1
579
  πŸ”Ά,TencentARC/MetaMath-Mistral-Pro,12.01,21.19,0.21,22.37,0.44,4.61,0.05,2.57,0.27,4.99,0.35,16.35,0.25,πŸ”Ά fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,8,5,True,3835d38de15ed2a04c32aca879b782fc50e390bf,True,True,2024-06-12,2024-02-26,False,True,TencentARC/MetaMath-Mistral-Pro,0
580
  🟒,01-ai/Yi-6B-200K,11.9,8.43,0.08,20.15,0.43,1.21,0.01,4.25,0.28,16.84,0.46,20.49,0.28,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,6,173,True,4a74338e778a599f313e9fa8f5bc08c717604420,True,True,2024-06-12,2023-11-06,False,True,01-ai/Yi-6B-200K,0
 
621
  πŸ’¬,google/gemma-2-2b,9.71,8.96,0.09,17.37,0.41,4.15,0.04,4.59,0.28,10.91,0.42,12.28,0.21,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Gemma2ForCausalLM,Original,bfloat16,True,gemma,2,17,True,5c0854beb88a6711221771d1b13d51f733e6ca06,True,True,2024-08-25,2024-08-24,True,True,cognitivecomputations/dolphin-2.9.4-gemma2-2b,1
622
  πŸ”Ά,uukuguy/speechless-coder-ds-6.7b,9.64,25.05,0.25,15.9,0.4,1.66,0.02,1.9,0.26,5.34,0.38,7.99,0.17,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,6,5,True,c813a5268c6dfe267a720ad3b51773f1ab0feb59,True,True,2024-06-26,2023-12-30,False,False,uukuguy/speechless-coder-ds-6.7b,0
623
  πŸ”Ά,NousResearch/Llama-2-7b-hf,9.48,18.13,0.18,12.08,0.36,1.06,0.01,2.57,0.27,12.68,0.43,10.37,0.19,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,mit,7,13,True,9f55d6eb15f1edd52ee1fd863a220aa682e78a00,True,True,2024-06-12,2023-09-14,False,True,teknium/OpenHermes-7B,1
624
+ 🟒,google/flan-t5-large,9.42,22.01,0.22,17.51,0.42,0.0,0.0,0.11,0.25,9.01,0.41,7.88,0.17,🟒 pretrained,T5ForConditionalGeneration,Original,float16,True,apache-2.0,0,526,True,0613663d0d48ea86ba8cb3d7a44f0f65dc596a2a,True,True,2024-08-14,2022-10-21,False,True,google/flan-t5-large,0
625
+ 🟒,meta-llama/Llama-2-7b-chat-hf,9.4,39.65,0.4,4.49,0.31,0.68,0.01,0.56,0.25,3.48,0.37,7.52,0.17,🟒 pretrained,LlamaForCausalLM,Original,float16,True,llama2,6,3803,True,f5db02db724555f92da89c216ac04704f23d4590,True,True,2024-08-30,2023-07-13,True,True,meta-llama/Llama-2-7b-chat-hf,0
626
  πŸ”Ά,iRyanBell/ARC1-II,9.32,17.08,0.17,7.25,0.34,0.76,0.01,2.91,0.27,20.31,0.49,7.62,0.17,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,1,True,c81076b9bdaac0722b33e411a49b07a296e8fae8,True,True,2024-06-26,2024-06-12,False,False,iRyanBell/ARC1-II,0
627
+ πŸ”Ά,google/gemma-2-27b,9.3,24.07,0.24,15.31,0.39,0.0,0.0,4.03,0.28,1.6,0.35,10.79,0.2,πŸ”Ά fine-tuned on domain-specific datasets,Gemma2ForCausalLM,Original,bfloat16,True,gemma,27,10,True,27f15219df2000a16955c9403c3f38b5f3413b3d,True,True,2024-08-27,2024-08-13,True,False,AALF/gemma-2-27b-it-SimPO-37K,2
628
  πŸ”Ά,NousResearch/Nous-Hermes-llama-2-7b,9.28,17.29,0.17,13.79,0.38,0.68,0.01,1.79,0.26,11.68,0.43,10.44,0.19,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,mit,6,68,True,b7c3ec54b754175e006ef75696a2ba3802697078,True,True,2024-06-12,2023-07-25,False,True,NousResearch/Nous-Hermes-llama-2-7b,0
629
  πŸ’¬,stabilityai/stablelm-2-zephyr-1_6b,9.26,32.79,0.33,6.71,0.34,2.11,0.02,0.0,0.24,5.99,0.35,7.93,0.17,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",StableLmForCausalLM,Original,float16,True,other,1,176,True,2f275b1127d59fc31e4f7c7426d528768ada9ea4,True,True,2024-06-12,2024-01-19,True,True,stabilityai/stablelm-2-zephyr-1_6b,0
  πŸ”Ά,huggyllama/llama-13b,9.25,24.11,0.24,16.15,0.4,1.21,0.01,0.67,0.26,2.81,0.35,10.58,0.2,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,other,13,137,True,bf57045473f207bb1de1ed035ace226f4d9f9bba,True,True,2024-07-04,2023-04-03,False,False,huggyllama/llama-13b,0
  🟒,Qwen/Qwen1.5-1.8B,9.12,21.54,0.22,9.76,0.35,2.27,0.02,7.38,0.31,3.96,0.36,9.8,0.19,🟒 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,other,1,43,True,7846de7ed421727b318d6605a0bfab659da2c067,True,True,2024-06-13,2024-01-22,False,True,Qwen/Qwen1.5-1.8B,0
+ 🟒,ai21labs/Jamba-v0.1,9.1,20.26,0.2,10.72,0.36,0.98,0.01,2.46,0.27,3.71,0.36,16.45,0.25,🟒 pretrained,JambaForCausalLM,Original,bfloat16,True,apache-2.0,51,1169,True,ce13f3fe99555a2606d1892665bb67649032ff2d,True,False,2024-06-27,2024-03-28,False,True,ai21labs/Jamba-v0.1,0
  πŸ’¬,Qwen/Qwen1.5-1.8B-Chat,9.01,20.19,0.2,5.91,0.33,0.45,0.0,6.38,0.3,12.18,0.43,8.93,0.18,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,1,44,True,e482ee3f73c375a627a16fdf66fd0c8279743ca6,True,True,2024-06-12,2024-01-30,True,True,Qwen/Qwen1.5-1.8B-Chat,0
  🟒,CortexLM/btlm-7b-base-v0.2,8.84,14.83,0.15,16.19,0.4,1.06,0.01,0.45,0.25,5.54,0.38,15.0,0.23,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,mit,6,0,True,eda8b4298365a26c8981316e09427c237b11217f,True,True,2024-06-26,2024-06-13,False,False,CortexLM/btlm-7b-base-v0.2,0
  πŸ’¬,0-hero/Matter-0.2-7B-DPO,8.81,33.03,0.33,10.06,0.36,0.83,0.01,1.23,0.26,5.87,0.38,1.82,0.12,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,3,True,26a66f0d862e2024ce4ad0a09c37052ac36e8af6,True,True,2024-08-05,2024-04-13,True,False,0-hero/Matter-0.2-7B-DPO,0
  πŸ’¬,google/gemma-2b,7.17,24.78,0.25,7.95,0.34,1.21,0.01,1.57,0.26,4.13,0.37,3.4,0.13,"πŸ’¬ chat models (RLHF, DPO, IFT, ...)",GemmaForCausalLM,Original,bfloat16,True,other,2,27,True,bf6bfe30c31c18620767ad60d0bff89343804230,True,True,2024-07-06,2024-03-24,True,False,anakin87/gemma-2b-orpo,1
  🟩,NousResearch/Yarn-Llama-2-7b-64k,7.12,17.0,0.17,7.04,0.33,0.98,0.01,1.9,0.26,6.93,0.39,8.87,0.18,🟩 continuously pretrained,LlamaForCausalLM,Original,bfloat16,True,,7,23,True,08491431ac3b50add7443f5d4c02850801d877be,True,True,2024-06-13,2023-08-30,False,True,NousResearch/Yarn-Llama-2-7b-64k,0
  🟒,Qwen/Qwen2-0.5B,7.06,18.67,0.19,7.99,0.33,2.57,0.03,0.78,0.26,4.6,0.38,7.76,0.17,🟒 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,0,84,True,ff3a49fac17555b8dfc4db6709f480cc8f16a9fe,True,True,2024-06-09,2024-05-31,False,True,Qwen/Qwen2-0.5B,0
+ 🟒,microsoft/phi-1_5,7.06,20.33,0.2,7.47,0.34,1.13,0.01,2.35,0.27,3.39,0.34,7.68,0.17,🟒 pretrained,PhiForCausalLM,Original,float16,True,mit,1,1304,True,675aa382d814580b22651a30acb1a585d7c25963,True,True,2024-06-09,2023-09-10,False,True,microsoft/phi-1_5,0
  🟒,google/codegemma-1.1-2b,7.02,22.94,0.23,7.55,0.34,0.6,0.01,2.01,0.27,5.93,0.39,3.09,0.13,🟒 pretrained,GemmaForCausalLM,Original,bfloat16,True,gemma,2,17,True,9d69e500da236427eab5867552ffc87108964f4d,True,True,2024-08-12,2024-04-30,False,True,google/codegemma-1.1-2b,0
  🟒,google/recurrentgemma-2b,6.94,30.17,0.3,4.82,0.32,1.59,0.02,0.0,0.25,3.1,0.34,1.96,0.12,🟒 pretrained,RecurrentGemmaForCausalLM,Original,bfloat16,True,gemma,2,92,True,195f13c55b371fc721eda0662c00c64642c70e17,True,True,2024-06-13,2024-04-06,False,True,google/recurrentgemma-2b,0
  🟒,databricks/dolly-v1-6b,6.89,22.24,0.22,4.78,0.32,1.36,0.01,1.9,0.26,8.12,0.4,2.95,0.13,🟒 pretrained,GPTJForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,6,310,True,c9a85b3a322b402e20c839c702c725afe0cb454d,True,True,2024-06-12,2023-03-23,False,True,databricks/dolly-v1-6b,0
  πŸ”Ά,Replete-AI/Replete-LLM-Qwen2-7b (Merge),3.33,9.05,0.09,2.84,0.3,0.0,0.0,0.45,0.25,5.86,0.38,1.75,0.12,πŸ”Ά fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,float16,True,apache-2.0,7,11,True,e3569433b23fde853683ad61f342d2c1bd01d60a,True,True,2024-08-13,2024-08-09,True,False,Replete-AI/Replete-LLM-Qwen2-7b,1
  πŸ”Ά,pankajmathur/orca_mini_3b,3.07,7.42,0.07,4.69,0.32,0.53,0.01,0.0,0.25,4.2,0.33,1.61,0.11,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-sa-4.0,3,158,True,31e1a7bc3f7ea2f247b432d60036d975b8d590e9,True,True,2024-06-26,2023-06-22,False,False,pankajmathur/orca_mini_3b,0
  🟒,instruction-pretrain/InstructLM-500M,2.85,10.28,0.1,2.32,0.29,0.0,0.0,0.89,0.26,2.07,0.35,1.57,0.11,🟒 pretrained,MistralForCausalLM,Original,float16,True,apache-2.0,0,34,True,e9d33823c76303dfaff6a8397a8b70d0118ea350,True,True,2024-06-27,2024-06-18,False,False,instruction-pretrain/InstructLM-500M,0
+ πŸ”Ά,TinyLlama/TinyLlama-1.1B-Chat-v1.0,2.71,5.96,0.06,4.01,0.31,0.83,0.01,0.0,0.25,4.31,0.35,1.12,0.11,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,1,1047,True,fe8a4ea1ffedaf415f4da2f062534de366a451e6,True,True,2024-08-04,2023-12-30,False,True,TinyLlama/TinyLlama-1.1B-Chat-v1.0,0
  🟒,NucleusAI/nucleus-22B-token-500B,1.63,2.57,0.03,1.89,0.29,0.0,0.0,0.0,0.25,3.55,0.35,1.8,0.12,🟒 pretrained,LlamaForCausalLM,Original,bfloat16,True,mit,21,25,True,49bb1a47c0d32b4bfa6630a4eff04a857adcd4ca,True,True,2024-06-26,2023-10-06,False,False,NucleusAI/nucleus-22B-token-500B,0
  πŸ”Ά,pankajmathur/orca_mini_v6_8b,1.41,1.11,0.01,3.22,0.3,0.0,0.0,0.0,0.24,2.77,0.36,1.38,0.11,πŸ”Ά fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,1,True,e95dc8e4c6b6ca5957b657cc2d905683142eaf3e,True,True,2024-06-26,2024-06-02,True,False,pankajmathur/orca_mini_v6_8b,0