diff --git "a/llm-df.csv" "b/llm-df.csv" --- "a/llm-df.csv" +++ "b/llm-df.csv" @@ -1,7 +1,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,GPQA,GPQA Raw,MUSR,MUSR Raw,MMLU-PRO,MMLU-PRO Raw,Type,Architecture,Weight type,Precision,Not_Merged,Hub License,#Params (B),Hub ❤️,Available on the hub,Model sha,Flagged,MoE,Submission Date,Upload To Hub Date,Chat Template,Maintainer's Highlight,fullname,Generation,Base Model -💬,dfurman/CalmeRys-78B-Orpo-v0.1,50.78,81.63,0.82,61.92,0.73,37.92,0.38,20.02,0.4,36.37,0.59,66.8,0.7,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,mit,77,9,True,7988deb48419c3f56bb24c139c23e5c476ec03f8,True,True,2024-09-24,2024-09-24,True,False,dfurman/CalmeRys-78B-Orpo-v0.1,1,dfurman/CalmeRys-78B-Orpo-v0.1 (Merge) +💬,dfurman/CalmeRys-78B-Orpo-v0.1,50.78,81.63,0.82,61.92,0.73,37.92,0.38,20.02,0.4,36.37,0.59,66.8,0.7,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,mit,77,10,True,7988deb48419c3f56bb24c139c23e5c476ec03f8,True,True,2024-09-24,2024-09-24,True,False,dfurman/CalmeRys-78B-Orpo-v0.1,1,dfurman/CalmeRys-78B-Orpo-v0.1 (Merge) 💬,MaziyarPanahi/calme-2.4-rys-78b,50.26,80.11,0.8,62.16,0.73,37.69,0.38,20.36,0.4,34.57,0.58,66.69,0.7,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,mit,77,32,True,0a35e51ffa9efa644c11816a2d56434804177acb,True,True,2024-09-03,2024-08-07,True,False,MaziyarPanahi/calme-2.4-rys-78b,2,dnhkng/RYS-XLarge -🔶,rombodawg/Replete-LLM-V2.5-Qwen-72b_Duplicated,45.39,71.55,0.72,61.27,0.72,47.58,0.48,19.8,0.4,17.32,0.46,54.83,0.59,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,other,72,3,True,5260f182e7859e13d515c4cb3926ac85ad057504,True,True,2024-09-30,2024-09-30,False,False,rombodawg/Replete-LLM-V2.5-Qwen-72b_Duplicated,1,rombodawg/Replete-LLM-V2.5-Qwen-72b_Duplicated (Merge) +🔶,rombodawg/Rombos-LLM-V2.5-Qwen-72b,45.39,71.55,0.72,61.27,0.72,47.58,0.48,19.8,0.4,17.32,0.46,54.83,0.59,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,other,72,3,True,5260f182e7859e13d515c4cb3926ac85ad057504,True,True,2024-09-30,2024-09-30,False,False,rombodawg/Rombos-LLM-V2.5-Qwen-72b,1,rombodawg/Rombos-LLM-V2.5-Qwen-72b (Merge) 🔶,dnhkng/RYS-XLarge,44.75,79.96,0.8,58.77,0.71,38.97,0.39,17.9,0.38,23.72,0.5,49.2,0.54,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,mit,77,68,True,0f84dd9dde60f383e1e2821496befb4ce9a11ef6,True,True,2024-08-07,2024-07-24,False,False,dnhkng/RYS-XLarge,0,dnhkng/RYS-XLarge 💬,MaziyarPanahi/calme-2.1-rys-78b,44.14,81.36,0.81,59.47,0.71,36.4,0.36,19.24,0.39,19.0,0.47,49.38,0.54,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,mit,77,3,True,e746f5ddc0c9b31a2382d985a4ec87fa910847c7,True,True,2024-08-08,2024-08-06,True,False,MaziyarPanahi/calme-2.1-rys-78b,1,dnhkng/RYS-XLarge 💬,MaziyarPanahi/calme-2.3-rys-78b,44.02,80.66,0.81,59.57,0.71,36.56,0.37,20.58,0.4,17.0,0.45,49.73,0.55,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,mit,77,4,True,a8a4e55c2f7054d25c2f0ab3a3b3d806eb915180,True,True,2024-09-03,2024-08-06,True,False,MaziyarPanahi/calme-2.3-rys-78b,1,dnhkng/RYS-XLarge @@ -17,7 +17,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,VAGOsolutions/Llama-3.1-SauerkrautLM-70b-Instruct,42.24,86.56,0.87,57.24,0.7,29.91,0.3,12.19,0.34,19.39,0.47,48.17,0.53,🔶 fine-tuned on domain-specific 
datasets,LlamaForCausalLM,Original,bfloat16,True,llama3.1,70,14,True,e8e74aa789243c25a3a8f7565780a402f5050bbb,True,True,2024-08-26,2024-07-29,True,False,VAGOsolutions/Llama-3.1-SauerkrautLM-70b-Instruct,0,VAGOsolutions/Llama-3.1-SauerkrautLM-70b-Instruct 💬,anthracite-org/magnum-v1-72b,42.21,76.06,0.76,57.65,0.7,35.27,0.35,18.79,0.39,15.62,0.45,49.85,0.55,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,72,159,True,f8f85021bace7e8250ed8559c5b78b8b34f0c4cc,True,True,2024-09-21,2024-06-17,True,False,anthracite-org/magnum-v1-72b,2,Qwen/Qwen2-72B 💬,alpindale/magnum-72b-v1,42.17,76.06,0.76,57.65,0.7,35.27,0.35,18.79,0.39,15.62,0.45,49.64,0.55,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,72,159,True,fef27e0f235ae8858b84b765db773a2a954110dd,True,True,2024-07-25,2024-06-17,True,False,alpindale/magnum-72b-v1,2,Qwen/Qwen2-72B -💬,meta-llama/Meta-Llama-3.1-70B-Instruct,41.74,86.69,0.87,55.93,0.69,28.02,0.28,14.21,0.36,17.69,0.46,47.88,0.53,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3.1,70,551,True,b9461463b511ed3c0762467538ea32cf7c9669f2,True,True,2024-08-15,2024-07-16,True,True,meta-llama/Meta-Llama-3.1-70B-Instruct,1,meta-llama/Meta-Llama-3.1-70B +💬,meta-llama/Meta-Llama-3.1-70B-Instruct,41.74,86.69,0.87,55.93,0.69,28.02,0.28,14.21,0.36,17.69,0.46,47.88,0.53,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3.1,70,552,True,b9461463b511ed3c0762467538ea32cf7c9669f2,True,True,2024-08-15,2024-07-16,True,True,meta-llama/Meta-Llama-3.1-70B-Instruct,1,meta-llama/Meta-Llama-3.1-70B 🔶,dnhkng/RYS-Llama3.1-Large,41.6,84.92,0.85,55.41,0.69,28.4,0.28,16.55,0.37,17.09,0.46,47.21,0.52,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,mit,81,1,True,52cc979de78155b33689efa48f52a8aab184bd86,True,True,2024-08-22,2024-08-11,True,False,dnhkng/RYS-Llama3.1-Large,0,dnhkng/RYS-Llama3.1-Large 💬,anthracite-org/magnum-v2-72b,41.15,75.6,0.76,57.85,0.7,31.65,0.32,18.12,0.39,14.18,0.44,49.51,0.55,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,72,27,True,c9c5826ef42b9fcc8a8e1079be574481cf0b6cc6,True,True,2024-09-05,2024-08-18,True,False,anthracite-org/magnum-v2-72b,2,Qwen/Qwen2-72B 💬,abacusai/Smaug-Qwen2-72B-Instruct,41.08,78.25,0.78,56.27,0.69,35.35,0.35,14.88,0.36,15.18,0.44,46.56,0.52,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,72,8,True,af015925946d0c60ef69f512c3b35f421cf8063d,True,True,2024-07-29,2024-06-26,True,True,abacusai/Smaug-Qwen2-72B-Instruct,0,abacusai/Smaug-Qwen2-72B-Instruct @@ -27,7 +27,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 💬,upstage/solar-pro-preview-instruct,39.61,84.16,0.84,54.82,0.68,20.09,0.2,16.11,0.37,15.01,0.44,47.48,0.53,"💬 chat models (RLHF, DPO, IFT, ...)",SolarForCausalLM,Original,bfloat16,True,mit,22,401,True,b4db141b5fb08b23f8bc323bc34e2cff3e9675f8,True,True,2024-09-11,2024-09-09,True,True,upstage/solar-pro-preview-instruct,0,upstage/solar-pro-preview-instruct 🔶,pankajmathur/orca_mini_v7_72b,39.06,59.3,0.59,55.06,0.68,26.44,0.26,18.01,0.39,24.21,0.51,51.35,0.56,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,72,11,True,447f11912cfa496e32e188a55214043a05760d3a,True,True,2024-06-26,2024-06-26,False,False,pankajmathur/orca_mini_v7_72b,0,pankajmathur/orca_mini_v7_72b 
💬,MaziyarPanahi/calme-2.1-qwen2.5-72b,38.38,86.62,0.87,61.66,0.73,2.27,0.02,15.1,0.36,13.3,0.43,51.32,0.56,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,72,1,True,eb6c92dec932070ea872f39469ca5b9daf2d34e6,True,True,2024-09-26,2024-09-19,True,False,MaziyarPanahi/calme-2.1-qwen2.5-72b,1,Qwen/Qwen2.5-72B -💬,Qwen/Qwen2.5-72B-Instruct,38.35,86.5,0.87,61.78,0.73,1.28,0.01,17.45,0.38,11.81,0.42,51.3,0.56,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,72,291,True,a13fff9ad76700c7ecff2769f75943ba8395b4a7,True,True,2024-09-19,2024-09-16,True,True,Qwen/Qwen2.5-72B-Instruct,1,Qwen/Qwen2.5-72B +💬,Qwen/Qwen2.5-72B-Instruct,38.35,86.5,0.87,61.78,0.73,1.28,0.01,17.45,0.38,11.81,0.42,51.3,0.56,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,72,292,True,a13fff9ad76700c7ecff2769f75943ba8395b4a7,True,True,2024-09-19,2024-09-16,True,True,Qwen/Qwen2.5-72B-Instruct,1,Qwen/Qwen2.5-72B 🤝,gbueno86/Meta-LLama-3-Cat-Smaug-LLama-70b,38.27,80.72,0.81,51.51,0.67,26.81,0.27,10.29,0.33,15.0,0.44,45.28,0.51,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,llama3,70,1,True,2d73b7e1c7157df482555944d6a6b1362bc6c3c5,True,True,2024-06-27,2024-05-24,True,False,gbueno86/Meta-LLama-3-Cat-Smaug-LLama-70b,1,gbueno86/Meta-LLama-3-Cat-Smaug-LLama-70b (Merge) 💬,MaziyarPanahi/calme-2.2-qwen2.5-72b,38.01,84.77,0.85,61.8,0.73,3.63,0.04,14.54,0.36,12.02,0.42,51.31,0.56,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,72,2,True,c6c7fdf70d8bf81364108975eb8ba78eecac83d4,True,True,2024-09-26,2024-09-19,True,False,MaziyarPanahi/calme-2.2-qwen2.5-72b,1,Qwen/Qwen2.5-72B 💬,MaziyarPanahi/calme-2.2-llama3-70b,37.98,82.08,0.82,48.57,0.64,22.96,0.23,12.19,0.34,15.3,0.44,46.74,0.52,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,70,17,True,95366b974baedee4d95c1e841bc3d15e94753804,True,True,2024-06-26,2024-04-27,True,False,MaziyarPanahi/calme-2.2-llama3-70b,2,meta-llama/Meta-Llama-3-70B @@ -51,6 +51,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,failspy/llama-3-70B-Instruct-abliterated,35.79,80.23,0.8,48.94,0.65,23.72,0.24,5.26,0.29,10.53,0.41,46.06,0.51,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,70,85,True,53ae9dafe8b3d163e05d75387575f8e9f43253d0,True,True,2024-07-03,2024-05-07,True,False,failspy/llama-3-70B-Instruct-abliterated,0,failspy/llama-3-70B-Instruct-abliterated 💬,dnhkng/RYS-Llama-3-Large-Instruct,35.78,80.51,0.81,49.67,0.65,21.83,0.22,5.26,0.29,11.45,0.42,45.97,0.51,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,mit,73,1,True,01e3208aaf7bf6d2b09737960c701ec6628977fe,True,True,2024-08-07,2024-08-06,True,False,dnhkng/RYS-Llama-3-Large-Instruct,0,dnhkng/RYS-Llama-3-Large-Instruct 🔶,KSU-HW-SEC/Llama3-70b-SVA-FT-final,35.78,61.65,0.62,51.33,0.67,20.09,0.2,16.67,0.38,17.8,0.46,47.14,0.52,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,,70,0,False,391bbd94173b34975d1aa2c7356977a630253b75,True,True,2024-09-08,,False,False,KSU-HW-SEC/Llama3-70b-SVA-FT-final,0,Removed +💬,tanliboy/lambda-qwen2.5-32b-dpo-test,35.75,80.84,0.81,54.41,0.68,0.0,0.0,14.21,0.36,13.33,0.43,51.74,0.57,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,32,2,True,675b60d6e859455a6139e6e284bbe1844b8ddf46,True,True,2024-09-30,2024-09-22,True,False,tanliboy/lambda-qwen2.5-32b-dpo-test,2,Qwen/Qwen2.5-32B 
🔶,KSU-HW-SEC/Llama3-70b-SVA-FT-500,35.61,61.05,0.61,51.89,0.67,19.34,0.19,17.45,0.38,16.99,0.45,46.97,0.52,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,,70,0,False,856a23f28aeada23d1135c86a37e05524307e8ed,True,True,2024-09-08,,False,False,KSU-HW-SEC/Llama3-70b-SVA-FT-500,0,Removed 🔶,cognitivecomputations/dolphin-2.9.2-qwen2-72b,35.42,63.44,0.63,47.7,0.63,18.66,0.19,16.0,0.37,17.04,0.45,49.68,0.55,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,other,72,57,True,e79582577c2bf2af304221af0e8308b7e7d46ca1,True,True,2024-09-19,2024-05-27,True,True,cognitivecomputations/dolphin-2.9.2-qwen2-72b,1,Qwen/Qwen2-72B 🔶,cloudyu/Llama-3-70Bx2-MOE,35.35,54.82,0.55,51.42,0.66,19.86,0.2,19.13,0.39,20.85,0.48,46.02,0.51,🔶 fine-tuned on domain-specific datasets,MixtralForCausalLM,Original,bfloat16,True,llama3,126,1,True,b8bd85e8db8e4ec352b93441c92e0ae1334bf5a7,True,False,2024-06-27,2024-05-20,False,False,cloudyu/Llama-3-70Bx2-MOE,0,cloudyu/Llama-3-70Bx2-MOE @@ -63,7 +64,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 💬,Qwen/Qwen2-Math-72B-Instruct,34.79,56.94,0.57,47.96,0.63,35.95,0.36,15.77,0.37,15.73,0.45,36.36,0.43,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,72,83,True,5c267882f3377bcfc35882f8609098a894eeeaa8,True,True,2024-08-19,2024-08-08,True,True,Qwen/Qwen2-Math-72B-Instruct,0,Qwen/Qwen2-Math-72B-Instruct 🔶,aaditya/Llama3-OpenBioLLM-70B,34.73,75.97,0.76,47.15,0.64,18.2,0.18,9.73,0.32,14.35,0.44,42.97,0.49,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,70,335,True,5f79deaf38bc5f662943d304d59cb30357e8e5bd,True,True,2024-08-30,2024-04-24,True,False,aaditya/Llama3-OpenBioLLM-70B,2,meta-llama/Meta-Llama-3-70B 💬,abacusai/Smaug-Llama-3-70B-Instruct-32K,34.72,77.61,0.78,49.07,0.65,21.22,0.21,6.15,0.3,12.43,0.42,41.83,0.48,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,70,20,True,33840982dc253968f32ef3a534ee0e025eb97482,True,True,2024-08-06,2024-06-11,True,True,abacusai/Smaug-Llama-3-70B-Instruct-32K,0,abacusai/Smaug-Llama-3-70B-Instruct-32K -🔶,Replete-AI/Replete-LLM-V2.5-Qwen-14b,34.52,58.4,0.58,49.39,0.65,15.63,0.16,16.22,0.37,18.83,0.47,48.62,0.54,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,,14,0,False,834ddb1712ae6d1b232b2d5b26be658d90d23e43,True,True,2024-09-29,,False,False,Replete-AI/Replete-LLM-V2.5-Qwen-14b,0,Removed +🔶,rombodawg/Rombos-LLM-V2.5-Qwen-14b,34.52,58.4,0.58,49.39,0.65,15.63,0.16,16.22,0.37,18.83,0.47,48.62,0.54,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,14,3,True,834ddb1712ae6d1b232b2d5b26be658d90d23e43,True,True,2024-09-29,2024-10-06,False,False,rombodawg/Rombos-LLM-V2.5-Qwen-14b,1,rombodawg/Rombos-LLM-V2.5-Qwen-14b (Merge) 🔶,BAAI/Infinity-Instruct-3M-0613-Llama3-70B,34.47,68.21,0.68,51.33,0.66,14.88,0.15,14.43,0.36,16.53,0.45,41.44,0.47,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,70,5,True,9fc53668064bdda22975ca72c5a287f8241c95b3,True,True,2024-06-28,2024-06-27,True,False,BAAI/Infinity-Instruct-3M-0613-Llama3-70B,0,BAAI/Infinity-Instruct-3M-0613-Llama3-70B 💬,dnhkng/RYS-Llama-3-Huge-Instruct,34.37,76.86,0.77,49.07,0.65,21.22,0.21,1.45,0.26,11.93,0.42,45.66,0.51,"💬 chat models (RLHF, DPO, IFT, 
...)",LlamaForCausalLM,Original,bfloat16,True,mit,99,1,True,cfe14a5339e88a7a89f075d9d48215d45f64acaf,True,True,2024-08-07,2024-08-06,True,False,dnhkng/RYS-Llama-3-Huge-Instruct,0,dnhkng/RYS-Llama-3-Huge-Instruct 🔶,nisten/franqwenstein-35b,34.16,39.14,0.39,51.68,0.66,28.7,0.29,14.54,0.36,19.68,0.47,51.23,0.56,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,mit,34,4,True,901351a987d664a1cd7f483115a167d3ae5694ec,True,True,2024-10-03,2024-10-03,True,False,nisten/franqwenstein-35b,1,nisten/franqwenstein-35b (Merge) @@ -87,7 +88,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🤝,paloalma/TW3-JRGL-v2,32.12,53.16,0.53,45.61,0.61,15.86,0.16,14.54,0.36,20.7,0.49,42.87,0.49,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,72,0,True,aca3f0ba2bfb90038a9e2cd5b486821d4c181b46,True,True,2024-08-29,2024-04-01,False,False,paloalma/TW3-JRGL-v2,0,paloalma/TW3-JRGL-v2 💬,v000000/Qwen2.5-14B-Gutenberg-1e-Delta,32.11,80.45,0.8,48.62,0.64,0.0,0.0,10.51,0.33,9.38,0.41,43.67,0.49,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,14,4,True,f624854b4380e01322e752ce4daadd49ac86580f,True,True,2024-09-28,2024-09-20,True,False,v000000/Qwen2.5-14B-Gutenberg-1e-Delta,1,v000000/Qwen2.5-14B-Gutenberg-1e-Delta (Merge) 💬,internlm/internlm2_5-20b-chat,32.08,70.1,0.7,62.83,0.75,0.0,0.0,9.51,0.32,16.74,0.46,33.31,0.4,"💬 chat models (RLHF, DPO, IFT, ...)",InternLM2ForCausalLM,Original,bfloat16,True,other,19,82,True,ef17bde929761255fee76d95e2c25969ccd93b0d,True,True,2024-08-12,2024-07-30,True,True,internlm/internlm2_5-20b-chat,0,internlm/internlm2_5-20b-chat -💬,bosonai/Higgs-Llama-3-70B,31.95,55.61,0.56,45.9,0.63,15.79,0.16,15.55,0.37,15.52,0.45,43.35,0.49,"💬 chat models (RLHF, DPO, IFT, ...)",?,Adapter,bfloat16,True,other,70,209,True,b2c7540768046dfdae7a0cb846a7da6c41d826b1,True,True,2024-08-30,2024-06-05,True,False,bosonai/Higgs-Llama-3-70B,1,meta-llama/Meta-Llama-3-70B +💬,bosonai/Higgs-Llama-3-70B,31.95,55.61,0.56,45.9,0.63,15.79,0.16,15.55,0.37,15.52,0.45,43.35,0.49,"💬 chat models (RLHF, DPO, IFT, ...)",?,Adapter,bfloat16,True,other,70,208,True,b2c7540768046dfdae7a0cb846a7da6c41d826b1,True,True,2024-08-30,2024-06-05,True,False,bosonai/Higgs-Llama-3-70B,1,meta-llama/Meta-Llama-3-70B 💬,MTSAIR/MultiVerse_70B,31.73,52.49,0.52,46.14,0.62,16.16,0.16,13.87,0.35,18.82,0.47,42.89,0.49,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,other,72,38,True,063430cdc4d972a0884e3e3e3d45ea4afbdf71a2,True,True,2024-06-29,2024-03-25,False,False,MTSAIR/MultiVerse_70B,0,MTSAIR/MultiVerse_70B 🤝,paloalma/Le_Triomphant-ECE-TW3,31.66,54.02,0.54,44.96,0.61,17.45,0.17,13.2,0.35,18.5,0.47,41.81,0.48,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,72,3,True,f72399253bb3e65c0f55e50461488c098f658a49,True,True,2024-07-25,2024-04-01,False,False,paloalma/Le_Triomphant-ECE-TW3,0,paloalma/Le_Triomphant-ECE-TW3 🔶,failspy/Phi-3-medium-4k-instruct-abliterated-v3,31.55,63.19,0.63,46.73,0.63,14.12,0.14,8.95,0.32,18.52,0.46,37.78,0.44,🔶 fine-tuned on domain-specific datasets,Phi3ForCausalLM,Original,bfloat16,True,mit,13,21,True,959b09eacf6cae85a8eb21b25e998addc89a367b,True,True,2024-07-29,2024-05-22,True,False,failspy/Phi-3-medium-4k-instruct-abliterated-v3,0,failspy/Phi-3-medium-4k-instruct-abliterated-v3 @@ -99,7 +100,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 
🤝,CombinHorizon/YiSM-blossom5.1-34B-SLERP,31.09,50.33,0.5,46.4,0.62,19.79,0.2,14.09,0.36,14.37,0.44,41.56,0.47,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,34,0,True,ebd8d6507623008567a0548cd0ff9e28cbd6a656,True,True,2024-08-27,2024-08-27,True,False,CombinHorizon/YiSM-blossom5.1-34B-SLERP,1,CombinHorizon/YiSM-blossom5.1-34B-SLERP (Merge) 💬,OpenBuddy/openbuddy-qwen2.5llamaify-14b-v23.1-200k,30.92,63.09,0.63,43.28,0.6,15.71,0.16,11.07,0.33,11.54,0.42,40.82,0.47,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,14,0,True,001e14063e2702a9b2284dc6ec889d2586dc839b,True,True,2024-09-23,2024-09-23,True,False,OpenBuddy/openbuddy-qwen2.5llamaify-14b-v23.1-200k,0,OpenBuddy/openbuddy-qwen2.5llamaify-14b-v23.1-200k 💬,CohereForAI/c4ai-command-r-plus,30.86,76.64,0.77,39.92,0.58,7.55,0.08,7.38,0.31,20.42,0.48,33.24,0.4,"💬 chat models (RLHF, DPO, IFT, ...)",CohereForCausalLM,Original,float16,True,cc-by-nc-4.0,103,1664,True,fa1bd7fb1572ceb861bbbbecfa8af83b29fa8cca,True,True,2024-06-13,2024-04-03,True,True,CohereForAI/c4ai-command-r-plus,0,CohereForAI/c4ai-command-r-plus -🔶,Replete-AI/Replete-LLM-V2.5-Qwen-7b,30.8,62.37,0.62,36.37,0.55,26.44,0.26,9.06,0.32,12.0,0.43,38.54,0.45,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,,7,0,False,dbd819e8f765181f774cb5b79812d081669eb302,True,True,2024-09-29,,False,False,Replete-AI/Replete-LLM-V2.5-Qwen-7b,0,Removed +🔶,rombodawg/Rombos-LLM-V2.5-Qwen-7b,30.8,62.37,0.62,36.37,0.55,26.44,0.26,9.06,0.32,12.0,0.43,38.54,0.45,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,2,True,dbd819e8f765181f774cb5b79812d081669eb302,True,True,2024-09-29,2024-10-06,False,False,rombodawg/Rombos-LLM-V2.5-Qwen-7b,1,rombodawg/Rombos-LLM-V2.5-Qwen-7b (Merge) 💬,mattshumer/ref_70_e3,30.74,62.94,0.63,49.27,0.65,0.0,0.0,11.41,0.34,13.0,0.43,47.81,0.53,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,llama3.1,70,50,True,5d2d9dbb9e0bf61879255f63f1b787296fe524cc,True,True,2024-09-08,2024-09-08,True,False,mattshumer/ref_70_e3,2,meta-llama/Meta-Llama-3.1-70B 🔶,mmnga/Llama-3-70B-japanese-suzume-vector-v0.1,30.54,46.49,0.46,50.02,0.65,24.24,0.24,4.81,0.29,10.76,0.41,46.94,0.52,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,70,4,True,16f98b2d45684af2c4a9ff5da75b00ef13cca808,True,True,2024-09-19,2024-04-28,True,False,mmnga/Llama-3-70B-japanese-suzume-vector-v0.1,0,mmnga/Llama-3-70B-japanese-suzume-vector-v0.1 💬,internlm/internlm2_5-7b-chat,30.46,61.4,0.61,57.67,0.71,8.31,0.08,10.63,0.33,14.35,0.44,30.42,0.37,"💬 chat models (RLHF, DPO, IFT, ...)",InternLM2ForCausalLM,Original,float16,True,other,7,163,True,bebb00121ee105b823647c3ba2b1e152652edc33,True,True,2024-07-03,2024-06-27,True,True,internlm/internlm2_5-7b-chat,0,internlm/internlm2_5-7b-chat @@ -124,7 +125,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 💬,microsoft/Phi-3-small-8k-instruct,29.64,64.97,0.65,46.21,0.62,2.64,0.03,8.28,0.31,16.77,0.46,38.96,0.45,"💬 chat models (RLHF, DPO, IFT, ...)",Phi3SmallForCausalLM,Original,bfloat16,True,mit,7,156,True,1535ae26fb4faada95c6950e8bc6e867cdad6b00,True,True,2024-06-13,2024-05-07,True,True,microsoft/Phi-3-small-8k-instruct,0,microsoft/Phi-3-small-8k-instruct 💬,Qwen/Qwen2-57B-A14B-Instruct,29.6,63.38,0.63,41.79,0.59,7.7,0.08,10.85,0.33,14.18,0.44,39.73,0.46,"💬 chat models (RLHF, DPO, IFT, 
...)",Qwen2MoeForCausalLM,Original,bfloat16,True,apache-2.0,57,75,True,5ea455a449e61a92a5b194ee06be807647d3e8b5,True,True,2024-08-14,2024-06-04,True,True,Qwen/Qwen2-57B-A14B-Instruct,1,Qwen/Qwen2-57B-A14B 🟢,Qwen/Qwen1.5-110B,29.56,34.22,0.34,44.28,0.61,23.04,0.23,13.65,0.35,13.71,0.44,48.45,0.54,🟢 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,other,111,91,True,16659038ecdcc771c1293cf47020fa7cc2750ee8,True,True,2024-06-13,2024-04-25,False,True,Qwen/Qwen1.5-110B,0,Qwen/Qwen1.5-110B -🔶,mistralai/Mistral-Small-Instruct-2409,29.54,62.83,0.63,40.56,0.58,18.13,0.18,11.07,0.33,10.23,0.41,34.43,0.41,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,float16,True,other,22,297,True,63e53df6575e7085d62113f4383835ff979b3795,True,True,2024-09-25,2024-09-17,False,True,mistralai/Mistral-Small-Instruct-2409,0,mistralai/Mistral-Small-Instruct-2409 +🔶,mistralai/Mistral-Small-Instruct-2409,29.54,62.83,0.63,40.56,0.58,18.13,0.18,11.07,0.33,10.23,0.41,34.43,0.41,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,float16,True,other,22,298,True,63e53df6575e7085d62113f4383835ff979b3795,True,True,2024-09-25,2024-09-17,False,True,mistralai/Mistral-Small-Instruct-2409,0,mistralai/Mistral-Small-Instruct-2409 💬,recoilme/recoilme-gemma-2-9B-v0.1,29.53,75.15,0.75,42.32,0.6,1.21,0.01,11.86,0.34,11.53,0.42,35.1,0.42,"💬 chat models (RLHF, DPO, IFT, ...)",Gemma2ForCausalLM,Original,float16,True,,10,0,False,6dc0997046db4e9932f87d338ecdc2a4158abbda,True,True,2024-09-18,,True,False,recoilme/recoilme-gemma-2-9B-v0.1,0,Removed 🔶,Qwen/Qwen2.5-Math-72B-Instruct,29.51,40.03,0.4,48.97,0.65,18.5,0.19,10.85,0.33,16.34,0.45,42.36,0.48,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,other,72,12,True,3743c8fd46b002d105c1d28d180f1e531df1d40f,True,True,2024-09-29,2024-09-16,True,True,Qwen/Qwen2.5-Math-72B-Instruct,2,Qwen/Qwen2.5-72B 🔶,nbeerbower/Mistral-Small-Drummer-22B,29.45,63.31,0.63,40.12,0.58,16.69,0.17,12.42,0.34,9.8,0.41,34.39,0.41,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,other,22,6,True,53b21ece0c64ffc8aba81f294ad19e2c06e9852c,True,True,2024-10-01,2024-09-26,False,False,nbeerbower/Mistral-Small-Drummer-22B,1,nbeerbower/Mistral-Small-Drummer-22B (Merge) @@ -143,7 +144,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🤝,Dampfinchen/Llama-3.1-8B-Ultra-Instruct,28.98,80.81,0.81,32.49,0.53,14.95,0.15,5.59,0.29,8.61,0.4,31.4,0.38,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,llama3,8,6,True,46662d14130cfd34f7d90816540794f24a301f86,True,True,2024-08-26,2024-08-26,True,False,Dampfinchen/Llama-3.1-8B-Ultra-Instruct,1,Dampfinchen/Llama-3.1-8B-Ultra-Instruct (Merge) 💬,01-ai/Yi-1.5-34B-Chat-16K,28.98,45.64,0.46,44.54,0.61,18.81,0.19,11.74,0.34,13.74,0.44,39.38,0.45,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,34,26,True,ff74452e11f0f749ab872dc19b1dd3813c25c4d8,True,True,2024-07-15,2024-05-15,True,True,01-ai/Yi-1.5-34B-Chat-16K,0,01-ai/Yi-1.5-34B-Chat-16K 💬,anthracite-org/magnum-v3-27b-kto,28.9,56.75,0.57,41.16,0.59,15.48,0.15,14.09,0.36,9.92,0.39,35.98,0.42,"💬 chat models (RLHF, DPO, IFT, ...)",Gemma2ForCausalLM,Original,bfloat16,True,gemma,27,7,True,96fbb750b3150e5fe9d6d2fcf757f49310d99a43,True,True,2024-09-15,2024-09-06,True,False,anthracite-org/magnum-v3-27b-kto,1,anthracite-org/magnum-v3-27b-kto (Merge) -💬,google/gemma-2-9b-it,28.86,74.36,0.74,42.14,0.6,0.23,0.0,14.77,0.36,9.74,0.41,31.95,0.39,"💬 chat models (RLHF, DPO, IFT, 
...)",Gemma2ForCausalLM,Original,bfloat16,True,gemma,9,483,True,1937c70277fcc5f7fb0fc772fc5bc69378996e71,True,True,2024-07-11,2024-06-24,True,True,google/gemma-2-9b-it,1,google/gemma-2-9b +💬,google/gemma-2-9b-it,28.86,74.36,0.74,42.14,0.6,0.23,0.0,14.77,0.36,9.74,0.41,31.95,0.39,"💬 chat models (RLHF, DPO, IFT, ...)",Gemma2ForCausalLM,Original,bfloat16,True,gemma,9,485,True,1937c70277fcc5f7fb0fc772fc5bc69378996e71,True,True,2024-07-11,2024-06-24,True,True,google/gemma-2-9b-it,1,google/gemma-2-9b 🔶,152334H/miqu-1-70b-sf,28.82,51.82,0.52,43.81,0.61,10.8,0.11,13.42,0.35,17.21,0.46,35.87,0.42,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,,68,218,False,1dca4cce36f01f2104ee2e6b97bac6ff7bb300c1,True,True,2024-06-26,2024-01-30,False,False,152334H/miqu-1-70b-sf,0,152334H/miqu-1-70b-sf 🤝,DreadPoor/Aurora_faustus-8B-LORABLATED,28.8,75.27,0.75,34.2,0.54,12.92,0.13,6.94,0.3,13.78,0.42,29.7,0.37,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,1,True,97746081f7c681dcf7fad10c57de9a341aa10db1,True,True,2024-09-29,2024-09-29,True,False,DreadPoor/Aurora_faustus-8B-LORABLATED,1,DreadPoor/Aurora_faustus-8B-LORABLATED (Merge) 🤝,DreadPoor/Promissum_Mane-8B-LINEAR,28.78,71.5,0.72,35.25,0.55,13.67,0.14,7.27,0.3,13.34,0.42,31.67,0.39,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,True,,8,1,False,ff399e7004040e1807e8d08b4d0967206fc50afa,True,True,2024-09-30,2024-09-30,True,False,DreadPoor/Promissum_Mane-8B-LINEAR,1,DreadPoor/Promissum_Mane-8B-LINEAR (Merge) @@ -156,6 +157,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,VAGOsolutions/Llama-3.1-SauerkrautLM-8b-Instruct,28.56,80.17,0.8,31.0,0.51,11.18,0.11,5.37,0.29,11.52,0.41,32.12,0.39,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,30,True,23ca79966a4ab0a61f7ccc7a0454ffef553b66eb,True,True,2024-07-29,2024-07-25,True,False,VAGOsolutions/Llama-3.1-SauerkrautLM-8b-Instruct,0,VAGOsolutions/Llama-3.1-SauerkrautLM-8b-Instruct 🤝,DeepAutoAI/Explore_Llama-3.1-8B-Inst,28.5,77.95,0.78,30.39,0.51,17.52,0.18,4.47,0.28,9.64,0.39,31.02,0.38,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,True,,8,0,False,9752180fafd8f584625eb649c0cba36b91bdc3ce,True,True,2024-09-21,2024-09-21,True,False,DeepAutoAI/Explore_Llama-3.1-8B-Inst,0,DeepAutoAI/Explore_Llama-3.1-8B-Inst 🔶,abhishek/autotrain-llama3-70b-orpo-v2,28.48,54.06,0.54,39.88,0.59,18.73,0.19,5.82,0.29,9.95,0.41,42.42,0.48,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,other,70,3,True,a2c16a8a7fa48792eb8a1f0c50e13309c2021a63,True,True,2024-08-21,2024-05-04,True,False,abhishek/autotrain-llama3-70b-orpo-v2,0,abhishek/autotrain-llama3-70b-orpo-v2 +🤝,zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.75,28.42,72.08,0.72,42.49,0.6,0.0,0.0,13.31,0.35,7.76,0.4,34.9,0.41,🤝 base merges and moerges,Gemma2ForCausalLM,Original,bfloat16,True,,10,0,False,eb0e589291630ba20328db650f74af949d217a97,True,True,2024-10-04,2024-10-04,True,False,zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.75,1,zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.75 (Merge) 💬,Azure99/blossom-v5.1-34b,28.39,56.97,0.57,44.15,0.61,14.43,0.14,7.94,0.31,7.3,0.39,39.53,0.46,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,34,5,True,2c803204f5dbf4ce37e2df98eb0205cdc53de10d,True,True,2024-07-27,2024-05-19,True,False,Azure99/blossom-v5.1-34b,0,Azure99/blossom-v5.1-34b 
🟢,dnhkng/RYS-Phi-3-medium-4k-instruct,28.38,43.91,0.44,46.75,0.62,11.78,0.12,13.98,0.35,11.09,0.43,42.74,0.48,🟢 pretrained,Phi3ForCausalLM,Original,bfloat16,True,mit,17,1,True,1009e916b1ff8c9a53bc9d8ff48bea2a15ccde26,True,True,2024-08-07,2024-08-06,False,False,dnhkng/RYS-Phi-3-medium-4k-instruct,0,dnhkng/RYS-Phi-3-medium-4k-instruct 🤝,DreadPoor/Aspire-8B-model_stock,28.28,71.41,0.71,32.53,0.53,12.99,0.13,8.61,0.31,13.46,0.42,30.7,0.38,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,2,True,5c23cb2aff877d0b7bdcfa4de43d1bc8a1852de0,True,True,2024-09-17,2024-09-16,True,False,DreadPoor/Aspire-8B-model_stock,1,DreadPoor/Aspire-8B-model_stock (Merge) @@ -168,10 +170,10 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🤝,djuna/L3.1-ForStHS,28.0,78.13,0.78,31.39,0.52,12.92,0.13,5.48,0.29,9.66,0.4,30.39,0.37,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,True,,8,2,False,f5442e1f27e4a0c469504624ea85afdc6907c9cc,True,True,2024-09-15,2024-09-10,True,False,djuna/L3.1-ForStHS,1,djuna/L3.1-ForStHS (Merge) 🤝,OpenBuddy/openbuddy-zero-56b-v21.2-32k,27.99,50.57,0.51,44.8,0.61,12.99,0.13,9.06,0.32,12.78,0.43,37.77,0.44,🤝 base merges and moerges,LlamaForCausalLM,Original,float16,True,other,56,0,True,c7a1a4a6e798f75d1d3219ab9ff9f2692e29f7d5,True,True,2024-06-26,2024-06-10,True,False,OpenBuddy/openbuddy-zero-56b-v21.2-32k,0,OpenBuddy/openbuddy-zero-56b-v21.2-32k 🔶,Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2,27.93,77.92,0.78,29.69,0.51,16.92,0.17,4.36,0.28,7.77,0.38,30.9,0.38,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,53,True,2340f8fbcd2452125a798686ca90b882a08fb0d9,True,True,2024-08-28,2024-08-09,True,False,Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2,0,Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2 -💬,meta-llama/Meta-Llama-3.1-8B-Instruct,27.91,78.56,0.79,29.89,0.51,17.6,0.18,2.35,0.27,8.41,0.39,30.68,0.38,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,2714,True,df34336b42332c6d360959e259cd6271c6a09fd4,True,True,2024-08-15,2024-07-18,True,True,meta-llama/Meta-Llama-3.1-8B-Instruct,1,meta-llama/Meta-Llama-3.1-8B +💬,meta-llama/Meta-Llama-3.1-8B-Instruct,27.91,78.56,0.79,29.89,0.51,17.6,0.18,2.35,0.27,8.41,0.39,30.68,0.38,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,2718,True,df34336b42332c6d360959e259cd6271c6a09fd4,True,True,2024-08-15,2024-07-18,True,True,meta-llama/Meta-Llama-3.1-8B-Instruct,1,meta-llama/Meta-Llama-3.1-8B 💬,v000000/L3.1-Niitorm-8B-DPO-t0.0001,27.89,76.89,0.77,30.51,0.51,14.88,0.15,5.93,0.29,7.26,0.39,31.85,0.39,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,,8,4,False,a34150b5f63de4bc83d79b1de127faff3750289f,True,True,2024-09-19,2024-09-19,True,False,v000000/L3.1-Niitorm-8B-DPO-t0.0001,1,v000000/L3.1-Niitorm-8B-DPO-t0.0001 (Merge) 🔶,Isaak-Carter/Josiefied-Qwen2.5-7B-Instruct-abliterated-v2,27.82,78.41,0.78,33.29,0.53,0.0,0.0,6.49,0.3,13.96,0.44,34.76,0.41,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,4,True,5d07f58562422feb9f25c9c048e40356d2cf7e4b,True,True,2024-09-21,2024-09-20,True,False,Isaak-Carter/Josiefied-Qwen2.5-7B-Instruct-abliterated-v2,1,Qwen/Qwen2.5-7B -💬,vicgalle/Configurable-Llama-3.1-8B-Instruct,27.77,83.12,0.83,29.66,0.5,15.86,0.16,3.24,0.27,5.93,0.38,28.8,0.36,"💬 chat models (RLHF, DPO, IFT, 
...)",LlamaForCausalLM,Original,float16,True,apache-2.0,8,10,True,133b3ab1a5385ff9b3d17da2addfe3fc1fd6f733,True,True,2024-08-05,2024-07-24,True,False,vicgalle/Configurable-Llama-3.1-8B-Instruct,0,vicgalle/Configurable-Llama-3.1-8B-Instruct +💬,vicgalle/Configurable-Llama-3.1-8B-Instruct,27.77,83.12,0.83,29.66,0.5,15.86,0.16,3.24,0.27,5.93,0.38,28.8,0.36,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,apache-2.0,8,11,True,133b3ab1a5385ff9b3d17da2addfe3fc1fd6f733,True,True,2024-08-05,2024-07-24,True,False,vicgalle/Configurable-Llama-3.1-8B-Instruct,0,vicgalle/Configurable-Llama-3.1-8B-Instruct 🔶,BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B,27.74,51.86,0.52,35.38,0.55,13.97,0.14,13.87,0.35,16.72,0.46,34.65,0.41,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,3,True,a42c86c61b98ca4fdf238d688fe6ea11cf414d29,True,True,2024-08-05,2024-07-09,True,False,BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B,0,BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B 🔶,cognitivecomputations/dolphin-2.9.1-yi-1.5-34b,27.73,38.53,0.39,44.17,0.61,15.18,0.15,12.42,0.34,16.97,0.46,39.1,0.45,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,34,34,True,1ec522298a6935c881df6dc29d3669833bd8672d,True,True,2024-07-27,2024-05-18,True,True,cognitivecomputations/dolphin-2.9.1-yi-1.5-34b,1,01-ai/Yi-1.5-34B 💬,01-ai/Yi-1.5-9B-Chat,27.71,60.46,0.6,36.95,0.56,11.63,0.12,11.3,0.33,12.84,0.43,33.06,0.4,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,131,True,bc87d8557c98dc1e5fdef6ec23ed31088c4d3f35,True,True,2024-06-12,2024-05-10,True,True,01-ai/Yi-1.5-9B-Chat,0,01-ai/Yi-1.5-9B-Chat @@ -183,7 +185,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🤝,DreadPoor/Heart_Stolen-ALT-8B-Model_Stock,27.53,71.84,0.72,32.35,0.53,13.6,0.14,6.82,0.3,9.75,0.41,30.8,0.38,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,2,True,03d1d70cb7eb5a743468b97c9c580028df487564,True,True,2024-09-11,2024-09-11,True,False,DreadPoor/Heart_Stolen-ALT-8B-Model_Stock,1,DreadPoor/Heart_Stolen-ALT-8B-Model_Stock (Merge) 🤝,Shreyash2010/Uma-4x4B-Instruct-v0.1,27.46,55.17,0.55,36.28,0.55,14.95,0.15,11.3,0.33,15.15,0.44,31.89,0.39,🤝 base merges and moerges,?,Adapter,bfloat16,True,,3,0,False,f78146bdd1632585b3520717885e0ca41ddbce69,True,True,2024-08-25,,True,False,Shreyash2010/Uma-4x4B-Instruct-v0.1,0,Removed 🤝,cloudyu/Mixtral_34Bx2_MoE_60B,27.42,45.38,0.45,41.21,0.59,6.57,0.07,11.74,0.34,17.78,0.46,41.85,0.48,🤝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,True,apache-2.0,60,110,True,d01642769ccc782e1db1fc26cb25097aecb98e23,True,False,2024-08-22,2024-01-05,False,False,cloudyu/Mixtral_34Bx2_MoE_60B,0,cloudyu/Mixtral_34Bx2_MoE_60B -💬,microsoft/Phi-3.5-mini-instruct,27.4,57.75,0.58,36.75,0.55,14.95,0.15,11.97,0.34,10.1,0.4,32.91,0.4,"💬 chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,bfloat16,True,mit,3,534,True,64963004ad95869fa73a30279371c8778509ac84,True,True,2024-08-21,2024-08-16,True,True,microsoft/Phi-3.5-mini-instruct,0,microsoft/Phi-3.5-mini-instruct +💬,microsoft/Phi-3.5-mini-instruct,27.4,57.75,0.58,36.75,0.55,14.95,0.15,11.97,0.34,10.1,0.4,32.91,0.4,"💬 chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,bfloat16,True,mit,3,535,True,64963004ad95869fa73a30279371c8778509ac84,True,True,2024-08-21,2024-08-16,True,True,microsoft/Phi-3.5-mini-instruct,0,microsoft/Phi-3.5-mini-instruct 
💬,NAPS-ai/naps-llama-3_1-8b-instruct-v0.4,27.31,73.44,0.73,27.83,0.49,17.22,0.17,3.91,0.28,13.96,0.44,27.5,0.35,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,apache-2.0,8,0,True,152229e8de5270aea7b9d7689503fb2577f8911a,True,True,2024-09-30,2024-09-12,True,False,NAPS-ai/naps-llama-3_1-8b-instruct-v0.4,1,NAPS-ai/naps-llama-3_1-8b-instruct-v0.4 (Merge) 🤝,DreadPoor/Trinas_Nectar-8B-model_stock,27.27,72.59,0.73,31.98,0.53,13.75,0.14,4.81,0.29,11.41,0.41,29.09,0.36,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,2,True,cb46b8431872557904d83fc5aa1b90dabeb74acc,True,True,2024-08-27,2024-08-16,True,False,DreadPoor/Trinas_Nectar-8B-model_stock,1,DreadPoor/Trinas_Nectar-8B-model_stock (Merge) 💬,microsoft/Phi-3-mini-4k-instruct,27.2,54.77,0.55,36.56,0.55,14.2,0.14,10.96,0.33,13.12,0.43,33.58,0.4,"💬 chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,float16,True,mit,3,1036,True,c1358f8a35e6d2af81890deffbbfa575b978c62f,True,True,2024-07-02,2024-04-22,True,True,microsoft/Phi-3-mini-4k-instruct,0,microsoft/Phi-3-mini-4k-instruct @@ -202,7 +204,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🤝,DreadPoor/ONeil-model_stock-8B,26.78,67.86,0.68,36.41,0.55,9.21,0.09,7.38,0.31,10.97,0.42,28.87,0.36,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,2,True,d4b84956211fd57b85122fe0c6f88b2cb9a9c86a,True,True,2024-07-15,2024-07-06,True,False,DreadPoor/ONeil-model_stock-8B,1,DreadPoor/ONeil-model_stock-8B (Merge) 🔶,MaziyarPanahi/Llama-3-8B-Instruct-v0.9,26.7,76.3,0.76,27.9,0.49,6.8,0.07,7.72,0.31,9.85,0.41,31.62,0.38,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,8,6,True,ddf91fdc0a3ab5e5d76864f1c4cf44e5adacd565,True,True,2024-08-06,2024-05-30,True,False,MaziyarPanahi/Llama-3-8B-Instruct-v0.9,3,meta-llama/Meta-Llama-3-8B-Instruct 🟢,Qwen/Qwen1.5-32B,26.69,32.97,0.33,38.98,0.57,26.66,0.27,10.63,0.33,12.04,0.43,38.89,0.45,🟢 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,other,32,81,True,cefef80dc06a65f89d1d71d0adbc56d335ca2490,True,True,2024-06-13,2024-04-01,False,True,Qwen/Qwen1.5-32B,0,Qwen/Qwen1.5-32B -💬,HumanLLMs/Humanish-Qwen2.5-7B-Instruct,26.67,72.84,0.73,34.48,0.54,0.0,0.0,6.49,0.3,8.42,0.4,37.76,0.44,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,0,True,7d2c71d926832d6e257ad2776011494dbac2d151,True,True,2024-10-05,2024-10-05,True,False,HumanLLMs/Humanish-Qwen2.5-7B-Instruct,2,Qwen/Qwen2.5-7B +💬,HumanLLMs/Humanish-Qwen2.5-7B-Instruct,26.67,72.84,0.73,34.48,0.54,0.0,0.0,6.49,0.3,8.42,0.4,37.76,0.44,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,7d2c71d926832d6e257ad2776011494dbac2d151,True,True,2024-10-05,2024-10-05,True,False,HumanLLMs/Humanish-Qwen2.5-7B-Instruct,2,Qwen/Qwen2.5-7B 🔶,MaziyarPanahi/Llama-3-8B-Instruct-v0.10,26.66,76.67,0.77,27.92,0.49,4.91,0.05,7.83,0.31,10.81,0.42,31.8,0.39,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,8,6,True,4411eb9f6f5e4c462a6bdbc64c26dcc123100b66,True,True,2024-06-26,2024-06-04,True,False,MaziyarPanahi/Llama-3-8B-Instruct-v0.10,4,meta-llama/Meta-Llama-3-8B-Instruct 🔶,huihui-ai/Qwen2.5-7B-Instruct-abliterated,26.65,75.46,0.75,32.89,0.53,0.0,0.0,8.72,0.32,7.48,0.4,35.33,0.42,🔶 fine-tuned on domain-specific 
datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,c04c14c82962506e2b16f58f9f6b0a2e60a6afde,True,True,2024-09-24,2024-09-19,True,False,huihui-ai/Qwen2.5-7B-Instruct-abliterated,2,Qwen/Qwen2.5-7B 💬,OpenBuddy/openbuddy-qwen2.5llamaify-7b-v23.1-200k,26.62,56.73,0.57,36.4,0.55,11.4,0.11,8.61,0.31,13.81,0.44,32.75,0.39,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,7,0,True,91521abfec2a00f4853f6cb4dd620177617ca572,True,True,2024-10-06,2024-10-04,True,False,OpenBuddy/openbuddy-qwen2.5llamaify-7b-v23.1-200k,0,OpenBuddy/openbuddy-qwen2.5llamaify-7b-v23.1-200k @@ -216,8 +218,8 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 💬,Danielbrdz/Barcenas-Llama3-8b-ORPO,26.38,73.72,0.74,28.6,0.5,5.74,0.06,7.61,0.31,11.17,0.42,31.44,0.38,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,other,8,7,True,66c848c4526d3db1ec41468c0f73ac4448c6abe9,True,True,2024-06-29,2024-04-29,True,False,Danielbrdz/Barcenas-Llama3-8b-ORPO,0,Danielbrdz/Barcenas-Llama3-8b-ORPO 💬,byroneverson/Yi-1.5-9B-Chat-16K-abliterated,26.37,55.28,0.55,32.84,0.53,10.65,0.11,8.39,0.31,19.68,0.47,31.37,0.38,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,2,True,84a6eaa723633bbefc7cfac9c64bf0e0a4d39065,True,True,2024-09-03,2024-09-03,True,False,byroneverson/Yi-1.5-9B-Chat-16K-abliterated,1,01-ai/Yi-1.5-9B-Chat-16K 🟢,meta-llama/Meta-Llama-3-70B,26.37,16.03,0.16,48.71,0.65,16.54,0.17,19.69,0.4,16.01,0.45,41.21,0.47,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3,70,805,True,b4d08b7db49d488da3ac49adf25a6b9ac01ae338,True,True,2024-06-12,2024-04-17,False,True,meta-llama/Meta-Llama-3-70B,0,meta-llama/Meta-Llama-3-70B -🔶,flammenai/Mahou-1.5-mistral-nemo-12B,26.28,67.51,0.68,36.26,0.55,5.06,0.05,3.47,0.28,16.47,0.45,28.91,0.36,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,0,True,852561e74f1785bf7225bb28395db1fd9431fe31,True,True,2024-10-07,2024-10-06,True,False,flammenai/Mahou-1.5-mistral-nemo-12B,1,flammenai/Mahou-1.5-mistral-nemo-12B (Merge) -💬,mistralai/Mistral-Small-Instruct-2409,26.26,66.7,0.67,30.79,0.52,14.35,0.14,9.84,0.32,3.0,0.36,32.89,0.4,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,other,22,297,True,63e53df6575e7085d62113f4383835ff979b3795,True,True,2024-09-19,2024-09-17,True,True,mistralai/Mistral-Small-Instruct-2409,0,mistralai/Mistral-Small-Instruct-2409 +🔶,flammenai/Mahou-1.5-mistral-nemo-12B,26.28,67.51,0.68,36.26,0.55,5.06,0.05,3.47,0.28,16.47,0.45,28.91,0.36,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,2,True,852561e74f1785bf7225bb28395db1fd9431fe31,True,True,2024-10-07,2024-10-06,True,False,flammenai/Mahou-1.5-mistral-nemo-12B,1,flammenai/Mahou-1.5-mistral-nemo-12B (Merge) +💬,mistralai/Mistral-Small-Instruct-2409,26.26,66.7,0.67,30.79,0.52,14.35,0.14,9.84,0.32,3.0,0.36,32.89,0.4,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,other,22,298,True,63e53df6575e7085d62113f4383835ff979b3795,True,True,2024-09-19,2024-09-17,True,True,mistralai/Mistral-Small-Instruct-2409,0,mistralai/Mistral-Small-Instruct-2409 💬,Azure99/blossom-v5-32b,26.23,52.35,0.52,42.88,0.6,9.67,0.1,8.17,0.31,8.35,0.4,35.94,0.42,"💬 chat models (RLHF, DPO, IFT, 
...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,32,4,True,ccd4d86e3de01187043683dea1e28df904f7408e,True,True,2024-09-21,2024-04-29,True,False,Azure99/blossom-v5-32b,0,Azure99/blossom-v5-32b 🤝,xxx777xxxASD/L3.1-ClaudeMaid-4x8B,26.19,66.96,0.67,29.44,0.51,12.84,0.13,5.48,0.29,13.75,0.43,28.67,0.36,🤝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,True,llama3.1,24,7,True,2a98d9cb91c7aa775acbf5bfe7bb91beb2faf682,True,False,2024-07-28,2024-07-27,True,False,xxx777xxxASD/L3.1-ClaudeMaid-4x8B,0,xxx777xxxASD/L3.1-ClaudeMaid-4x8B 🤝,DreadPoor/Sellen-8B-model_stock,26.17,71.13,0.71,31.36,0.52,12.08,0.12,3.24,0.27,10.67,0.4,28.55,0.36,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,True,,8,0,False,accde7145d81a428c782695ea61eebc608efd980,True,True,2024-08-27,,True,False,DreadPoor/Sellen-8B-model_stock,0,Removed @@ -232,7 +234,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 💬,Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo,25.87,57.14,0.57,39.15,0.57,7.63,0.08,10.74,0.33,8.78,0.4,31.78,0.39,"💬 chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,bfloat16,True,apache-2.0,3,1,True,2896ef357be81fd433c17801d76ce148e60a7032,True,True,2024-06-26,2024-06-24,True,False,Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo,0,Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo 💬,MaziyarPanahi/Llama-3-70B-Instruct-v0.1,25.82,47.14,0.47,32.71,0.54,14.95,0.15,4.59,0.28,15.31,0.44,40.2,0.46,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,70,1,True,6db1cb4256525fc5429734ddc0eb941d08d0be30,True,True,2024-06-26,2024-05-14,True,False,MaziyarPanahi/Llama-3-70B-Instruct-v0.1,2,meta-llama/Meta-Llama-3-70B 🤝,Casual-Autopsy/L3-Umbral-Mind-RP-v2.0-8B,25.76,71.23,0.71,32.49,0.53,10.12,0.1,4.92,0.29,5.55,0.37,30.26,0.37,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,llama3,8,12,True,b46c066ea8387264858dc3461f382e7b42fd9c48,True,True,2024-07-02,2024-06-26,True,False,Casual-Autopsy/L3-Umbral-Mind-RP-v2.0-8B,1,Casual-Autopsy/L3-Umbral-Mind-RP-v2.0-8B (Merge) -🔶,Sao10K/L3-8B-Stheno-v3.2,25.76,68.73,0.69,32.02,0.52,8.53,0.09,8.05,0.31,6.45,0.38,30.76,0.38,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,8,220,True,4bb828f6e1b1efd648c39b1ad682c44ff260f018,True,True,2024-06-30,2024-06-05,True,False,Sao10K/L3-8B-Stheno-v3.2,0,Sao10K/L3-8B-Stheno-v3.2 +🔶,Sao10K/L3-8B-Stheno-v3.2,25.76,68.73,0.69,32.02,0.52,8.53,0.09,8.05,0.31,6.45,0.38,30.76,0.38,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,8,221,True,4bb828f6e1b1efd648c39b1ad682c44ff260f018,True,True,2024-06-30,2024-06-05,True,False,Sao10K/L3-8B-Stheno-v3.2,0,Sao10K/L3-8B-Stheno-v3.2 🤝,djuna/L3.1-Suze-Vume-calc,25.75,72.97,0.73,31.14,0.52,9.89,0.1,4.25,0.28,8.3,0.38,27.94,0.35,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,True,,8,1,False,830c07d136ecd8171805078606f00c4ee69f21c3,True,True,2024-09-04,2024-08-26,True,False,djuna/L3.1-Suze-Vume-calc,1,djuna/L3.1-Suze-Vume-calc (Merge) 🔶,Nitral-AI/Hathor_Stable-v0.2-L3-8B,25.7,71.75,0.72,32.83,0.53,9.21,0.09,4.92,0.29,5.56,0.38,29.96,0.37,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,8,54,True,1c9f391c3e349f8ba51b5696290ee6db6a2b63fd,True,True,2024-07-02,2024-06-09,True,False,Nitral-AI/Hathor_Stable-v0.2-L3-8B,0,Nitral-AI/Hathor_Stable-v0.2-L3-8B 💬,princeton-nlp/Llama-3-Instruct-8B-ORPO-v0.2,25.68,76.33,0.76,29.6,0.51,8.46,0.08,4.47,0.28,4.85,0.38,30.34,0.37,"💬 chat 
models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,0,True,3ea5c542a3d8d61f6afb6cdbef5972a501ddf759,True,True,2024-09-28,2024-07-06,True,False,princeton-nlp/Llama-3-Instruct-8B-ORPO-v0.2,0,princeton-nlp/Llama-3-Instruct-8B-ORPO-v0.2 @@ -265,6 +267,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🤝,DreadPoor/Irina-8B-model_stock,25.16,67.99,0.68,32.09,0.52,9.06,0.09,4.59,0.28,8.64,0.4,28.6,0.36,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,True,,8,0,False,b282e3ab449d71a31f48b8c13eb43a4435968728,True,True,2024-08-30,,True,False,DreadPoor/Irina-8B-model_stock,0,Removed 🔶,Ba2han/Llama-Phi-3_DoRA,25.14,51.31,0.51,37.25,0.55,10.2,0.1,10.18,0.33,9.53,0.41,32.39,0.39,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,mit,3,5,True,36f99064a7be8ba475c2ee5c5424e95c263ccb87,True,True,2024-06-26,2024-05-15,True,False,Ba2han/Llama-Phi-3_DoRA,0,Ba2han/Llama-Phi-3_DoRA 🔶,byroneverson/Yi-1.5-9B-Chat-abliterated,25.14,57.23,0.57,34.35,0.54,9.82,0.1,5.59,0.29,13.66,0.44,30.17,0.37,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,2,True,4e26c200cdf2dc50dd50cdd9fe5b74887e9fa94a,True,True,2024-09-17,2024-09-04,True,False,byroneverson/Yi-1.5-9B-Chat-abliterated,1,01-ai/Yi-1.5-9B-Chat +🤝,lesubra/ECE-EIFFEL-3Bv3,25.14,37.86,0.38,36.46,0.55,14.5,0.15,10.63,0.33,18.31,0.47,33.06,0.4,🤝 base merges and moerges,Phi3ForCausalLM,Original,float16,False,apache-2.0,3,0,True,2cd31e58d38b96626a8a83192b5d2eec6669f5e2,True,True,2024-10-07,2024-10-07,False,False,lesubra/ECE-EIFFEL-3Bv3,0,lesubra/ECE-EIFFEL-3Bv3 🔶,lemon07r/Gemma-2-Ataraxy-Advanced-9B,25.07,55.16,0.55,41.16,0.59,0.15,0.0,11.41,0.34,6.51,0.38,36.04,0.42,🔶 fine-tuned on domain-specific datasets,Gemma2ForCausalLM,Original,bfloat16,True,,10,2,False,960654f5780f0b458367a6b591ad8440892c2aad,True,True,2024-09-30,2024-09-30,False,False,lemon07r/Gemma-2-Ataraxy-Advanced-9B,1,lemon07r/Gemma-2-Ataraxy-Advanced-9B (Merge) 🔶,Lyte/Llama-3.1-8B-Instruct-Reasoner-1o1_v0.3,25.05,70.98,0.71,27.84,0.49,14.8,0.15,2.68,0.27,4.9,0.35,29.09,0.36,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,8,6,True,35ab483f04afa763f36f978408f4f82e0379ee25,True,True,2024-09-17,2024-09-17,True,False,Lyte/Llama-3.1-8B-Instruct-Reasoner-1o1_v0.3,2,unsloth/Meta-Llama-3.1-8B 🟢,Qwen/Qwen2-57B-A14B,25.03,31.13,0.31,38.88,0.56,18.66,0.19,7.49,0.31,10.54,0.42,43.51,0.49,🟢 pretrained,Qwen2MoeForCausalLM,Original,bfloat16,True,apache-2.0,57,46,True,973e466c39ba76372a2ae464dbca0af3f5a5a2a9,True,False,2024-06-13,2024-05-22,False,True,Qwen/Qwen2-57B-A14B,0,Qwen/Qwen2-57B-A14B @@ -293,7 +296,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 💬,CohereForAI/aya-23-35B,24.62,64.62,0.65,34.86,0.54,2.64,0.03,5.93,0.29,13.47,0.43,26.18,0.34,"💬 chat models (RLHF, DPO, IFT, ...)",CohereForCausalLM,Original,float16,True,cc-by-nc-4.0,34,246,True,31d6fd858f20539a55401c7ad913086f54d9ca2c,True,True,2024-06-12,2024-05-19,True,True,CohereForAI/aya-23-35B,0,CohereForAI/aya-23-35B 🤝,vicgalle/Merge-Mixtral-Prometheus-8x7B,24.61,57.44,0.57,34.65,0.54,8.31,0.08,7.83,0.31,9.59,0.41,29.82,0.37,🤝 base merges and moerges,MixtralForCausalLM,Original,bfloat16,False,apache-2.0,46,2,True,ba53ee5b52a81e56b01e919c069a0d045cfd4e83,True,False,2024-06-26,2024-05-04,True,False,vicgalle/Merge-Mixtral-Prometheus-8x7B,1,vicgalle/Merge-Mixtral-Prometheus-8x7B (Merge) 
🤝,Youlln/3PRYMMAL-PHI3-3B-SLERP,24.58,36.56,0.37,35.83,0.54,13.82,0.14,10.18,0.33,17.77,0.46,33.35,0.4,🤝 base merges and moerges,Phi3ForCausalLM,Original,bfloat16,False,apache-2.0,3,0,True,9396bcf1709ac8360a95a746482520fab4295706,True,True,2024-09-23,2024-09-23,False,False,Youlln/3PRYMMAL-PHI3-3B-SLERP,1,Youlln/3PRYMMAL-PHI3-3B-SLERP (Merge) -🔶,cognitivecomputations/dolphin-2.9.3-mistral-nemo-12b,24.58,56.01,0.56,36.08,0.55,5.06,0.05,8.72,0.32,15.21,0.44,26.41,0.34,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,72,True,7b535c900688fc836fbeebaeb7133910b09bafda,True,True,2024-07-26,2024-07-23,True,True,cognitivecomputations/dolphin-2.9.3-mistral-nemo-12b,1,mistralai/Mistral-Nemo-Base-2407 +🔶,cognitivecomputations/dolphin-2.9.3-mistral-nemo-12b,24.58,56.01,0.56,36.08,0.55,5.06,0.05,8.72,0.32,15.21,0.44,26.41,0.34,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,73,True,7b535c900688fc836fbeebaeb7133910b09bafda,True,True,2024-07-26,2024-07-23,True,True,cognitivecomputations/dolphin-2.9.3-mistral-nemo-12b,1,mistralai/Mistral-Nemo-Base-2407 💬,nbeerbower/mistral-nemo-wissenschaft-12B,24.58,65.2,0.65,29.57,0.5,6.57,0.07,5.7,0.29,12.29,0.42,28.14,0.35,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,3,True,2480f9924415c72fe00ae9391bb15a6d05c889eb,True,True,2024-08-30,2024-08-12,True,False,nbeerbower/mistral-nemo-wissenschaft-12B,1,nbeerbower/mistral-nemo-wissenschaft-12B (Merge) 🔶,ZeusLabs/L3-Aethora-15B-V2,24.57,72.08,0.72,28.97,0.5,7.33,0.07,5.03,0.29,6.25,0.39,27.78,0.35,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,cc-by-sa-4.0,15,39,True,2c601f116c37dd912c89357dbdbef879a637997e,True,True,2024-06-27,2024-06-27,True,False,ZeusLabs/L3-Aethora-15B-V2,1,ZeusLabs/L3-Aethora-15B-V2 (Merge) 💬,ycros/BagelMIsteryTour-v2-8x7B,24.55,62.62,0.63,31.37,0.51,7.7,0.08,7.72,0.31,10.32,0.41,27.56,0.35,"💬 chat models (RLHF, DPO, IFT, ...)",MixtralForCausalLM,Original,bfloat16,False,cc-by-nc-4.0,46,16,True,98a8b319707be3dab1659594da69a37ed8f8c148,True,True,2024-08-04,2024-01-19,True,False,ycros/BagelMIsteryTour-v2-8x7B,1,ycros/BagelMIsteryTour-v2-8x7B (Merge) @@ -309,7 +312,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,Nekochu/Luminia-8B-RP,24.3,55.74,0.56,31.8,0.52,11.71,0.12,6.26,0.3,11.07,0.4,29.24,0.36,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,8,0,True,619be17206729d86b898b9d1b3369a7135c1a9b9,True,True,2024-09-24,2024-09-13,False,False,Nekochu/Luminia-8B-RP,2,meta-llama/Meta-Llama-3.1-8B 🔶,ValiantLabs/Llama3.1-8B-ShiningValiant2,24.29,65.24,0.65,26.35,0.48,11.63,0.12,8.95,0.32,7.19,0.39,26.38,0.34,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,14,True,6b2b5694a192cb29ad0e4314138affa25b630c0e,True,True,2024-08-10,2024-08-06,True,False,ValiantLabs/Llama3.1-8B-ShiningValiant2,2,meta-llama/Meta-Llama-3.1-8B 🔶,VAGOsolutions/SauerkrautLM-Mixtral-8x7B-Instruct,24.29,56.02,0.56,33.95,0.53,8.61,0.09,6.38,0.3,11.32,0.42,29.45,0.37,🔶 fine-tuned on domain-specific datasets,MixtralForCausalLM,Original,bfloat16,True,apache-2.0,46,21,True,30ed549de7d84f68b4c6cb619f73275c99af23cc,True,False,2024-06-26,2023-12-15,True,False,VAGOsolutions/SauerkrautLM-Mixtral-8x7B-Instruct,0,VAGOsolutions/SauerkrautLM-Mixtral-8x7B-Instruct 
-💬,Vikhrmodels/Vikhr-Nemo-12B-Instruct-R-21-09-24,24.26,59.99,0.6,31.41,0.52,12.61,0.13,5.48,0.29,9.45,0.41,26.64,0.34,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,float16,True,apache-2.0,12,57,True,6abd887cb631f705042c9e8085615fe4d76e9779,True,True,2024-09-21,2024-09-20,True,False,Vikhrmodels/Vikhr-Nemo-12B-Instruct-R-21-09-24,1,Vikhrmodels/Vikhr-Nemo-12B-Instruct-R-21-09-24 (Merge) +💬,Vikhrmodels/Vikhr-Nemo-12B-Instruct-R-21-09-24,24.26,59.99,0.6,31.41,0.52,12.61,0.13,5.48,0.29,9.45,0.41,26.64,0.34,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,float16,True,apache-2.0,12,58,True,6abd887cb631f705042c9e8085615fe4d76e9779,True,True,2024-09-21,2024-09-20,True,False,Vikhrmodels/Vikhr-Nemo-12B-Instruct-R-21-09-24,1,Vikhrmodels/Vikhr-Nemo-12B-Instruct-R-21-09-24 (Merge) 🤝,UKzExecution/LlamaExecutor-8B-3.0.5,24.26,74.03,0.74,28.41,0.5,8.53,0.09,0.78,0.26,4.65,0.38,29.17,0.36,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,True,,8,0,False,2047978e8ab1146b8881cde3d998856594f437a4,True,True,2024-07-30,2024-07-29,True,False,UKzExecution/LlamaExecutor-8B-3.0.5,1,UKzExecution/LlamaExecutor-8B-3.0.5 (Merge) 🔶,ycros/BagelMIsteryTour-v2-8x7B,24.26,59.94,0.6,31.7,0.52,7.85,0.08,7.27,0.3,11.3,0.42,27.48,0.35,🔶 fine-tuned on domain-specific datasets,MixtralForCausalLM,Original,float16,False,cc-by-nc-4.0,46,16,True,98a8b319707be3dab1659594da69a37ed8f8c148,True,True,2024-06-28,2024-01-19,True,False,ycros/BagelMIsteryTour-v2-8x7B,1,ycros/BagelMIsteryTour-v2-8x7B (Merge) 🤝,djuna/MN-Chinofun,24.26,61.1,0.61,28.48,0.5,10.5,0.1,6.15,0.3,10.38,0.41,28.92,0.36,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,True,,12,3,False,71b47c86f32e107b407fada44ec6b893c5eb8bb0,True,True,2024-09-23,2024-09-16,True,False,djuna/MN-Chinofun,1,djuna/MN-Chinofun (Merge) @@ -322,7 +325,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,PranavHarshan/LaMistral-V4,24.12,62.39,0.62,31.09,0.52,6.34,0.06,10.4,0.33,5.64,0.36,28.87,0.36,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,1,True,b373c2a1ab08823b6b119899f807793c96ef7888,True,True,2024-10-05,2024-10-01,True,False,PranavHarshan/LaMistral-V4,1,PranavHarshan/LaMistral-V4 (Merge) 🔶,NeverSleep/Lumimaid-v0.2-8B,24.12,50.38,0.5,31.96,0.52,12.61,0.13,8.17,0.31,12.32,0.43,29.29,0.36,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,8,62,True,4563201f29ef18c62d16e9f6fffd3931a63ccb51,True,True,2024-08-09,2024-07-24,False,False,NeverSleep/Lumimaid-v0.2-8B,0,NeverSleep/Lumimaid-v0.2-8B 💬,OpenBuddy/openbuddy-llama3.1-8b-v22.2-131k,24.07,66.57,0.67,29.06,0.5,9.37,0.09,3.91,0.28,9.81,0.41,25.67,0.33,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,other,8,2,True,0d9d85c7a5e4292e07c346147de56bd3991d525c,True,True,2024-07-29,2024-07-28,True,False,OpenBuddy/openbuddy-llama3.1-8b-v22.2-131k,0,OpenBuddy/openbuddy-llama3.1-8b-v22.2-131k -🤝,Luni/StarDust-12b-v2,24.06,56.29,0.56,34.95,0.54,5.97,0.06,5.82,0.29,14.26,0.43,27.1,0.34,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,24,True,75bffd7b86f37c2cebc4fdf83fbc3ab33d6c6e05,True,True,2024-09-03,2024-09-01,True,False,Luni/StarDust-12b-v2,1,Luni/StarDust-12b-v2 (Merge) +🤝,Luni/StarDust-12b-v2,24.06,56.29,0.56,34.95,0.54,5.97,0.06,5.82,0.29,14.26,0.43,27.1,0.34,🤝 base merges and 
moerges,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,26,True,75bffd7b86f37c2cebc4fdf83fbc3ab33d6c6e05,True,True,2024-09-03,2024-09-01,True,False,Luni/StarDust-12b-v2,1,Luni/StarDust-12b-v2 (Merge) 🔶,nbeerbower/mistral-nemo-gutenberg-12B-v2,24.05,62.03,0.62,34.73,0.54,2.11,0.02,3.69,0.28,13.99,0.43,27.77,0.35,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,20,True,86bf9c105ff40835132e41699ac1a76ee0e5b683,True,True,2024-09-03,2024-08-13,True,False,nbeerbower/mistral-nemo-gutenberg-12B-v2,1,nbeerbower/mistral-nemo-gutenberg-12B-v2 (Merge) 💬,Jimmy19991222/llama-3-8b-instruct-gapo-v2-rouge2-beta10-1minus-gamma0.3-rerun,24.03,66.78,0.67,28.39,0.49,4.0,0.04,7.49,0.31,8.01,0.4,29.53,0.37,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,0,True,e9692d8dbe30273839763757aa9ef07a5fcf0c59,True,True,2024-09-15,2024-09-14,True,False,Jimmy19991222/llama-3-8b-instruct-gapo-v2-rouge2-beta10-1minus-gamma0.3-rerun,1,meta-llama/Meta-Llama-3-8B-Instruct 🤝,PJMixers/LLaMa-3-CursedStock-v2.0-8B,24.03,63.31,0.63,32.56,0.53,8.61,0.09,3.24,0.27,8.04,0.39,28.4,0.36,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,llama3,8,9,True,d47cc29df363f71ffaf6cd21ac4bdeefa27359db,True,True,2024-06-27,2024-06-26,True,False,PJMixers/LLaMa-3-CursedStock-v2.0-8B,1,PJMixers/LLaMa-3-CursedStock-v2.0-8B (Merge) @@ -332,11 +335,11 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🤝,recoilme/recoilme-gemma-2-9B-v0.4,23.96,25.62,0.26,42.44,0.6,7.4,0.07,12.08,0.34,18.39,0.47,37.84,0.44,🤝 base merges and moerges,Gemma2ForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,10,1,True,2691f2cc8d80072f15d78cb7ae72831e1a12139e,True,True,2024-09-19,2024-09-18,False,False,recoilme/recoilme-gemma-2-9B-v0.4,0,recoilme/recoilme-gemma-2-9B-v0.4 💬,princeton-nlp/Llama-3-Instruct-8B-RRHF,23.95,72.75,0.73,27.22,0.49,8.84,0.09,4.03,0.28,1.48,0.35,29.37,0.36,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,0,True,73561d9b0fd42b94250246f8d794251fe9f9d2e9,True,True,2024-10-07,2024-07-06,True,False,princeton-nlp/Llama-3-Instruct-8B-RRHF,0,princeton-nlp/Llama-3-Instruct-8B-RRHF 💬,vicgalle/Roleplay-Llama-3-8B,23.94,73.2,0.73,28.55,0.5,8.69,0.09,1.45,0.26,1.68,0.35,30.09,0.37,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,apache-2.0,8,36,True,57297eb57dcc2c116f061d9dda341094203da01b,True,True,2024-06-26,2024-04-19,True,False,vicgalle/Roleplay-Llama-3-8B,0,vicgalle/Roleplay-Llama-3-8B -💬,meta-llama/Meta-Llama-3-8B-Instruct,23.91,74.08,0.74,28.24,0.5,8.69,0.09,1.23,0.26,1.6,0.36,29.6,0.37,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,3478,True,e1945c40cd546c78e41f1151f4db032b271faeaa,True,True,2024-06-12,2024-04-17,True,True,meta-llama/Meta-Llama-3-8B-Instruct,0,meta-llama/Meta-Llama-3-8B-Instruct -🔶,Delta-Vector/Baldur-8B,23.9,47.82,0.48,32.54,0.53,12.61,0.13,6.94,0.3,14.01,0.44,29.49,0.37,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,agpl-3.0,8,1,True,97f5d321a8346551a5ed704997dd1e93c59883f3,True,True,2024-10-06,2024-09-23,False,False,Delta-Vector/Baldur-8B,1,Delta-Vector/Baldur-8B (Merge) +💬,meta-llama/Meta-Llama-3-8B-Instruct,23.91,74.08,0.74,28.24,0.5,8.69,0.09,1.23,0.26,1.6,0.36,29.6,0.37,"💬 chat models (RLHF, DPO, IFT, 
...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,3480,True,e1945c40cd546c78e41f1151f4db032b271faeaa,True,True,2024-06-12,2024-04-17,True,True,meta-llama/Meta-Llama-3-8B-Instruct,0,meta-llama/Meta-Llama-3-8B-Instruct +🔶,Delta-Vector/Baldur-8B,23.9,47.82,0.48,32.54,0.53,12.61,0.13,6.94,0.3,14.01,0.44,29.49,0.37,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,agpl-3.0,8,2,True,97f5d321a8346551a5ed704997dd1e93c59883f3,True,True,2024-10-06,2024-09-23,False,False,Delta-Vector/Baldur-8B,1,Delta-Vector/Baldur-8B (Merge) 💬,01-ai/Yi-34B-Chat,23.9,46.99,0.47,37.62,0.56,4.31,0.04,11.74,0.34,8.36,0.4,34.37,0.41,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,34,342,True,2e528b6a80fb064a0a746c5ca43114b135e30464,True,True,2024-06-12,2023-11-22,True,True,01-ai/Yi-34B-Chat,0,01-ai/Yi-34B-Chat 💬,yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table,23.88,71.32,0.71,28.6,0.5,8.61,0.09,1.23,0.26,3.68,0.37,29.81,0.37,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,0,False,7a326a956e6169b287a04ef93cdc0342a0f3311a,True,True,2024-09-29,2024-09-29,True,False,yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table,0,yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table -💬,meta-llama/Llama-3.2-3B-Instruct,23.85,73.93,0.74,24.06,0.46,15.56,0.16,3.8,0.28,1.37,0.35,24.39,0.32,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3.2,3,272,True,276b29ce8303c9b88966a9b32fc75692dce4d8e1,True,True,2024-09-27,2024-09-18,True,True,meta-llama/Llama-3.2-3B-Instruct,0,meta-llama/Llama-3.2-3B-Instruct +💬,meta-llama/Llama-3.2-3B-Instruct,23.85,73.93,0.74,24.06,0.46,15.56,0.16,3.8,0.28,1.37,0.35,24.39,0.32,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3.2,3,275,True,276b29ce8303c9b88966a9b32fc75692dce4d8e1,True,True,2024-09-27,2024-09-18,True,True,meta-llama/Llama-3.2-3B-Instruct,0,meta-llama/Llama-3.2-3B-Instruct 💬,xkp24/Llama-3-8B-Instruct-SPPO-score-Iter2_bt_8b-table-0.002,23.84,71.32,0.71,28.57,0.5,6.34,0.06,1.12,0.26,6.07,0.39,29.6,0.37,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,0,False,e5d2f179b4a7bd851dcf2b7db6358b13001bf1af,True,True,2024-10-01,2024-09-30,True,False,xkp24/Llama-3-8B-Instruct-SPPO-score-Iter2_bt_8b-table-0.002,0,xkp24/Llama-3-8B-Instruct-SPPO-score-Iter2_bt_8b-table-0.002 🔶,Jimmy19991222/llama-3-8b-instruct-gapo-v2-rouge2-beta10-gamma0.3-lr1.0e-6-scale-log,23.81,66.05,0.66,28.08,0.49,4.15,0.04,7.16,0.3,7.81,0.4,29.6,0.37,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,0,True,9ff0ce408abb8dbcf7efb9b6533338f2c344a355,True,True,2024-09-22,2024-09-22,True,False,Jimmy19991222/llama-3-8b-instruct-gapo-v2-rouge2-beta10-gamma0.3-lr1.0e-6-scale-log,1,meta-llama/Meta-Llama-3-8B-Instruct 💬,xukp20/Llama-3-8B-Instruct-SPPO-Iter3_bt_8b-table,23.81,70.34,0.7,29.73,0.51,7.7,0.08,1.23,0.26,3.9,0.37,29.92,0.37,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,0,False,19a48ccf5ea463afbbbc61d650b8fb63ff2d94c7,True,True,2024-09-29,2024-09-28,True,False,xukp20/Llama-3-8B-Instruct-SPPO-Iter3_bt_8b-table,0,xukp20/Llama-3-8B-Instruct-SPPO-Iter3_bt_8b-table @@ -349,6 +352,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,Jimmy19991222/llama-3-8b-instruct-gapo-v2-bert-f1-beta10-gamma0.3-lr1.0e-6-1minus-rerun,23.74,67.17,0.67,27.76,0.49,3.63,0.04,5.93,0.29,8.71,0.4,29.26,0.36,🔶 fine-tuned on domain-specific 
datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,0,True,00c02a823b4ff1a6cfcded6085ba9630df633998,True,True,2024-09-18,2024-09-17,True,False,Jimmy19991222/llama-3-8b-instruct-gapo-v2-bert-f1-beta10-gamma0.3-lr1.0e-6-1minus-rerun,1,meta-llama/Meta-Llama-3-8B-Instruct 🔶,migtissera/Tess-v2.5-Phi-3-medium-128k-14B,23.74,45.39,0.45,46.22,0.62,2.64,0.03,7.72,0.31,10.11,0.41,30.35,0.37,🔶 fine-tuned on domain-specific datasets,Phi3ForCausalLM,Original,bfloat16,True,mit,13,3,True,3a4dbce32e765f659d418c57f0040d290b8b480d,True,True,2024-08-30,2024-06-05,True,False,migtissera/Tess-v2.5-Phi-3-medium-128k-14B,1,microsoft/Phi-3-medium-128k-instruct 🔶,lightblue/suzume-llama-3-8B-multilingual,23.72,66.78,0.67,28.9,0.49,7.85,0.08,4.47,0.28,7.84,0.4,26.48,0.34,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,8,104,True,0cb15aa9ec685eef494f9a15f65aefcfe3c04c66,True,True,2024-07-30,2024-04-23,True,False,lightblue/suzume-llama-3-8B-multilingual,1,meta-llama/Meta-Llama-3-8B-Instruct +🔶,EpistemeAI/Fireball-Meta-Llama-3.1-8B-Instruct-Agent-0.003-128K-code,23.72,59.75,0.6,28.17,0.49,12.08,0.12,6.94,0.3,8.46,0.4,26.92,0.34,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,8,0,True,8e8f1569a8a01ed3d6588f2669c730d4993355b5,True,True,2024-10-05,2024-10-04,False,False,EpistemeAI/Fireball-Meta-Llama-3.1-8B-Instruct-Agent-0.003-128K-code,2,Removed 🔶,adamo1139/Yi-34B-200K-AEZAKMI-v2,23.69,45.55,0.46,35.28,0.54,4.83,0.05,10.96,0.33,6.48,0.39,39.03,0.45,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,34,12,True,189b42b0dae6352fbe7165255aae851961c8e678,True,True,2024-06-26,2023-12-13,True,False,adamo1139/Yi-34B-200K-AEZAKMI-v2,0,adamo1139/Yi-34B-200K-AEZAKMI-v2 🔶,Jimmy19991222/llama-3-8b-instruct-gapo-v2-bert_f1-beta10-gamma0.3-lr1.0e-6-scale-log,23.68,65.56,0.66,28.61,0.49,2.95,0.03,7.27,0.3,8.17,0.4,29.53,0.37,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,0,True,99d9e31df5b7e88b1da78b1bd335cac3215dfd6e,True,True,2024-09-22,2024-09-22,True,False,Jimmy19991222/llama-3-8b-instruct-gapo-v2-bert_f1-beta10-gamma0.3-lr1.0e-6-scale-log,1,meta-llama/Meta-Llama-3-8B-Instruct 🔶,Jimmy19991222/llama-3-8b-instruct-gapo-v2-rougeL-beta10-gamma0.3-lr1.0e-6-scale-log,23.68,64.92,0.65,28.56,0.5,4.15,0.04,6.94,0.3,7.38,0.4,30.12,0.37,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,0,True,ec67f95c4d1813a34bbde52d0ad14824fd7111a0,True,True,2024-09-22,2024-09-22,True,False,Jimmy19991222/llama-3-8b-instruct-gapo-v2-rougeL-beta10-gamma0.3-lr1.0e-6-scale-log,1,meta-llama/Meta-Llama-3-8B-Instruct @@ -386,7 +390,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,MaziyarPanahi/calme-2.1-qwen2-7b,23.2,38.16,0.38,31.01,0.5,21.07,0.21,5.26,0.29,13.8,0.44,29.92,0.37,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,5aac57e2290f7c49af88a9cb9883ce25b58882a1,True,True,2024-09-18,2024-06-27,True,False,MaziyarPanahi/calme-2.1-qwen2-7b,1,Qwen/Qwen2-7B 💬,xukp20/Llama-3-8B-Instruct-SPPO-score-Iter3_bt_8b-table-0.002,23.2,68.52,0.69,29.74,0.51,5.06,0.05,1.12,0.26,5.63,0.38,29.12,0.36,"💬 chat models (RLHF, DPO, IFT, 
...)",LlamaForCausalLM,Original,float16,True,,8,0,False,8ef9ef7e2bf522e707a7b090af55f2ec1eafd4b9,True,True,2024-09-29,2024-09-28,True,False,xukp20/Llama-3-8B-Instruct-SPPO-score-Iter3_bt_8b-table-0.002,0,xukp20/Llama-3-8B-Instruct-SPPO-score-Iter3_bt_8b-table-0.002 🔶,CausalLM/34b-beta,23.18,30.43,0.3,36.68,0.56,4.15,0.04,12.86,0.35,6.92,0.37,48.06,0.53,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,gpl-3.0,34,61,True,0429951eb30ccdfff3515e711aaa7649a8a7364c,True,True,2024-06-26,2024-02-06,False,True,CausalLM/34b-beta,0,CausalLM/34b-beta -🤝,Luni/StarDust-12b-v1,23.17,54.59,0.55,34.45,0.54,5.97,0.06,3.47,0.28,13.76,0.43,26.8,0.34,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,11,True,91976b0c71dce1310f4a6139552e10a6149bdc31,True,True,2024-09-03,2024-08-29,True,False,Luni/StarDust-12b-v1,1,Luni/StarDust-12b-v1 (Merge) +🤝,Luni/StarDust-12b-v1,23.17,54.59,0.55,34.45,0.54,5.97,0.06,3.47,0.28,13.76,0.43,26.8,0.34,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,12,True,91976b0c71dce1310f4a6139552e10a6149bdc31,True,True,2024-09-03,2024-08-29,True,False,Luni/StarDust-12b-v1,1,Luni/StarDust-12b-v1 (Merge) 🤝,ClaudioItaly/Book-Gut12B,23.15,39.98,0.4,34.63,0.54,8.76,0.09,7.61,0.31,18.28,0.46,29.67,0.37,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,mit,12,1,True,ae54351faca8170c93bf1de3a51bf16650f5bcf5,True,True,2024-09-17,2024-09-12,False,False,ClaudioItaly/Book-Gut12B,1,ClaudioItaly/Book-Gut12B (Merge) 🔶,maywell/Qwen2-7B-Multilingual-RP,23.15,43.47,0.43,30.54,0.51,20.62,0.21,6.26,0.3,6.23,0.37,31.77,0.39,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,36,True,487e8f0498419e4d1188f661dbb63bd629be4638,True,True,2024-09-05,2024-06-24,True,False,maywell/Qwen2-7B-Multilingual-RP,0,maywell/Qwen2-7B-Multilingual-RP 🔶,mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated,23.13,73.29,0.73,27.13,0.49,6.42,0.06,0.89,0.26,3.21,0.36,27.81,0.35,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,116,True,aef878bdf42c119d007322967006fcdef5ae6ee1,True,True,2024-09-10,2024-07-24,True,True,mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated,2,meta-llama/Meta-Llama-3.1-8B @@ -396,7 +400,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 💬,UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter3,23.06,67.03,0.67,29.72,0.51,7.18,0.07,2.01,0.27,2.89,0.36,29.53,0.37,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,apache-2.0,8,75,True,f73dafc2923acd56f115f21f76e9d14f8d19a63e,True,True,2024-06-28,2024-06-25,True,False,UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter3,0,UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter3 💬,yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table,23.05,67.85,0.68,27.47,0.49,8.84,0.09,1.23,0.26,2.75,0.36,30.2,0.37,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,0,False,0d9cb29aa87b0c17ed011ffbc83803f3f6dd18e7,True,True,2024-09-29,2024-09-29,True,False,yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table,0,yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table 💬,MaziyarPanahi/calme-2.3-phi3-4b,23.02,49.26,0.49,37.66,0.55,2.95,0.03,9.06,0.32,7.75,0.4,31.42,0.38,"💬 chat models (RLHF, DPO, IFT, ...)",Phi3ForCausalLM,Original,bfloat16,True,mit,3,9,True,e1f70c3724c728aadd1c7c1bb279487494f7059e,True,True,2024-06-26,2024-05-10,True,False,MaziyarPanahi/calme-2.3-phi3-4b,1,microsoft/Phi-3-mini-4k-instruct 
-🔶,Qwen/Qwen2.5-Coder-7B-Instruct,22.95,61.04,0.61,30.47,0.51,7.02,0.07,3.8,0.28,8.61,0.4,26.79,0.34,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,130,True,3030861ab8e72c6155e1821631bf977ef40d3e5b,True,True,2024-09-19,2024-09-17,True,True,Qwen/Qwen2.5-Coder-7B-Instruct,1,Qwen/Qwen2.5-Coder-7B-Instruct (Merge) +🔶,Qwen/Qwen2.5-Coder-7B-Instruct,22.95,61.04,0.61,30.47,0.51,7.02,0.07,3.8,0.28,8.61,0.4,26.79,0.34,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,131,True,3030861ab8e72c6155e1821631bf977ef40d3e5b,True,True,2024-09-19,2024-09-17,True,True,Qwen/Qwen2.5-Coder-7B-Instruct,1,Qwen/Qwen2.5-Coder-7B-Instruct (Merge) 🔶,grimjim/Llama-3.1-8B-Instruct-abliterated_via_adapter,22.95,48.7,0.49,29.42,0.51,12.39,0.12,8.5,0.31,9.26,0.4,29.46,0.37,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,False,llama3.1,8,26,True,b37ab2f859c96b125ff1c45c7ff0e267aa229156,True,True,2024-09-17,2024-07-25,False,False,grimjim/Llama-3.1-8B-Instruct-abliterated_via_adapter,1,grimjim/Llama-3.1-8B-Instruct-abliterated_via_adapter (Merge) 💬,xukp20/Llama-3-8B-Instruct-SPPO-Iter3_gp_8b-table,22.95,66.2,0.66,28.44,0.5,7.63,0.08,1.23,0.26,5.13,0.38,29.05,0.36,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,0,False,d1e19da1029f2d4d45de015754bc52dcb1ea5570,True,True,2024-09-29,2024-09-28,True,False,xukp20/Llama-3-8B-Instruct-SPPO-Iter3_gp_8b-table,0,xukp20/Llama-3-8B-Instruct-SPPO-Iter3_gp_8b-table 🔶,BAAI/Infinity-Instruct-7M-Gen-Llama3_1-8B,22.94,61.32,0.61,30.89,0.51,9.74,0.1,5.7,0.29,5.3,0.36,24.71,0.32,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,6,True,56f9c2845ae024eb8b1dd9ea0d8891cbaf33c596,True,True,2024-08-29,2024-08-02,True,False,BAAI/Infinity-Instruct-7M-Gen-Llama3_1-8B,0,BAAI/Infinity-Instruct-7M-Gen-Llama3_1-8B @@ -405,7 +409,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 💬,LLM360/K2-Chat,22.93,51.52,0.52,33.79,0.54,1.59,0.02,7.49,0.31,16.82,0.46,26.34,0.34,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,65,33,True,5454f2d28031c9127e4227c873ca2f154e02e4c7,True,True,2024-06-12,2024-05-22,True,True,LLM360/K2-Chat,0,LLM360/K2-Chat 💬,yfzp/Llama-3-8B-Instruct-SPPO-Iter1_bt_2b-table,22.9,67.09,0.67,28.17,0.5,6.87,0.07,1.23,0.26,3.85,0.37,30.18,0.37,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,0,False,97b2d0e790a6fcdf39c34a2043f0818368c7dcb3,True,True,2024-09-30,2024-09-29,True,False,yfzp/Llama-3-8B-Instruct-SPPO-Iter1_bt_2b-table,0,yfzp/Llama-3-8B-Instruct-SPPO-Iter1_bt_2b-table 💬,01-ai/Yi-1.5-9B-Chat-16K,22.9,42.14,0.42,31.5,0.52,12.61,0.13,7.83,0.31,10.04,0.41,33.26,0.4,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,31,True,2b397e5f0fab87984efa66856c5c4ed4bbe68b50,True,True,2024-06-12,2024-05-15,True,True,01-ai/Yi-1.5-9B-Chat-16K,0,01-ai/Yi-1.5-9B-Chat-16K -💬,HumanLLMs/Humanish-Mistral-Nemo-Instruct-2407,22.88,54.51,0.55,32.71,0.53,7.63,0.08,5.03,0.29,9.4,0.4,28.01,0.35,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,0,True,45b80bdce8d447ef494af06751904afcc607eb37,True,True,2024-10-06,2024-10-06,True,False,HumanLLMs/Humanish-Mistral-Nemo-Instruct-2407,2,mistralai/Mistral-Nemo-Base-2407 +💬,HumanLLMs/Humanish-Mistral-Nemo-Instruct-2407,22.88,54.51,0.55,32.71,0.53,7.63,0.08,5.03,0.29,9.4,0.4,28.01,0.35,"💬 chat models (RLHF, 
DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,1,True,45b80bdce8d447ef494af06751904afcc607eb37,True,True,2024-10-06,2024-10-06,True,False,HumanLLMs/Humanish-Mistral-Nemo-Instruct-2407,2,mistralai/Mistral-Nemo-Base-2407 💬,saltlux/luxia-21.4b-alignment-v1.0,22.86,36.93,0.37,48.02,0.64,6.19,0.06,6.82,0.3,12.51,0.43,26.7,0.34,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,apache-2.0,21,32,True,87d5673e6d9f60462f195e9414a0bf6874c89ceb,True,True,2024-06-29,2024-03-12,True,False,saltlux/luxia-21.4b-alignment-v1.0,0,saltlux/luxia-21.4b-alignment-v1.0 💬,Jimmy19991222/llama-3-8b-instruct-gapo-v2-bleu-beta0.1-no-length-scale-gamma0.4,22.81,62.85,0.63,29.33,0.5,1.66,0.02,5.7,0.29,9.07,0.4,28.27,0.35,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,0,False,de8bb28ad7a9d1158f318a4461dc47ad03e6e560,True,True,2024-09-06,,True,False,Jimmy19991222/llama-3-8b-instruct-gapo-v2-bleu-beta0.1-no-length-scale-gamma0.4,0,Removed 💬,xkp24/Llama-3-8B-Instruct-SPPO-Iter2_gp_8b-table,22.8,66.21,0.66,28.51,0.5,6.95,0.07,1.23,0.26,5.0,0.38,28.88,0.36,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,0,False,9db00cbbba84453b18956fcc76f264f94a205955,True,True,2024-09-30,2024-09-30,True,False,xkp24/Llama-3-8B-Instruct-SPPO-Iter2_gp_8b-table,0,xkp24/Llama-3-8B-Instruct-SPPO-Iter2_gp_8b-table @@ -432,9 +436,9 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G ❓,Pretergeek/OpenChat-3.5-0106_8.11B_36Layers-Appended,22.57,59.76,0.6,24.06,0.46,6.8,0.07,7.61,0.31,11.78,0.43,25.44,0.33,❓ other,Unknown,Original,bfloat16,False,apache-2.0,0,2,True,e957847e013bdd2f6e852b8a1c369ddce92fca78,True,True,,2024-07-26,False,False,Pretergeek/OpenChat-3.5-0106_8.11B_36Layers-Appended,1,Pretergeek/OpenChat-3.5-0106_8.11B_36Layers-Appended (Merge) 🔶,nbeerbower/Lyra-Gutenberg-mistral-nemo-12B,22.57,34.95,0.35,36.99,0.56,8.31,0.08,11.19,0.33,14.76,0.44,29.2,0.36,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,12,15,True,5c506391eb02075e02f4cf5953b443505d646bce,True,True,2024-09-03,2024-08-23,True,False,nbeerbower/Lyra-Gutenberg-mistral-nemo-12B,1,nbeerbower/Lyra-Gutenberg-mistral-nemo-12B (Merge) 💬,openchat/openchat-3.5-1210,22.56,60.37,0.6,23.24,0.45,6.87,0.07,6.82,0.3,14.28,0.44,23.81,0.31,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,276,True,801f5459b7577241500785f11c2b026912badd6e,True,True,2024-06-12,2023-12-12,True,True,openchat/openchat-3.5-1210,1,mistralai/Mistral-7B-v0.1 +🤝,Pretergeek/OpenChat-3.5-0106_10.7B_48Layers-Appended,22.55,59.61,0.6,24.06,0.46,6.8,0.07,7.61,0.31,11.78,0.43,25.44,0.33,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,apache-2.0,10,2,True,1091b30480f4cc91f26cb1bd7579e527f490f8d2,True,True,2024-07-31,2024-07-27,True,False,Pretergeek/OpenChat-3.5-0106_10.7B_48Layers-Appended,1,Pretergeek/OpenChat-3.5-0106_10.7B_48Layers-Appended (Merge) 🤝,Pretergeek/OpenChat-3.5-0106_8.99B_40Layers-Appended,22.55,59.61,0.6,24.06,0.46,6.8,0.07,7.61,0.31,11.78,0.43,25.44,0.33,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,apache-2.0,8,2,True,2120720b7fb2ecc27b9c03cc876316fd25b26e40,True,True,2024-07-27,2024-07-26,True,False,Pretergeek/OpenChat-3.5-0106_8.99B_40Layers-Appended,1,Pretergeek/OpenChat-3.5-0106_8.99B_40Layers-Appended (Merge) 
🤝,Pretergeek/OpenChat-3.5-0106_9.86B_44Layers-Appended,22.55,59.61,0.6,24.06,0.46,6.8,0.07,7.61,0.31,11.78,0.43,25.44,0.33,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,apache-2.0,9,2,True,8a7ef4a2c4faf8760650e26e44509920bace633a,True,True,2024-07-27,2024-07-27,True,False,Pretergeek/OpenChat-3.5-0106_9.86B_44Layers-Appended,1,Pretergeek/OpenChat-3.5-0106_9.86B_44Layers-Appended (Merge) -🤝,Pretergeek/OpenChat-3.5-0106_10.7B_48Layers-Appended,22.55,59.61,0.6,24.06,0.46,6.8,0.07,7.61,0.31,11.78,0.43,25.44,0.33,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,apache-2.0,10,2,True,1091b30480f4cc91f26cb1bd7579e527f490f8d2,True,True,2024-07-31,2024-07-27,True,False,Pretergeek/OpenChat-3.5-0106_10.7B_48Layers-Appended,1,Pretergeek/OpenChat-3.5-0106_10.7B_48Layers-Appended (Merge) 💬,princeton-nlp/Llama-3-Instruct-8B-RDPO,22.55,66.6,0.67,29.03,0.5,2.11,0.02,4.36,0.28,4.2,0.38,28.97,0.36,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,0,True,9497ca226a68981f42df2e5b3a4a1a2ea702a942,True,True,2024-09-28,2024-05-17,True,False,princeton-nlp/Llama-3-Instruct-8B-RDPO,0,princeton-nlp/Llama-3-Instruct-8B-RDPO 🔶,MaziyarPanahi/calme-2.4-qwen2-7b,22.52,33.0,0.33,31.82,0.51,18.35,0.18,4.47,0.28,14.43,0.45,33.08,0.4,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,d683c3ef1feb13e92227f5fd92fe5bc4b55ea4a2,True,True,2024-09-18,2024-06-27,True,False,MaziyarPanahi/calme-2.4-qwen2-7b,1,Qwen/Qwen2-7B 💬,vicgalle/ConfigurableBeagle-11B,22.52,58.34,0.58,32.39,0.53,3.7,0.04,6.94,0.3,7.38,0.4,26.38,0.34,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,float16,True,apache-2.0,10,2,True,bbc16dbf94b8e8a99bb3e2ada6755faf9c2990dd,True,True,2024-06-26,2024-02-17,True,False,vicgalle/ConfigurableBeagle-11B,0,vicgalle/ConfigurableBeagle-11B @@ -448,10 +452,10 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,Replete-AI/Llama3-8B-Instruct-Replete-Adapted,22.4,69.15,0.69,26.89,0.49,4.83,0.05,4.14,0.28,2.82,0.36,26.57,0.34,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,,8,0,False,d930f2111913da6fb7693187e1cdc817191c8e5e,True,True,2024-07-09,,True,False,Replete-AI/Llama3-8B-Instruct-Replete-Adapted,0,Removed 🔶,Locutusque/Hercules-6.1-Llama-3.1-8B,22.4,60.07,0.6,24.15,0.47,15.63,0.16,1.45,0.26,3.42,0.36,29.65,0.37,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,5,True,f4abf4385111b4acbea8bee2c6636ef84b2dac43,True,True,2024-10-01,2024-09-30,True,False,Locutusque/Hercules-6.1-Llama-3.1-8B,0,Locutusque/Hercules-6.1-Llama-3.1-8B 🔶,EpistemeAI2/Fireball-Alpaca-Llama3.1-8B-Philos,22.39,49.86,0.5,29.26,0.5,10.88,0.11,5.7,0.29,11.89,0.43,26.73,0.34,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,1,True,3dcca4cf9bdd9003c8dc91f5c78cefef1d4ae0d7,True,True,2024-08-29,2024-08-29,False,False,EpistemeAI2/Fireball-Alpaca-Llama3.1-8B-Philos,2,unsloth/Meta-Llama-3.1-8B -💬,HumanLLMs/Humanish-LLama3.1-8B-Instruct,22.38,64.98,0.65,28.01,0.5,8.46,0.08,0.78,0.26,2.0,0.36,30.02,0.37,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,0,True,42f73ada2b7fb16f18a75404d72b7911bf1e65ce,True,True,2024-10-05,2024-10-04,True,False,HumanLLMs/Humanish-LLama3.1-8B-Instruct,1,meta-llama/Meta-Llama-3-8B-Instruct +💬,HumanLLMs/Humanish-LLama3.1-8B-Instruct,22.38,64.98,0.65,28.01,0.5,8.46,0.08,0.78,0.26,2.0,0.36,30.02,0.37,"💬 chat models (RLHF, 
DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,1,True,42f73ada2b7fb16f18a75404d72b7911bf1e65ce,True,True,2024-10-05,2024-10-04,True,False,HumanLLMs/Humanish-LLama3.1-8B-Instruct,1,meta-llama/Meta-Llama-3-8B-Instruct 💬,vicgalle/CarbonBeagle-11B,22.36,54.15,0.54,33.06,0.53,5.51,0.06,6.94,0.3,9.19,0.4,25.29,0.33,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,float16,False,apache-2.0,10,9,True,3fe9bf5327606d013b182fed17a472f5f043759b,True,True,2024-06-26,2024-01-21,True,False,vicgalle/CarbonBeagle-11B,1,vicgalle/CarbonBeagle-11B (Merge) 🔶,MaziyarPanahi/calme-2.5-qwen2-7b,22.34,31.45,0.31,28.28,0.49,20.69,0.21,8.05,0.31,15.79,0.46,29.8,0.37,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,20fb1afc22c0722cb2c57185fff59befeba0fbec,True,True,2024-09-29,2024-06-27,True,False,MaziyarPanahi/calme-2.5-qwen2-7b,1,Qwen/Qwen2-7B -🔶,nbeerbower/Flammades-Mistral-Nemo-12B,22.34,38.42,0.38,32.39,0.53,6.19,0.06,7.16,0.3,20.31,0.48,29.57,0.37,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,0,True,ddc76d1976af06aedc7f06bbffcaa34166c1cbdd,True,True,2024-10-06,2024-10-05,False,False,nbeerbower/Flammades-Mistral-Nemo-12B,1,nbeerbower/Flammades-Mistral-Nemo-12B (Merge) +🔶,nbeerbower/Flammades-Mistral-Nemo-12B,22.34,38.42,0.38,32.39,0.53,6.19,0.06,7.16,0.3,20.31,0.48,29.57,0.37,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,1,True,ddc76d1976af06aedc7f06bbffcaa34166c1cbdd,True,True,2024-10-06,2024-10-05,False,False,nbeerbower/Flammades-Mistral-Nemo-12B,1,nbeerbower/Flammades-Mistral-Nemo-12B (Merge) 🔶,EpistemeAI/Fireball-Alpaca-Llama3.1.08-8B-Philos-C-R2,22.32,46.73,0.47,28.25,0.49,11.03,0.11,4.81,0.29,17.0,0.46,26.13,0.34,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,8,0,True,b19336101aa5f4807d1574f4c11eebc1c1a1c34e,True,True,2024-09-14,2024-09-14,False,False,EpistemeAI/Fireball-Alpaca-Llama3.1.08-8B-Philos-C-R2,2,unsloth/Meta-Llama-3.1-8B 🔶,WizardLMTeam/WizardLM-70B-V1.0,22.32,49.51,0.5,37.54,0.56,3.47,0.03,2.13,0.27,14.09,0.44,27.18,0.34,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama2,70,234,True,54aaecaff7d0790eb9f0ecea1cc267a94cc66949,True,True,2024-06-12,2023-08-09,False,True,WizardLMTeam/WizardLM-70B-V1.0,0,WizardLMTeam/WizardLM-70B-V1.0 🔶,LimYeri/CodeMind-Llama3-8B-unsloth_v2-merged,22.32,69.46,0.69,26.66,0.49,5.74,0.06,2.01,0.27,2.22,0.33,27.84,0.35,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,0,True,d4ec745f8279e3ac6d41709153c21cc077e66385,True,True,2024-08-28,2024-06-04,True,False,LimYeri/CodeMind-Llama3-8B-unsloth_v2-merged,1,unsloth/llama-3-8b-Instruct-bnb-4bit @@ -473,7 +477,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,mlabonne/Daredevil-8B,22.13,45.48,0.45,31.63,0.52,8.99,0.09,7.72,0.31,7.53,0.39,31.45,0.38,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,False,other,8,32,True,717953c83631cc9adf2dddccfff06739308f10f7,True,True,2024-07-02,2024-05-25,True,True,mlabonne/Daredevil-8B,1,mlabonne/Daredevil-8B (Merge) 💬,OpenBuddy/openbuddy-mixtral-7bx8-v18.1-32k,22.12,54.93,0.55,24.54,0.47,9.52,0.1,7.27,0.3,5.28,0.38,31.16,0.38,"💬 chat models (RLHF, DPO, IFT, 
...)",MixtralForCausalLM,Original,bfloat16,True,apache-2.0,46,14,True,98596b6731058cc9cca85f3b8ac9077342cb60ae,True,False,2024-06-26,2024-02-12,True,False,OpenBuddy/openbuddy-mixtral-7bx8-v18.1-32k,0,OpenBuddy/openbuddy-mixtral-7bx8-v18.1-32k 🔶,migtissera/Trinity-2-Codestral-22B-v0.2,22.11,44.3,0.44,37.79,0.57,7.85,0.08,7.72,0.31,8.86,0.4,26.15,0.34,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,other,22,4,True,9452a82ac7bfa9092a061ec913e9078ef3525a03,True,True,2024-09-16,2024-08-13,True,False,migtissera/Trinity-2-Codestral-22B-v0.2,1,mistralai/Codestral-22B-v0.1 -🔶,Replete-AI/Replete-LLM-V2.5-Qwen-3b,22.11,53.42,0.53,27.21,0.48,5.06,0.05,7.72,0.31,8.55,0.4,30.68,0.38,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,,3,0,False,26601a8da5afce3b5959d91bdd0faaab6df8bf95,True,True,2024-09-29,,False,False,Replete-AI/Replete-LLM-V2.5-Qwen-3b,0,Removed +🔶,rombodawg/Rombos-LLM-V2.5-Qwen-3b,22.11,53.42,0.53,27.21,0.48,5.06,0.05,7.72,0.31,8.55,0.4,30.68,0.38,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,other,3,2,True,26601a8da5afce3b5959d91bdd0faaab6df8bf95,True,True,2024-09-29,2024-10-06,False,False,rombodawg/Rombos-LLM-V2.5-Qwen-3b,1,rombodawg/Rombos-LLM-V2.5-Qwen-3b (Merge) 🤝,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.1-gamma-0.01,22.09,43.59,0.44,29.53,0.5,4.31,0.04,8.05,0.31,16.34,0.45,30.69,0.38,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,861347cd643d396877d8e560367cf0717c671228,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.1-gamma-0.01,1,johnsutor/Llama-3-8B-Instruct_breadcrumbs_ties-density-0.1-gamma-0.01 (Merge) 🔶,MaziyarPanahi/calme-2.7-qwen2-7b,22.07,35.92,0.36,28.91,0.49,12.08,0.12,5.48,0.29,19.94,0.48,30.06,0.37,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,edc11a1baccedc04a5a4576ee4910fd8922ad47f,True,True,2024-09-18,2024-06-27,True,False,MaziyarPanahi/calme-2.7-qwen2-7b,1,Qwen/Qwen2-7B 💬,01-ai/Yi-1.5-6B-Chat,22.05,48.02,0.48,23.55,0.46,12.54,0.13,9.06,0.32,14.7,0.44,24.41,0.32,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,6,41,True,3f64d3f159c6ad8494227bb77e2a7baef8cd808b,True,True,2024-06-12,2024-05-11,True,True,01-ai/Yi-1.5-6B-Chat,0,01-ai/Yi-1.5-6B-Chat @@ -625,8 +629,9 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🤝,Darkknight535/OpenCrystal-12B-L3,20.51,40.71,0.41,31.84,0.52,7.93,0.08,7.49,0.31,5.74,0.37,29.34,0.36,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,True,,11,10,False,974d2d453afdde40f6a993601bbbbf9d97b43606,True,True,2024-08-26,2024-08-25,False,False,Darkknight535/OpenCrystal-12B-L3,0,Darkknight535/OpenCrystal-12B-L3 🤝,shadowml/BeagSake-7B,20.5,40.19,0.4,32.53,0.52,6.27,0.06,4.03,0.28,16.38,0.46,23.61,0.31,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,cc-by-nc-4.0,7,1,True,b7a3b25a188a4608fd05fc4247ddd504c1f529d1,True,True,2024-06-29,2024-01-31,False,False,shadowml/BeagSake-7B,1,shadowml/BeagSake-7B (Merge) 🔶,sequelbox/Llama3.1-8B-MOTH,20.5,52.45,0.52,27.92,0.49,10.12,0.1,2.46,0.27,4.05,0.37,25.98,0.33,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama3.1,8,1,True,8db363e36b1efc9015ab14648e68bcfba9e8d8a0,True,True,2024-09-19,2024-09-01,True,False,sequelbox/Llama3.1-8B-MOTH,2,meta-llama/Meta-Llama-3.1-8B 
-💬,meta-llama/Meta-Llama-3-8B-Instruct,20.48,47.82,0.48,26.8,0.49,8.38,0.08,5.7,0.29,5.4,0.38,28.79,0.36,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,llama3,8,3478,True,e1945c40cd546c78e41f1151f4db032b271faeaa,True,True,2024-07-08,2024-04-17,False,True,meta-llama/Meta-Llama-3-8B-Instruct,0,meta-llama/Meta-Llama-3-8B-Instruct +💬,meta-llama/Meta-Llama-3-8B-Instruct,20.48,47.82,0.48,26.8,0.49,8.38,0.08,5.7,0.29,5.4,0.38,28.79,0.36,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,llama3,8,3480,True,e1945c40cd546c78e41f1151f4db032b271faeaa,True,True,2024-07-08,2024-04-17,False,True,meta-llama/Meta-Llama-3-8B-Instruct,0,meta-llama/Meta-Llama-3-8B-Instruct 🔶,Xclbr7/Arcanum-12b,20.48,29.07,0.29,31.88,0.53,10.27,0.1,9.4,0.32,13.53,0.42,28.74,0.36,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,float16,True,mit,12,0,True,845ac67d2b527296ae8c06da4453bf8a60f2e59b,True,True,2024-09-17,2024-09-17,False,False,Xclbr7/Arcanum-12b,0,Xclbr7/Arcanum-12b +🔶,EpistemeAI/Fireball-Meta-Llama-3.1-8B-Instruct-Agent-0.003-128K,20.48,44.57,0.45,28.03,0.49,11.18,0.11,5.93,0.29,4.9,0.38,28.26,0.35,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,8,2,True,b4a88fb5fb27fc5d8a503303cdb7aaeff373fd92,True,True,2024-10-05,2024-09-26,False,False,EpistemeAI/Fireball-Meta-Llama-3.1-8B-Instruct-Agent-0.003-128K,1,Removed 🤝,grimjim/llama-3-Nephilim-v2-8B,20.41,39.22,0.39,29.9,0.5,9.52,0.1,6.6,0.3,7.89,0.39,29.35,0.36,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,cc-by-nc-4.0,8,1,True,924f56cdefbfaf38deb6aee3ad301ced027e142d,True,True,2024-09-18,2024-06-26,False,False,grimjim/llama-3-Nephilim-v2-8B,1,grimjim/llama-3-Nephilim-v2-8B (Merge) 🔶,SanjiWatsuki/Kunoichi-DPO-v2-7B,20.41,54.31,0.54,20.9,0.44,6.57,0.07,6.15,0.3,11.09,0.42,23.41,0.31,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,float16,True,cc-by-nc-4.0,7,80,True,5278247beb482c4fceff2294570236d68b74d132,True,True,2024-06-28,2024-01-13,True,False,SanjiWatsuki/Kunoichi-DPO-v2-7B,0,SanjiWatsuki/Kunoichi-DPO-v2-7B 🔶,EpistemeAI/Fireball-Meta-Llama-3.1-8B-Instruct-Math,20.39,46.23,0.46,28.96,0.5,9.82,0.1,5.48,0.29,5.98,0.36,25.9,0.33,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,0,True,677c97b4f92bfc330d4fae628e9a1df1ef606dcc,True,True,2024-09-23,2024-09-23,False,False,EpistemeAI/Fireball-Meta-Llama-3.1-8B-Instruct-Math,2,unsloth/Meta-Llama-3.1-8B @@ -656,7 +661,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🟢,Qwen/Qwen1.5-14B,20.22,29.05,0.29,30.06,0.51,16.47,0.16,5.93,0.29,10.46,0.42,29.37,0.36,🟢 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,other,14,36,True,dce4b190d34470818e5bec2a92cb8233aaa02ca2,True,True,2024-06-13,2024-01-22,False,True,Qwen/Qwen1.5-14B,0,Qwen/Qwen1.5-14B 🤝,icefog72/IceDrunkCherryRP-7b,20.22,48.98,0.49,28.24,0.48,5.82,0.06,3.58,0.28,12.38,0.43,22.33,0.3,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,cc-by-nc-4.0,7,1,True,160b01e50d9c9441886f6cf987a3495bd8fa1c49,True,True,2024-09-24,2024-09-24,False,False,icefog72/IceDrunkCherryRP-7b,0,icefog72/IceDrunkCherryRP-7b 🤝,Locutusque/Llama-3-Yggdrasil-2.0-8B,20.22,53.71,0.54,26.92,0.48,6.87,0.07,1.68,0.26,8.07,0.4,24.07,0.32,🤝 base merges and 
moerges,LlamaForCausalLM,Original,bfloat16,True,,8,1,False,ec2329946ccc81a7c1ae36210728f717bc4f01d8,True,True,2024-06-26,2024-06-05,True,False,Locutusque/Llama-3-Yggdrasil-2.0-8B,1,Locutusque/Llama-3-Yggdrasil-2.0-8B (Merge) -💬,HumanLLMs/Humanish-LLama3-8B-Instruct,20.21,56.73,0.57,25.41,0.48,6.42,0.06,0.56,0.25,3.7,0.37,28.44,0.36,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,0,True,f87258996499031c7be73e5d47d45e8497bef12c,True,True,2024-09-30,2024-10-04,True,False,HumanLLMs/Humanish-LLama3-8B-Instruct,1,meta-llama/Meta-Llama-3-8B-Instruct +💬,HumanLLMs/Humanish-LLama3-8B-Instruct,20.21,56.73,0.57,25.41,0.48,6.42,0.06,0.56,0.25,3.7,0.37,28.44,0.36,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,1,True,f87258996499031c7be73e5d47d45e8497bef12c,True,True,2024-09-30,2024-10-04,True,False,HumanLLMs/Humanish-LLama3-8B-Instruct,1,meta-llama/Meta-Llama-3-8B-Instruct 🔶,beowolx/CodeNinja-1.0-OpenChat-7B,20.21,54.47,0.54,21.71,0.44,5.21,0.05,5.93,0.29,11.54,0.42,22.39,0.3,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,mit,7,104,True,9934c04c767e6ae0f792712a060f02915391d4ec,True,True,2024-07-30,2023-12-20,True,False,beowolx/CodeNinja-1.0-OpenChat-7B,0,beowolx/CodeNinja-1.0-OpenChat-7B 🔶,VIRNECT/llama-3-Korean-8B,20.18,50.21,0.5,27.56,0.49,9.29,0.09,2.8,0.27,3.03,0.36,28.18,0.35,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,0,True,c658409e094ff04eeb6ab6cee2d4bc56716e45f1,True,True,2024-07-17,2024-07-17,True,False,VIRNECT/llama-3-Korean-8B,0,VIRNECT/llama-3-Korean-8B 🔶,VAGOsolutions/SauerkrautLM-SOLAR-Instruct,20.16,49.17,0.49,31.84,0.52,0.0,0.0,7.38,0.31,8.33,0.4,24.26,0.32,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,10,47,True,2665d7600ccd253728453433d2434844e6f702bd,True,True,2024-06-26,2023-12-20,True,False,VAGOsolutions/SauerkrautLM-SOLAR-Instruct,0,VAGOsolutions/SauerkrautLM-SOLAR-Instruct @@ -799,7 +804,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,uukuguy/speechless-llama2-hermes-orca-platypus-wizardlm-13b,18.6,45.62,0.46,26.79,0.48,1.44,0.01,2.68,0.27,17.75,0.47,17.32,0.26,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,,13,32,False,954cc87b0ed5fa280126de546daf648861031512,True,True,2024-06-26,2023-09-01,False,False,uukuguy/speechless-llama2-hermes-orca-platypus-wizardlm-13b,0,uukuguy/speechless-llama2-hermes-orca-platypus-wizardlm-13b 🤝,icefog72/IceDrinkNameGoesHereRP-7b-Model_Stock,18.57,49.68,0.5,26.22,0.47,3.55,0.04,2.46,0.27,9.31,0.41,20.19,0.28,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,cc-by-nc-4.0,7,2,True,78f7625f85c3cb150565ebb68c3f8d47d48325c8,True,True,2024-09-24,2024-09-14,False,False,icefog72/IceDrinkNameGoesHereRP-7b-Model_Stock,0,icefog72/IceDrinkNameGoesHereRP-7b-Model_Stock 🔶,Dans-DiscountModels/Dans-Instruct-Mix-8b-ChatML-V0.2.0,18.54,50.64,0.51,24.73,0.46,4.08,0.04,5.82,0.29,3.76,0.36,22.22,0.3,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,,8,1,False,15a9988381fdba15281f1bd6b04c34f3f96120cc,True,True,2024-09-30,2024-09-30,True,False,Dans-DiscountModels/Dans-Instruct-Mix-8b-ChatML-V0.2.0,1,Dans-DiscountModels/Meta-Llama-3.1-8B-ChatML -💬,HuggingFaceH4/zephyr-7b-alpha,18.53,51.91,0.52,23.96,0.46,1.51,0.02,6.38,0.3,7.5,0.39,19.94,0.28,"💬 chat models (RLHF, DPO, IFT, 
...)",MistralForCausalLM,Original,bfloat16,True,mit,7,1094,True,2ce2d025864af849b3e5029e2ec9d568eeda892d,True,True,2024-06-12,2023-10-09,True,True,HuggingFaceH4/zephyr-7b-alpha,1,mistralai/Mistral-7B-v0.1 +💬,HuggingFaceH4/zephyr-7b-alpha,18.53,51.91,0.52,23.96,0.46,1.51,0.02,6.38,0.3,7.5,0.39,19.94,0.28,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,mit,7,1095,True,2ce2d025864af849b3e5029e2ec9d568eeda892d,True,True,2024-06-12,2023-10-09,True,True,HuggingFaceH4/zephyr-7b-alpha,1,mistralai/Mistral-7B-v0.1 🤝,Epiculous/Violet_Twilight-v0.2,18.53,45.32,0.45,23.94,0.46,2.72,0.03,2.13,0.27,13.61,0.43,23.45,0.31,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,apache-2.0,12,8,True,30c8bad3c1f565150afbf2fc90cacf4f45d096f6,True,True,2024-09-16,2024-09-12,True,False,Epiculous/Violet_Twilight-v0.2,0,Epiculous/Violet_Twilight-v0.2 🤝,allknowingroger/LimyQstar-7B-slerp,18.52,34.91,0.35,30.19,0.5,5.97,0.06,6.49,0.3,10.2,0.41,23.37,0.31,🤝 base merges and moerges,MistralForCausalLM,Original,bfloat16,False,apache-2.0,7,0,True,6dc557c7bfd6a6f9bc8190bc8a31c3b732deca40,True,True,2024-06-26,2024-03-23,False,False,allknowingroger/LimyQstar-7B-slerp,1,allknowingroger/LimyQstar-7B-slerp (Merge) 🔶,VIRNECT/llama-3-Korean-8B-r-v-0.1,18.51,49.16,0.49,25.88,0.48,7.18,0.07,0.0,0.24,3.74,0.37,25.11,0.33,🔶 fine-tuned on domain-specific datasets,?,Adapter,float16,True,llama3,16,0,True,10acb1aa4f341f2d3c899d78c520b0822a909b95,True,True,2024-07-18,2024-07-18,True,False,VIRNECT/llama-3-Korean-8B-r-v-0.1,2,MLP-KTLim/llama-3-Korean-Bllossom-8B (Merge) @@ -857,6 +862,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 💬,mlabonne/AlphaMonarch-7B,17.59,49.39,0.49,23.95,0.46,3.85,0.04,2.68,0.27,9.32,0.41,16.36,0.25,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,float16,False,cc-by-nc-4.0,7,148,True,3de065d84411d74e5b3590f67f52b0b71faf6161,True,True,2024-06-12,2024-02-14,True,True,mlabonne/AlphaMonarch-7B,1,mlabonne/AlphaMonarch-7B (Merge) 🟢,01-ai/Yi-9B-200K,17.59,23.27,0.23,26.49,0.48,5.82,0.06,8.72,0.32,12.11,0.43,29.13,0.36,���� pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,75,True,8c93accd5589dbb74ee938e103613508c4a9b88d,True,True,2024-06-12,2024-03-15,False,True,01-ai/Yi-9B-200K,0,01-ai/Yi-9B-200K 🔶,theprint/CleverBoi-Nemo-12B-v2,17.57,20.46,0.2,31.65,0.52,8.61,0.09,8.5,0.31,11.43,0.42,24.76,0.32,🔶 fine-tuned on domain-specific datasets,?,Adapter,bfloat16,True,apache-2.0,13,2,True,cd1f9ee1c484f857bb0e5ae6aac37dc434911f10,True,True,2024-09-24,2024-09-16,False,False,theprint/CleverBoi-Nemo-12B-v2,1,unsloth/Mistral-Nemo-Instruct-2407-bnb-4bit +🔶,aws-prototyping/MegaBeam-Mistral-7B-512k,17.54,59.73,0.6,12.36,0.37,2.64,0.03,4.36,0.28,8.52,0.4,17.65,0.26,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,41,True,3e3b8c4b933650eed81ede7c4395df943d2a0796,True,True,2024-10-07,2024-07-30,True,False,aws-prototyping/MegaBeam-Mistral-7B-512k,0,aws-prototyping/MegaBeam-Mistral-7B-512k 💬,princeton-nlp/Mistral-7B-Instruct-SimPO,17.54,46.87,0.47,22.38,0.45,2.49,0.02,3.8,0.28,9.76,0.41,19.96,0.28,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,,7,1,False,03191ee1e60d21a698d11a515703a037073724f8,True,True,2024-09-21,2024-05-24,True,False,princeton-nlp/Mistral-7B-Instruct-SimPO,0,princeton-nlp/Mistral-7B-Instruct-SimPO 🔶,LimYeri/CodeMind-Llama3-8B-unsloth_v4-one-merged,17.54,32.11,0.32,24.57,0.47,5.06,0.05,7.94,0.31,9.4,0.41,26.14,0.34,🔶 fine-tuned on 
domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,0,True,9c8939ccdc10beee56462eadbc16e28359a6d4c4,True,True,2024-08-28,2024-06-06,False,False,LimYeri/CodeMind-Llama3-8B-unsloth_v4-one-merged,1,unsloth/llama-3-8b-Instruct-bnb-4bit 💬,Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.3,17.52,52.76,0.53,22.39,0.46,3.47,0.03,0.0,0.24,3.65,0.37,22.85,0.31,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,apache-2.0,8,0,True,aa176c0db7791a1c09039135791145b0704a5f46,True,True,2024-07-28,2024-05-14,True,False,Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.3,1,Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.3 (Merge) @@ -866,7 +872,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 💬,google/gemma-1.1-7b-it,17.4,50.39,0.5,15.93,0.39,3.17,0.03,5.82,0.29,11.51,0.42,17.6,0.26,"💬 chat models (RLHF, DPO, IFT, ...)",GemmaForCausalLM,Original,bfloat16,True,gemma,8,262,True,16128b0aeb50762ea96430c0c06a37941bf9f274,True,True,2024-06-12,2024-03-26,True,True,google/gemma-1.1-7b-it,0,google/gemma-1.1-7b-it 🔶,theprint/phi-3-mini-4k-python,17.39,24.09,0.24,28.45,0.49,8.46,0.08,5.48,0.29,9.22,0.39,28.63,0.36,🔶 fine-tuned on domain-specific datasets,?,Adapter,bfloat16,True,apache-2.0,4,0,True,81453e5718775630581ab9950e6c0ccf0d7a4177,True,True,2024-09-13,2024-06-03,False,False,theprint/phi-3-mini-4k-python,1,unsloth/Phi-3-mini-4k-instruct-bnb-4bit 🔶,Magpie-Align/Llama-3-8B-Magpie-Align-SFT-v0.3,17.38,50.64,0.51,23.7,0.46,6.27,0.06,2.13,0.27,0.4,0.34,21.14,0.29,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,2,True,d2578eb754d1c20efe604749296580f680950917,True,True,2024-08-06,2024-07-13,True,False,Magpie-Align/Llama-3-8B-Magpie-Align-SFT-v0.3,1,meta-llama/Meta-Llama-3-8B -🔶,theprint/Boptruth-Agatha-7B,17.36,31.24,0.31,29.29,0.5,4.61,0.05,6.6,0.3,11.76,0.43,20.67,0.29,🔶 fine-tuned on domain-specific datasets,?,Adapter,float16,True,,7,0,False,ef7c7570be29a58f4a8358a6d4c75f59a5282191,True,True,2024-09-30,2024-09-11,False,False,theprint/Boptruth-Agatha-7B,0,theprint/Boptruth-Agatha-7B +🔶,theprint/Boptruth-Agatha-7B,17.36,31.24,0.31,29.29,0.5,4.61,0.05,6.6,0.3,11.76,0.43,20.67,0.29,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,float16,True,,7,0,False,ef7c7570be29a58f4a8358a6d4c75f59a5282191,True,True,2024-09-30,2024-09-11,False,False,theprint/Boptruth-Agatha-7B,0,theprint/Boptruth-Agatha-7B 💬,princeton-nlp/Mistral-7B-Base-SFT-CPO,17.34,46.55,0.47,21.86,0.44,2.42,0.02,5.59,0.29,9.25,0.41,18.35,0.27,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,,7,0,True,7f67394668b94a9ddfb64daff8976b48b135d96c,True,True,2024-10-07,2024-07-06,True,False,princeton-nlp/Mistral-7B-Base-SFT-CPO,0,princeton-nlp/Mistral-7B-Base-SFT-CPO 🤝,johnsutor/Llama-3-8B-Instruct_dare_ties-density-0.9,17.28,21.61,0.22,24.69,0.47,0.0,0.0,7.72,0.31,25.88,0.52,23.81,0.31,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_dare_ties-density-0.9,1,johnsutor/Llama-3-8B-Instruct_dare_ties-density-0.9 (Merge) 🔶,win10/llama3-13.45b-Instruct,17.26,41.44,0.41,26.68,0.49,1.96,0.02,1.12,0.26,6.33,0.38,26.06,0.33,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,False,llama3,13,1,True,94cc0f415e355c6d3d47168a6ff5239ca586904a,True,True,2024-06-26,2024-06-09,True,False,win10/llama3-13.45b-Instruct,1,win10/llama3-13.45b-Instruct (Merge) @@ -883,12 +889,14 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH 
Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,fblgit/una-cybertron-7b-v2-bf16,17.09,47.37,0.47,14.97,0.4,3.32,0.03,6.38,0.3,14.48,0.45,16.03,0.24,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,116,True,7ab101a153740aec39e95ec02831c56f4eab7910,True,True,2024-06-30,2023-12-02,True,False,fblgit/una-cybertron-7b-v2-bf16,0,fblgit/una-cybertron-7b-v2-bf16 💬,shivam9980/mistral-7b-news-cnn-merged,17.08,46.34,0.46,11.15,0.36,1.21,0.01,7.83,0.31,15.67,0.45,20.31,0.28,"💬 chat models (RLHF, DPO, IFT, ...)",?,Adapter,float16,True,apache-2.0,7,0,True,a0d7029cb00c122843aef3d7ad61d514de334ea3,True,True,2024-09-12,2024-03-18,True,False,shivam9980/mistral-7b-news-cnn-merged,1,unsloth/mistral-7b-instruct-v0.2-bnb-4bit 🔶,nbeerbower/mistral-nemo-cc-12B,17.08,14.35,0.14,34.45,0.54,1.81,0.02,8.72,0.32,14.26,0.44,28.87,0.36,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,1,True,fc32293e0b022d6daef9bfdb0c54d57a5226bf9a,True,True,2024-09-14,2024-08-18,False,False,nbeerbower/mistral-nemo-cc-12B,1,nbeerbower/mistral-nemo-cc-12B (Merge) +🔶,SaisExperiments/Gemma-2-2B-Opus-Instruct,17.06,47.5,0.47,19.53,0.43,3.93,0.04,4.47,0.28,8.58,0.41,18.34,0.27,🔶 fine-tuned on domain-specific datasets,Gemma2ForCausalLM,Original,bfloat16,True,gemma,2,0,True,7caa9e833d3f5713cf1b8ebd8beeb6ef02da99ea,True,True,2024-10-07,2024-09-03,False,False,SaisExperiments/Gemma-2-2B-Opus-Instruct,2,google/gemma-2-2b 🔶,zhengr/MixTAO-7Bx2-MoE-v8.1,17.06,41.88,0.42,19.18,0.42,5.97,0.06,6.49,0.3,8.3,0.4,20.52,0.28,🔶 fine-tuned on domain-specific datasets,MixtralForCausalLM,Original,bfloat16,True,apache-2.0,12,53,True,828e963abf2db0f5af9ed0d4034e538fc1cf5f40,True,False,2024-06-27,2024-02-26,True,False,zhengr/MixTAO-7Bx2-MoE-v8.1,0,zhengr/MixTAO-7Bx2-MoE-v8.1 🔶,google/gemma-2-2b-it,17.05,56.68,0.57,17.98,0.42,0.08,0.0,3.24,0.27,7.08,0.39,17.22,0.25,🔶 fine-tuned on domain-specific datasets,InternLM2ForCausalLM,Original,bfloat16,True,gemma,2,593,True,2b6ac3ff954ad896c115bbfa1b571cd93ea2c20f,True,True,2024-07-31,2024-07-16,True,True,google/gemma-2-2b-it,1,google/gemma-2-2b 🔶,Salesforce/LLaMA-3-8B-SFR-Iterative-DPO-R,17.03,38.16,0.38,29.15,0.5,0.15,0.0,5.03,0.29,5.55,0.36,24.14,0.32,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,72,True,ad7d1aed82eb6d8ca4b3aad627ff76f72ab34f70,True,True,2024-07-02,2024-05-09,True,True,Salesforce/LLaMA-3-8B-SFR-Iterative-DPO-R,0,Salesforce/LLaMA-3-8B-SFR-Iterative-DPO-R +🔶,amazon/MegaBeam-Mistral-7B-300k,17.02,52.03,0.52,19.29,0.42,2.11,0.02,3.13,0.27,8.35,0.4,17.21,0.25,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,15,True,42572e5c9a0747b19af5c5c9962d122622f32295,True,True,2024-10-07,2024-05-13,True,False,amazon/MegaBeam-Mistral-7B-300k,0,amazon/MegaBeam-Mistral-7B-300k 💬,UCLA-AGI/Mistral7B-PairRM-SPPO-Iter2,17.0,44.46,0.44,22.48,0.45,1.51,0.02,5.15,0.29,9.8,0.41,18.63,0.27,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,8201064df67b5762ff9f361ff1b98aae3747855c,True,True,2024-08-07,2024-05-04,True,False,UCLA-AGI/Mistral7B-PairRM-SPPO-Iter2,0,UCLA-AGI/Mistral7B-PairRM-SPPO-Iter2 💬,tanliboy/lambda-gemma-2-9b-dpo,16.97,18.29,0.18,35.74,0.55,0.0,0.0,8.05,0.31,8.57,0.41,31.17,0.38,"💬 chat models (RLHF, DPO, IFT, 
...)",Gemma2ForCausalLM,Original,bfloat16,True,gemma,9,1,True,b141471308bc41ffe15180a6668c735396c3949b,True,True,2024-09-18,2024-07-24,True,False,tanliboy/lambda-gemma-2-9b-dpo,2,google/gemma-2-9b -🟢,Qwen/Qwen2.5-3B,16.96,26.9,0.27,24.3,0.46,7.93,0.08,6.38,0.3,11.76,0.43,24.48,0.32,🟢 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,other,3,21,True,e4aa5ac50aa507415cda96cc99eb77ad0a3d2d34,True,True,2024-09-27,2024-09-15,False,True,Qwen/Qwen2.5-3B,0,Qwen/Qwen2.5-3B +🟢,Qwen/Qwen2.5-3B,16.96,26.9,0.27,24.3,0.46,7.93,0.08,6.38,0.3,11.76,0.43,24.48,0.32,🟢 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,other,3,22,True,e4aa5ac50aa507415cda96cc99eb77ad0a3d2d34,True,True,2024-09-27,2024-09-15,False,True,Qwen/Qwen2.5-3B,0,Qwen/Qwen2.5-3B 🔶,Magpie-Align/MagpieLM-8B-SFT-v0.1,16.9,47.21,0.47,23.61,0.46,2.27,0.02,2.35,0.27,3.88,0.36,22.11,0.3,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,3,True,b91f605a511707cb3b7f0893a8ed80c77b32d5a8,True,True,2024-09-19,2024-09-15,True,False,Magpie-Align/MagpieLM-8B-SFT-v0.1,1,meta-llama/Meta-Llama-3.1-8B 💬,princeton-nlp/Mistral-7B-Base-SFT-SimPO,16.89,47.01,0.47,22.33,0.44,0.6,0.01,4.47,0.28,8.03,0.4,18.91,0.27,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,,7,0,True,9d9e8b8de4f673d45bc826efc4a1444f9d480222,True,True,2024-09-21,2024-05-17,True,False,princeton-nlp/Mistral-7B-Base-SFT-SimPO,0,princeton-nlp/Mistral-7B-Base-SFT-SimPO 💬,Magpie-Align/Llama-3-8B-Magpie-Align-v0.3,16.89,44.97,0.45,24.31,0.46,2.57,0.03,2.01,0.27,3.74,0.34,23.71,0.31,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,3,True,7e420ddd6ff48bf213dcab2a9ddb7845b80dd1aa,True,True,2024-08-06,2024-07-15,True,False,Magpie-Align/Llama-3-8B-Magpie-Align-v0.3,2,meta-llama/Meta-Llama-3-8B @@ -933,7 +941,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 💬,princeton-nlp/Mistral-7B-Base-SFT-DPO,16.22,44.03,0.44,20.79,0.44,1.59,0.02,3.02,0.27,9.63,0.41,18.28,0.26,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,,7,0,True,17134fd80cfbf3980353967a30dc6f450f18f78f,True,True,2024-10-07,2024-05-17,True,False,princeton-nlp/Mistral-7B-Base-SFT-DPO,0,princeton-nlp/Mistral-7B-Base-SFT-DPO 💬,princeton-nlp/Mistral-7B-Base-SFT-RRHF,16.14,44.07,0.44,19.6,0.43,2.27,0.02,5.37,0.29,10.03,0.42,15.53,0.24,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,,7,0,True,0d5861072e9d01f420451bf6a5b108bc8d3a76bc,True,True,2024-10-07,2024-07-06,True,False,princeton-nlp/Mistral-7B-Base-SFT-RRHF,0,princeton-nlp/Mistral-7B-Base-SFT-RRHF 🔶,xinchen9/Llama3.1_8B_Instruct_CoT,16.13,29.74,0.3,21.14,0.44,4.91,0.05,6.94,0.3,13.17,0.44,20.88,0.29,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,0,True,cab1b33ddff08de11c5daea8ae079d126d503d8b,True,True,2024-09-19,2024-09-16,False,False,xinchen9/Llama3.1_8B_Instruct_CoT,0,xinchen9/Llama3.1_8B_Instruct_CoT -🔶,Replete-AI/Replete-LLM-V2.5-Qwen-1.5b,16.1,34.02,0.34,18.71,0.43,7.02,0.07,5.15,0.29,10.35,0.42,21.36,0.29,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,,1,0,False,1f634da015ed671efe7dc574bc2a1954f5b2cc93,True,True,2024-09-29,,False,False,Replete-AI/Replete-LLM-V2.5-Qwen-1.5b,0,Removed +🔶,rombodawg/Rombos-LLM-V2.5-Qwen-1.5b,16.1,34.02,0.34,18.71,0.43,7.02,0.07,5.15,0.29,10.35,0.42,21.36,0.29,🔶 fine-tuned on domain-specific 
datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,1,1,True,1f634da015ed671efe7dc574bc2a1954f5b2cc93,True,True,2024-09-29,2024-10-06,False,False,rombodawg/Rombos-LLM-V2.5-Qwen-1.5b,1,rombodawg/Rombos-LLM-V2.5-Qwen-1.5b (Merge) 💬,princeton-nlp/Mistral-7B-Instruct-ORPO,16.03,47.2,0.47,18.04,0.41,2.57,0.03,3.24,0.27,6.64,0.39,18.47,0.27,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,,7,0,True,69c0481f4100629a49ae73f760ddbb61d8e98e48,True,True,2024-10-07,2024-05-17,True,False,princeton-nlp/Mistral-7B-Instruct-ORPO,0,princeton-nlp/Mistral-7B-Instruct-ORPO 💬,princeton-nlp/Llama-3-Base-8B-SFT-RRHF,16.01,33.57,0.34,23.66,0.45,2.87,0.03,7.38,0.31,7.56,0.37,20.99,0.29,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,8,0,True,aea8c04b3940cebd1f8296a2c76914f0ce70c276,True,True,2024-10-07,2024-07-06,True,False,princeton-nlp/Llama-3-Base-8B-SFT-RRHF,0,princeton-nlp/Llama-3-Base-8B-SFT-RRHF 💬,CohereForAI/aya-23-8B,15.97,46.99,0.47,20.2,0.43,1.44,0.01,4.59,0.28,8.42,0.39,14.2,0.23,"💬 chat models (RLHF, DPO, IFT, ...)",CohereForCausalLM,Original,float16,True,cc-by-nc-4.0,8,372,True,ec151d218a24031eb039d92fb83d10445427efc9,True,True,2024-06-12,2024-05-19,True,True,CohereForAI/aya-23-8B,0,CohereForAI/aya-23-8B @@ -971,7 +979,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🟢,Qwen/Qwen1.5-7B,15.22,26.84,0.27,23.08,0.46,4.46,0.04,6.49,0.3,9.16,0.41,21.29,0.29,🟢 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,other,7,45,True,831096e3a59a0789a541415da25ef195ceb802fe,True,True,2024-06-09,2024-01-22,False,True,Qwen/Qwen1.5-7B,0,Qwen/Qwen1.5-7B 💬,Enno-Ai/EnnoAi-Pro-French-Llama-3-8B-v0.4,15.18,41.89,0.42,16.88,0.41,0.6,0.01,2.8,0.27,10.76,0.42,18.16,0.26,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,creativeml-openrail-m,8,0,True,328722ae96e3a112ec900dbe77d410788a526c5c,True,True,2024-06-30,2024-06-27,True,False,Enno-Ai/EnnoAi-Pro-French-Llama-3-8B-v0.4,0,Enno-Ai/EnnoAi-Pro-French-Llama-3-8B-v0.4 🔶,WizardLMTeam/WizardLM-13B-V1.2,15.15,33.92,0.34,22.89,0.45,1.74,0.02,1.45,0.26,14.03,0.44,16.88,0.25,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama2,13,225,True,cf5f40382559f19e13874e45b39575171ca46ef8,True,True,2024-06-12,2023-07-25,False,True,WizardLMTeam/WizardLM-13B-V1.2,0,WizardLMTeam/WizardLM-13B-V1.2 -💬,TencentARC/LLaMA-Pro-8B-Instruct,15.14,44.86,0.45,19.49,0.42,1.66,0.02,3.24,0.27,11.11,0.42,10.51,0.19,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama2,8,59,True,9850c8afce19a69d8fc4a1603a82441157514016,True,True,2024-06-12,2024-01-06,True,True,TencentARC/LLaMA-Pro-8B-Instruct,0,TencentARC/LLaMA-Pro-8B-Instruct +💬,TencentARC/LLaMA-Pro-8B-Instruct,15.14,44.86,0.45,19.49,0.42,1.66,0.02,3.24,0.27,11.11,0.42,10.51,0.19,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama2,8,60,True,9850c8afce19a69d8fc4a1603a82441157514016,True,True,2024-06-12,2024-01-06,True,True,TencentARC/LLaMA-Pro-8B-Instruct,0,TencentARC/LLaMA-Pro-8B-Instruct 🔶,EpistemeAI/Athena-gemma-2-2b-it-Philos,15.1,46.21,0.46,13.21,0.38,0.3,0.0,4.14,0.28,12.85,0.43,13.87,0.22,🔶 fine-tuned on domain-specific datasets,Gemma2ForCausalLM,Original,float16,True,apache-2.0,2,0,True,dea2b35d496bd32ed3c88d42ff3022654153f2e1,True,True,2024-09-05,2024-09-05,True,False,EpistemeAI/Athena-gemma-2-2b-it-Philos,1,unsloth/gemma-2-2b-it-bnb-4bit 
🤝,oobabooga/CodeBooga-34B-v0.1,15.1,52.5,0.53,8.56,0.34,0.53,0.01,0.89,0.26,12.98,0.43,15.11,0.24,🤝 base merges and moerges,LlamaForCausalLM,Original,float16,True,llama2,33,141,True,8a4e1e16ac46333cbd0c17d733d3d70a956071a6,True,True,2024-07-29,2023-10-19,True,False,oobabooga/CodeBooga-34B-v0.1,0,oobabooga/CodeBooga-34B-v0.1 💬,ghost-x/ghost-8b-beta-1608,15.09,42.73,0.43,23.46,0.45,1.21,0.01,1.12,0.26,1.58,0.35,20.44,0.28,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,other,8,28,True,6d1b3853aab774af5a4db21ff9d5764918fb48f5,True,True,2024-09-17,2024-08-18,True,False,ghost-x/ghost-8b-beta-1608,1,ghost-x/ghost-8b-beta @@ -998,9 +1006,10 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,dreamgen/WizardLM-2-7B,14.74,45.83,0.46,9.21,0.35,2.49,0.02,4.92,0.29,7.53,0.39,18.45,0.27,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,34,True,b5f2d7bff91445a47331dcce588aee009d11d255,True,True,2024-06-27,2024-04-16,True,False,dreamgen/WizardLM-2-7B,0,dreamgen/WizardLM-2-7B 💬,abhishek/autotrain-llama3-70b-orpo-v1,14.71,42.33,0.42,41.57,0.6,0.45,0.0,0.0,0.24,2.57,0.36,1.36,0.11,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,other,70,4,True,053236c6846cc561c1503ba05e2b28c94855a432,True,True,2024-08-30,2024-05-02,True,False,abhishek/autotrain-llama3-70b-orpo-v1,0,abhishek/autotrain-llama3-70b-orpo-v1 💬,OpenLeecher/llama3-8b-lima,14.67,43.71,0.44,19.57,0.43,2.95,0.03,0.0,0.24,3.74,0.37,18.07,0.26,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,0,True,237a2bcb240eecd9355a091f839e42ba3d31bda5,True,True,2024-10-01,2024-10-01,True,False,OpenLeecher/llama3-8b-lima,0,OpenLeecher/llama3-8b-lima +🤝,Marsouuu/MiniQwenMathExpert-ECE-PRYMMAL-Martial,14.67,27.95,0.28,19.02,0.42,8.91,0.09,4.25,0.28,6.51,0.39,21.36,0.29,🤝 base merges and moerges,Qwen2ForCausalLM,Original,bfloat16,True,,1,0,False,0787682e65f7763ef978c4cf2e32803be8b49298,True,True,2024-10-07,2024-10-07,False,False,Marsouuu/MiniQwenMathExpert-ECE-PRYMMAL-Martial,1,Marsouuu/MiniQwenMathExpert-ECE-PRYMMAL-Martial (Merge) 🔶,rhysjones/phi-2-orange-v2,14.64,36.7,0.37,25.61,0.48,0.0,0.0,1.57,0.26,6.97,0.36,17.03,0.25,🔶 fine-tuned on domain-specific datasets,PhiForCausalLM,Original,float16,True,mit,2,27,True,f4085189114accfb65225deb8fbdf15767b7ee56,True,True,2024-06-28,2024-03-04,True,False,rhysjones/phi-2-orange-v2,0,rhysjones/phi-2-orange-v2 🟢,LLM360/K2,14.53,22.52,0.23,28.22,0.5,2.04,0.02,3.58,0.28,8.55,0.4,22.27,0.3,🟢 pretrained,LlamaForCausalLM,Original,float16,True,apache-2.0,65,78,True,49d159b6f2b64d562e745f0ff06e65b9a4c28ead,True,True,2024-06-26,2024-04-17,False,True,LLM360/K2,0,LLM360/K2 -🟢,mistralai/Mistral-7B-v0.1,14.52,23.86,0.24,22.17,0.44,2.49,0.02,5.59,0.29,10.68,0.41,22.36,0.3,🟢 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,3397,True,26bca36bde8333b5d7f72e9ed20ccda6a618af24,True,True,2024-06-12,2023-09-20,False,True,mistralai/Mistral-7B-v0.1,0,mistralai/Mistral-7B-v0.1 +🟢,mistralai/Mistral-7B-v0.1,14.52,23.86,0.24,22.17,0.44,2.49,0.02,5.59,0.29,10.68,0.41,22.36,0.3,🟢 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,3398,True,26bca36bde8333b5d7f72e9ed20ccda6a618af24,True,True,2024-06-12,2023-09-20,False,True,mistralai/Mistral-7B-v0.1,0,mistralai/Mistral-7B-v0.1 🔶,LeroyDyer/LCARS_AI_StarTrek_Computer,14.51,35.83,0.36,21.78,0.44,3.47,0.03,2.35,0.27,7.44,0.4,16.2,0.25,🔶 fine-tuned on domain-specific 
datasets,MistralForCausalLM,Original,float16,True,mit,7,2,True,9d4af4ab13df574ad0d40ed71de7d43c17f59a94,True,True,2024-08-07,2024-05-11,False,False,LeroyDyer/LCARS_AI_StarTrek_Computer,0,LeroyDyer/LCARS_AI_StarTrek_Computer 🔶,VAGOsolutions/SauerkrautLM-Gemma-7b,14.5,34.07,0.34,18.49,0.42,4.91,0.05,4.81,0.29,2.93,0.36,21.79,0.3,🔶 fine-tuned on domain-specific datasets,GemmaForCausalLM,Original,bfloat16,True,other,8,13,True,4296bdabf82e900235b094e5348be03ebb0ec891,True,True,2024-06-26,2024-02-27,True,False,VAGOsolutions/SauerkrautLM-Gemma-7b,0,VAGOsolutions/SauerkrautLM-Gemma-7b 🔶,BEE-spoke-data/Meta-Llama-3-8Bee,14.49,19.51,0.2,24.2,0.46,3.85,0.04,8.5,0.31,6.24,0.37,24.66,0.32,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,0,True,8143e34e77a49a30ec2617c5c9cc22cb3cda2287,True,True,2024-07-04,2024-04-28,False,False,BEE-spoke-data/Meta-Llama-3-8Bee,1,meta-llama/Meta-Llama-3-8B @@ -1018,9 +1027,10 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,TencentARC/Mistral_Pro_8B_v0.1,14.2,21.15,0.21,22.89,0.45,5.66,0.06,4.03,0.28,11.83,0.42,19.61,0.28,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,8,66,True,366f159fc5b314ba2a955209d2bca4600f84dac0,True,True,2024-06-12,2024-02-22,False,True,TencentARC/Mistral_Pro_8B_v0.1,0,TencentARC/Mistral_Pro_8B_v0.1 🟢,tklohj/WindyFloLLM,14.17,26.69,0.27,24.4,0.46,1.13,0.01,3.36,0.28,11.86,0.43,17.57,0.26,🟢 pretrained,LlamaForCausalLM,Original,float16,True,,13,0,False,21f4241ab3f091d1d309e9076a8d8e3f014908a8,True,True,2024-07-10,2024-06-30,False,False,tklohj/WindyFloLLM,1,tklohj/WindyFloLLM (Merge) 🟢,nvidia/Minitron-8B-Base,14.17,24.24,0.24,22.04,0.44,2.27,0.02,3.13,0.27,9.09,0.4,24.23,0.32,🟢 pretrained,NemotronForCausalLM,Original,bfloat16,True,other,7,64,True,70fa5997afc42807f41eebd5d481f040556fdf97,True,True,2024-09-25,2024-07-19,False,True,nvidia/Minitron-8B-Base,0,nvidia/Minitron-8B-Base +🟢,mistralai/Mistral-7B-v0.3,14.15,22.66,0.23,23.95,0.45,2.64,0.03,5.59,0.29,8.36,0.4,21.7,0.3,🟢 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,368,True,b67d6a03ca097c5122fa65904fce0413500bf8c8,True,True,2024-06-12,2024-05-22,False,True,mistralai/Mistral-7B-v0.3,0,mistralai/Mistral-7B-v0.3 🟢,mistral-community/Mistral-7B-v0.2,14.15,22.66,0.23,23.95,0.45,2.64,0.03,5.59,0.29,8.36,0.4,21.7,0.3,🟢 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,230,True,2c3e624962b1a3f3fbf52e15969565caa7bc064a,True,True,2024-06-12,2024-03-23,False,True,mistral-community/Mistral-7B-v0.2,0,mistral-community/Mistral-7B-v0.2 -🟢,mistralai/Mistral-7B-v0.3,14.15,22.66,0.23,23.95,0.45,2.64,0.03,5.59,0.29,8.36,0.4,21.7,0.3,🟢 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,367,True,b67d6a03ca097c5122fa65904fce0413500bf8c8,True,True,2024-06-12,2024-05-22,False,True,mistralai/Mistral-7B-v0.3,0,mistralai/Mistral-7B-v0.3 🟢,awnr/Mistral-7B-v0.1-signtensors-7-over-16,14.15,22.94,0.23,21.04,0.43,3.25,0.03,7.16,0.3,7.93,0.4,22.56,0.3,🟢 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,0e1f2cb0a81c38fc6c567d9c007883ab62fae266,True,True,2024-07-29,2024-07-29,False,False,awnr/Mistral-7B-v0.1-signtensors-7-over-16,0,awnr/Mistral-7B-v0.1-signtensors-7-over-16 +🤝,LilRg/ECE-1B-merge-PRYMMAL,14.15,27.12,0.27,19.14,0.42,8.01,0.08,4.14,0.28,5.28,0.38,21.18,0.29,🤝 base merges and 
moerges,Qwen2ForCausalLM,Original,bfloat16,False,apache-2.0,1,0,True,009c75039786c38e2a6168cf93c9a46a4d111fb9,True,True,2024-10-07,2024-10-07,False,False,LilRg/ECE-1B-merge-PRYMMAL,1,LilRg/ECE-1B-merge-PRYMMAL (Merge) 🔶,netcat420/MFANNv0.19,14.14,30.57,0.31,24.92,0.47,2.64,0.03,7.61,0.31,2.72,0.35,16.36,0.25,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama3.1,8,0,True,af26a25549b7ad291766c479bebda58f15fbff42,True,True,2024-07-27,2024-07-27,False,False,netcat420/MFANNv0.19,0,netcat420/MFANNv0.19 🤝,johnsutor/Llama-3-8B-Instruct_dare_linear,14.12,21.45,0.21,19.61,0.43,0.0,0.0,6.15,0.3,21.81,0.5,15.72,0.24,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,8,0,True,abb81fd8fdc2ad32f65befcb7ae369c9837cd563,True,True,2024-06-26,2024-06-07,False,False,johnsutor/Llama-3-8B-Instruct_dare_linear,1,johnsutor/Llama-3-8B-Instruct_dare_linear (Merge) 🔶,Epiculous/Azure_Dusk-v0.2,14.03,34.67,0.35,17.4,0.41,1.66,0.02,1.45,0.26,6.37,0.38,22.6,0.3,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,12,7,True,ebddf1b2efbe7f9cae066d263b0991ded89c88e8,True,True,2024-09-14,2024-09-09,True,False,Epiculous/Azure_Dusk-v0.2,0,Epiculous/Azure_Dusk-v0.2 @@ -1039,11 +1049,11 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,openchat/openchat_v3.2,13.81,29.81,0.3,20.32,0.43,1.13,0.01,2.68,0.27,13.1,0.43,15.8,0.24,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama2,13,42,True,acc7ce92558681e749678648189812f15c1465fe,True,True,2024-06-12,2023-07-30,False,True,openchat/openchat_v3.2,0,openchat/openchat_v3.2 💬,yam-peleg/Hebrew-Gemma-11B-Instruct,13.81,30.21,0.3,16.86,0.4,5.06,0.05,3.47,0.28,9.97,0.41,17.27,0.26,"💬 chat models (RLHF, DPO, IFT, ...)",GemmaForCausalLM,Original,float16,True,other,10,22,True,a40259d1efbcac4829ed44d3b589716f615ed362,True,True,2024-07-31,2024-03-06,True,False,yam-peleg/Hebrew-Gemma-11B-Instruct,0,yam-peleg/Hebrew-Gemma-11B-Instruct 🔶,sequelbox/Llama3.1-8B-PlumMath,13.8,22.42,0.22,16.45,0.4,3.93,0.04,9.06,0.32,8.98,0.39,21.95,0.3,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,False,llama3.1,8,0,True,b857c30a626f7c020fcba89df7bece4bb7381ac2,True,True,2024-10-03,2024-10-01,False,False,sequelbox/Llama3.1-8B-PlumMath,1,sequelbox/Llama3.1-8B-PlumMath (Merge) -🟢,meta-llama/Meta-Llama-3.1-8B,13.78,12.7,0.13,25.29,0.47,4.61,0.05,6.15,0.3,8.98,0.38,24.95,0.32,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,911,True,e5c39e551424c763dbc3e58e32ef2999d33a6d8d,True,True,2024-07-23,2024-07-14,True,True,meta-llama/Meta-Llama-3.1-8B,0,meta-llama/Meta-Llama-3.1-8B +🟢,meta-llama/Meta-Llama-3.1-8B,13.78,12.7,0.13,25.29,0.47,4.61,0.05,6.15,0.3,8.98,0.38,24.95,0.32,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3.1,8,912,True,e5c39e551424c763dbc3e58e32ef2999d33a6d8d,True,True,2024-07-23,2024-07-14,True,True,meta-llama/Meta-Llama-3.1-8B,0,meta-llama/Meta-Llama-3.1-8B 🔶,AI-Sweden-Models/Llama-3-8B-instruct,13.78,24.01,0.24,18.39,0.42,0.45,0.0,2.13,0.27,19.94,0.48,17.75,0.26,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,9,True,4e1c955228bdb4d69c1c4560e8d5872312a8f033,True,True,2024-06-27,2024-06-01,True,False,AI-Sweden-Models/Llama-3-8B-instruct,2,meta-llama/Meta-Llama-3-8B 🟢,tiiuae/falcon-11B,13.78,32.61,0.33,21.94,0.44,2.34,0.02,2.8,0.27,7.53,0.4,15.44,0.24,🟢 
pretrained,FalconForCausalLM,Original,bfloat16,True,unknown,11,207,True,066e3bf4e2d9aaeefa129af0a6d39727d27816b3,True,True,2024-06-09,2024-05-09,False,True,tiiuae/falcon-11B,0,tiiuae/falcon-11B 🟩,princeton-nlp/Llama-3-8B-ProLong-64k-Base,13.77,12.49,0.12,25.02,0.47,5.82,0.06,4.81,0.29,9.1,0.39,25.4,0.33,🟩 continuously pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3,8,4,True,97994d6918f80162a893e22d5e7bba586551f941,True,True,2024-09-17,2024-07-22,False,False,princeton-nlp/Llama-3-8B-ProLong-64k-Base,1,princeton-nlp/Llama-3-8B-ProLong-64k-Base (Merge) -💬,meta-llama/Llama-3.2-1B-Instruct,13.76,56.98,0.57,8.74,0.35,2.95,0.03,3.36,0.28,2.97,0.33,7.58,0.17,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3.2,1,296,True,d0a2081ed47e20ce524e8bc5d132f3fad2f69ff0,True,True,2024-09-23,2024-09-18,True,True,meta-llama/Llama-3.2-1B-Instruct,0,meta-llama/Llama-3.2-1B-Instruct +💬,meta-llama/Llama-3.2-1B-Instruct,13.76,56.98,0.57,8.74,0.35,2.95,0.03,3.36,0.28,2.97,0.33,7.58,0.17,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3.2,1,298,True,d0a2081ed47e20ce524e8bc5d132f3fad2f69ff0,True,True,2024-09-23,2024-09-18,True,True,meta-llama/Llama-3.2-1B-Instruct,0,meta-llama/Llama-3.2-1B-Instruct 🟢,awnr/Mistral-7B-v0.1-signtensors-3-over-8,13.73,23.94,0.24,20.44,0.43,2.79,0.03,7.16,0.3,5.79,0.38,22.24,0.3,🟢 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,fa368f705ace05da2fef25c030fe740cf1fef176,True,True,2024-07-29,2024-07-29,False,False,awnr/Mistral-7B-v0.1-signtensors-3-over-8,0,awnr/Mistral-7B-v0.1-signtensors-3-over-8 💬,Goekdeniz-Guelmez/Josiefied-Qwen2.5-1.5B-Instruct-abliterated-v2,13.62,42.16,0.42,16.5,0.4,0.98,0.01,0.0,0.24,4.71,0.38,17.35,0.26,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,1,1,True,ff4a6eff69adb015dfcfbff7a2d2dc43b34afe89,True,True,2024-09-28,2024-09-28,True,False,Goekdeniz-Guelmez/Josiefied-Qwen2.5-1.5B-Instruct-abliterated-v2,2,Qwen/Qwen2.5-1.5B 🟢,Qwen/Qwen2.5-1.5B,13.6,26.74,0.27,16.66,0.41,7.63,0.08,4.7,0.29,5.27,0.36,20.61,0.29,🟢 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,1,27,True,e5dfabbcffd9b0c7b31d89b82c5a6b72e663f32c,True,True,2024-09-19,2024-09-15,False,True,Qwen/Qwen2.5-1.5B,0,Qwen/Qwen2.5-1.5B @@ -1056,11 +1066,11 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,pankajmathur/orca_mini_v3_7b,13.52,28.21,0.28,17.84,0.41,0.3,0.0,0.0,0.25,22.71,0.5,12.04,0.21,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,7,40,True,6252eb7ca29da8d951ae7d2bca948bf84e04a2b9,True,True,2024-06-26,2023-08-07,False,False,pankajmathur/orca_mini_v3_7b,0,pankajmathur/orca_mini_v3_7b 💬,Goekdeniz-Guelmez/Josiefied-Qwen2.5-1.5B-Instruct-abliterated-v3,13.52,42.53,0.43,16.44,0.41,0.6,0.01,0.0,0.24,4.24,0.37,17.29,0.26,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,1,1,True,03ffa6f7a6ada9d63d838707c597297f048d409b,True,True,2024-09-28,2024-09-28,True,False,Goekdeniz-Guelmez/Josiefied-Qwen2.5-1.5B-Instruct-abliterated-v3,3,Qwen/Qwen2.5-1.5B 🟢,google/recurrentgemma-9b,13.5,31.16,0.31,15.32,0.4,5.36,0.05,4.7,0.29,6.6,0.38,17.83,0.26,🟢 pretrained,RecurrentGemmaForCausalLM,Original,bfloat16,True,gemma,9,59,True,7b0ed98fb889ba8bdfa7c690f08f2e57a7c48dae,True,True,2024-07-04,2024-06-07,False,True,google/recurrentgemma-9b,0,google/recurrentgemma-9b -🔶,google/flan-t5-xxl,13.49,22.0,0.22,30.12,0.51,0.0,0.0,2.68,0.27,11.19,0.42,14.92,0.23,🔶 
fine-tuned on domain-specific datasets,T5ForConditionalGeneration,Original,float16,True,apache-2.0,11,1176,True,ae7c9136adc7555eeccc78cdd960dfd60fb346ce,True,True,2024-09-06,2022-10-21,False,True,google/flan-t5-xxl,0,google/flan-t5-xxl +🔶,google/flan-t5-xxl,13.49,22.0,0.22,30.12,0.51,0.0,0.0,2.68,0.27,11.19,0.42,14.92,0.23,🔶 fine-tuned on domain-specific datasets,T5ForConditionalGeneration,Original,float16,True,apache-2.0,11,1177,True,ae7c9136adc7555eeccc78cdd960dfd60fb346ce,True,True,2024-09-06,2022-10-21,False,True,google/flan-t5-xxl,0,google/flan-t5-xxl 🔶,chujiezheng/Mistral7B-PairRM-SPPO-ExPO,13.47,36.73,0.37,13.68,0.39,0.91,0.01,3.58,0.28,8.66,0.41,17.24,0.26,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,0,True,d3e8342a63e5ae096f450f2467a92168db12768c,True,True,2024-09-21,2024-05-04,True,False,chujiezheng/Mistral7B-PairRM-SPPO-ExPO,0,chujiezheng/Mistral7B-PairRM-SPPO-ExPO 🔶,LeroyDyer/SpydazWeb_AI_CyberTron_Ultra_7b,13.47,15.56,0.16,27.75,0.48,0.76,0.01,5.7,0.29,10.3,0.41,20.73,0.29,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,float16,True,apache-2.0,7,4,True,50c69e539578ab5384eb018a60cc1268637becae,True,True,2024-07-12,2024-04-14,False,False,LeroyDyer/SpydazWeb_AI_CyberTron_Ultra_7b,1,LeroyDyer/Mixtral_AI_CyberTron_Ultra 🟩,NousResearch/Yarn-Mistral-7b-64k,13.43,20.8,0.21,20.23,0.43,3.02,0.03,5.37,0.29,9.88,0.41,21.27,0.29,🟩 continuously pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,49,True,0273c624561fcecc8e8f4030492a9307aa60f945,True,True,2024-06-12,2023-10-31,False,True,NousResearch/Yarn-Mistral-7b-64k,0,NousResearch/Yarn-Mistral-7b-64k -🟢,meta-llama/Meta-Llama-3-8B,13.41,14.55,0.15,24.5,0.46,3.25,0.03,7.38,0.31,6.24,0.36,24.55,0.32,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3,8,5718,True,62bd457b6fe961a42a631306577e622c83876cb6,True,True,2024-06-12,2024-04-17,False,True,meta-llama/Meta-Llama-3-8B,0,meta-llama/Meta-Llama-3-8B +🟢,meta-llama/Meta-Llama-3-8B,13.41,14.55,0.15,24.5,0.46,3.25,0.03,7.38,0.31,6.24,0.36,24.55,0.32,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3,8,5719,True,62bd457b6fe961a42a631306577e622c83876cb6,True,True,2024-06-12,2024-04-17,False,True,meta-llama/Meta-Llama-3-8B,0,meta-llama/Meta-Llama-3-8B 🔶,Dans-DiscountModels/Dans-Instruct-Mix-8b-ChatML,13.4,8.25,0.08,26.34,0.47,4.76,0.05,5.93,0.29,9.68,0.39,25.42,0.33,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,0,True,029d84d4f4a618aa798490c046753b12801158e2,True,True,2024-09-14,2024-09-09,False,False,Dans-DiscountModels/Dans-Instruct-Mix-8b-ChatML,1,Dans-DiscountModels/Meta-Llama-3.1-8B-ChatML 🔶,xinchen9/Llama3.1_CoT,13.35,22.46,0.22,19.9,0.43,1.51,0.02,5.15,0.29,11.77,0.43,19.32,0.27,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,8,0,True,3cb467f51a59ff163bb942fcde3ef60573c12b79,True,True,2024-09-06,2024-09-04,True,False,xinchen9/Llama3.1_CoT,0,xinchen9/Llama3.1_CoT 🔶,Alibaba-NLP/gte-Qwen2-7B-instruct,13.34,22.55,0.23,21.93,0.45,3.47,0.03,0.0,0.24,6.32,0.36,25.79,0.33,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,7,171,True,e26182b2122f4435e8b3ebecbf363990f409b45b,True,True,2024-08-05,2024-06-15,True,False,Alibaba-NLP/gte-Qwen2-7B-instruct,0,Alibaba-NLP/gte-Qwen2-7B-instruct @@ -1092,7 +1102,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 
🟢,Qwen/Qwen1.5-MoE-A2.7B,12.42,26.6,0.27,18.84,0.41,0.15,0.0,1.23,0.26,7.97,0.4,19.75,0.28,🟢 pretrained,Qwen2MoeForCausalLM,Original,bfloat16,True,other,14,190,True,1a758c50ecb6350748b9ce0a99d2352fd9fc11c9,True,False,2024-06-13,2024-02-29,False,True,Qwen/Qwen1.5-MoE-A2.7B,0,Qwen/Qwen1.5-MoE-A2.7B 💬,occiglot/occiglot-7b-es-en-instruct,12.37,34.85,0.35,17.24,0.41,1.89,0.02,1.23,0.26,4.45,0.37,14.56,0.23,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,float16,True,apache-2.0,7,2,True,5858f6ee118eef70896f1870fd61052348ff571e,True,True,2024-09-02,2024-03-05,True,False,occiglot/occiglot-7b-es-en-instruct,0,occiglot/occiglot-7b-es-en-instruct 🔶,netcat420/MFANN3bv0.20,12.36,21.93,0.22,22.79,0.45,1.36,0.01,1.23,0.26,10.17,0.41,16.67,0.25,🔶 fine-tuned on domain-specific datasets,PhiForCausalLM,Original,float16,True,mit,2,0,True,ac8ba24559cbdb5704d77b602580d911c265fdee,True,True,2024-08-29,2024-08-29,False,False,netcat420/MFANN3bv0.20,2,netcat420/MFANN3bv0.19.12 (Merge) -🤝,Marsouuu/MiniMathExpert-2_61B-ECE-PRYMMAL-Martial,12.36,25.48,0.25,15.3,0.4,6.57,0.07,3.36,0.28,9.27,0.41,14.15,0.23,🤝 base merges and moerges,Gemma2ForCausalLM,Original,bfloat16,False,apache-2.0,2,0,True,df21939a22e7233ebb7d62dfaf1c854facc5c772,True,True,2024-10-06,2024-10-06,False,False,Marsouuu/MiniMathExpert-2_61B-ECE-PRYMMAL-Martial,1,Marsouuu/MiniMathExpert-2_61B-ECE-PRYMMAL-Martial (Merge) +🤝,Marsouuu/MiniMathExpert-2_61B-ECE-PRYMMAL-Martial,12.36,25.48,0.25,15.3,0.4,6.57,0.07,3.36,0.28,9.27,0.41,14.15,0.23,🤝 base merges and moerges,Gemma2ForCausalLM,Original,bfloat16,False,apache-2.0,2,1,True,df21939a22e7233ebb7d62dfaf1c854facc5c772,True,True,2024-10-06,2024-10-06,False,False,Marsouuu/MiniMathExpert-2_61B-ECE-PRYMMAL-Martial,1,Marsouuu/MiniMathExpert-2_61B-ECE-PRYMMAL-Martial (Merge) 💬,stabilityai/stablelm-zephyr-3b,12.33,36.83,0.37,14.76,0.39,4.08,0.04,0.0,0.24,9.79,0.42,8.53,0.18,"💬 chat models (RLHF, DPO, IFT, ...)",StableLmForCausalLM,Original,bfloat16,True,other,2,244,True,a14f62d95754d96aea2be6e24c0f6966636797b9,True,True,2024-06-12,2023-11-21,True,True,stabilityai/stablelm-zephyr-3b,0,stabilityai/stablelm-zephyr-3b 💬,Columbia-NLP/LION-Gemma-2b-sft-v1.0,12.33,36.92,0.37,14.12,0.39,5.14,0.05,0.78,0.26,8.31,0.4,8.69,0.18,"💬 chat models (RLHF, DPO, IFT, ...)",GemmaForCausalLM,Original,bfloat16,True,,2,0,False,44d6f26fa7e3b0d238064d844569bf8a07b7515e,True,True,2024-07-04,2024-07-02,True,False,Columbia-NLP/LION-Gemma-2b-sft-v1.0,0,Columbia-NLP/LION-Gemma-2b-sft-v1.0 💬,Qwen/Qwen1.5-4B-Chat,12.33,31.57,0.32,16.3,0.4,0.98,0.01,2.24,0.27,7.36,0.4,15.51,0.24,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,3,38,True,a7a4d4945d28bac955554c9abd2f74a71ebbf22f,True,True,2024-06-12,2024-01-30,True,True,Qwen/Qwen1.5-4B-Chat,0,Qwen/Qwen1.5-4B-Chat @@ -1113,6 +1123,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🤝,gaverfraxz/Meta-Llama-3.1-8B-Instruct-HalfAbliterated-DELLA,11.92,40.09,0.4,15.28,0.4,0.83,0.01,4.59,0.28,3.46,0.37,7.27,0.17,🤝 base merges and moerges,LlamaForCausalLM,Original,float16,False,llama3.1,8,0,True,6b0271a98b8875a65972ed54b0d636d8236ea60b,True,True,2024-09-23,2024-09-22,False,False,gaverfraxz/Meta-Llama-3.1-8B-Instruct-HalfAbliterated-DELLA,1,gaverfraxz/Meta-Llama-3.1-8B-Instruct-HalfAbliterated-DELLA (Merge) 🟢,01-ai/Yi-6B-200K,11.9,8.43,0.08,20.15,0.43,1.21,0.01,4.25,0.28,16.84,0.46,20.49,0.28,🟢 
pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,6,173,True,4a74338e778a599f313e9fa8f5bc08c717604420,True,True,2024-06-12,2023-11-06,False,True,01-ai/Yi-6B-200K,0,01-ai/Yi-6B-200K 🟩,dicta-il/dictalm2.0,11.84,24.13,0.24,16.49,0.4,1.51,0.02,5.59,0.29,5.51,0.38,17.83,0.26,🟩 continuously pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,10,True,f8ab3208e95a7b44a9a2fbb9bbbdd8ea11be509d,True,True,2024-07-31,2024-04-10,False,False,dicta-il/dictalm2.0,0,dicta-il/dictalm2.0 +🔶,gupta-tanish/llama-7b-dpo-baseline,11.84,26.93,0.27,14.38,0.39,1.89,0.02,1.68,0.26,14.77,0.45,11.42,0.2,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,6,0,True,1b5f1ef3ffa3b550619fbf64c33b6fd79e1bd559,True,True,2024-09-29,2024-09-29,False,False,gupta-tanish/llama-7b-dpo-baseline,1,gupta-tanish/llama-7b-dpo-baseline (Merge) 🔶,monsterapi/gemma-2-2b-LoRA-MonsterInstruct,11.84,39.03,0.39,11.97,0.36,0.98,0.01,2.68,0.27,5.41,0.36,10.97,0.2,🔶 fine-tuned on domain-specific datasets,Gemma2ForCausalLM,Original,float16,True,gemma,2,0,True,6422e27e96e15cf93b966c973aacc15f8a27a458,True,True,2024-08-05,2024-08-03,True,False,monsterapi/gemma-2-2b-LoRA-MonsterInstruct,0,monsterapi/gemma-2-2b-LoRA-MonsterInstruct 🔶,LilRg/ECE_Finetunning,11.84,4.45,0.04,26.53,0.47,3.63,0.04,4.36,0.28,7.69,0.38,24.35,0.32,🔶 fine-tuned on domain-specific datasets,?,Adapter,float16,True,apache-2.0,16,0,True,8d10549bcf802355f2d6203a33ed27e81b15b9e5,True,True,2024-09-28,2024-09-28,False,False,LilRg/ECE_Finetunning,3,meta-llama/Meta-Llama-3.1-8B 🔶,Unbabel/TowerInstruct-Mistral-7B-v0.2,11.83,28.43,0.28,14.22,0.39,1.59,0.02,0.0,0.25,15.96,0.45,10.76,0.2,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,float16,True,cc-by-nc-4.0,7,8,True,454bdfedc8b51f292a402aba2c560df145a0817d,True,True,2024-09-06,2024-03-26,False,False,Unbabel/TowerInstruct-Mistral-7B-v0.2,0,Unbabel/TowerInstruct-Mistral-7B-v0.2 @@ -1150,7 +1161,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 💬,dfurman/Llama-3-8B-Orpo-v0.1,11.01,30.0,0.3,13.77,0.39,3.78,0.04,1.57,0.26,2.73,0.36,14.23,0.23,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,llama3,8,1,True,f02aef830e12a50892ac065826d5eb3dfc7675d1,True,True,2024-08-30,2024-04-26,True,False,dfurman/Llama-3-8B-Orpo-v0.1,1,dfurman/Llama-3-8B-Orpo-v0.1 (Merge) 💬,meta-llama/Llama-2-13b-chat-hf,11.0,39.85,0.4,7.16,0.33,0.6,0.01,0.0,0.23,8.16,0.4,10.26,0.19,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,llama2,13,1019,True,a2cb7a712bb6e5e736ca7f8cd98167f81a0b5bd8,True,True,2024-06-12,2023-07-13,True,True,meta-llama/Llama-2-13b-chat-hf,0,meta-llama/Llama-2-13b-chat-hf 🟢,meta-llama/Llama-2-13b-hf,10.99,24.82,0.25,17.22,0.41,1.06,0.01,4.14,0.28,3.39,0.35,15.31,0.24,🟢 pretrained,LlamaForCausalLM,Original,float16,True,llama2,13,569,True,5c31dfb671ce7cfe2d7bb7c04375e44c55e815b1,True,True,2024-06-12,2023-07-13,False,True,meta-llama/Llama-2-13b-hf,0,meta-llama/Llama-2-13b-hf -💬,THUDM/glm-4-9b-chat,10.97,0.0,0.0,25.21,0.47,0.0,0.0,8.5,0.31,8.06,0.4,24.07,0.32,"💬 chat models (RLHF, DPO, IFT, ...)",ChatGLMModelM,Original,bfloat16,True,other,9,578,True,04419001bc63e05e70991ade6da1f91c4aeec278,True,True,2024-07-09,2024-06-04,True,False,THUDM/glm-4-9b-chat,0,THUDM/glm-4-9b-chat +💬,THUDM/glm-4-9b-chat,10.97,0.0,0.0,25.21,0.47,0.0,0.0,8.5,0.31,8.06,0.4,24.07,0.32,"💬 chat models (RLHF, DPO, IFT, 
...)",ChatGLMModelM,Original,bfloat16,True,other,9,579,True,04419001bc63e05e70991ade6da1f91c4aeec278,True,True,2024-07-09,2024-06-04,True,False,THUDM/glm-4-9b-chat,0,THUDM/glm-4-9b-chat 🔶,winglian/Llama-3-8b-64k-PoSE,10.89,28.57,0.29,13.31,0.37,2.64,0.03,1.45,0.26,3.08,0.34,16.3,0.25,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,,8,74,False,5481d9b74a3ec5a95789673e194c8ff86e2bc2bc,True,True,2024-06-26,2024-04-24,True,False,winglian/Llama-3-8b-64k-PoSE,0,winglian/Llama-3-8b-64k-PoSE 🔶,FuJhen/mistral_7b_v0.1_structedData_e2e,10.87,17.27,0.17,18.06,0.41,0.23,0.0,3.91,0.28,5.64,0.37,20.12,0.28,🔶 fine-tuned on domain-specific datasets,?,Adapter,bfloat16,True,apache-2.0,7,0,True,7231864981174d9bee8c7687c24c8344414eae6b,True,True,2024-09-13,2024-09-13,False,False,FuJhen/mistral_7b_v0.1_structedData_e2e,1,FuJhen/mistral_7b_v0.1_structedData_e2e (Merge) 🟢,nlpguy/Mistral-NeMo-Minitron-Upscale-v1,10.86,16.48,0.16,22.07,0.45,0.68,0.01,4.03,0.28,4.84,0.38,17.08,0.25,🟢 pretrained,MistralForCausalLM,Original,bfloat16,False,other,12,0,True,9e6d747cbb81e1f25915a0f42802cbeb85b61c3e,True,True,2024-09-29,2024-09-29,False,False,nlpguy/Mistral-NeMo-Minitron-Upscale-v1,1,nlpguy/Mistral-NeMo-Minitron-Upscale-v1 (Merge) @@ -1176,7 +1187,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🟩,h2oai/h2o-danube3-4b-base,9.99,23.38,0.23,10.56,0.36,1.66,0.02,5.48,0.29,6.53,0.38,12.33,0.21,🟩 continuously pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,3,19,True,6bdf2f1e317143c998b88d9e9d72facc621a863f,True,True,2024-08-10,2024-07-04,False,False,h2oai/h2o-danube3-4b-base,0,h2oai/h2o-danube3-4b-base 💬,AALF/gemma-2-27b-it-SimPO-37K-100steps,9.89,25.68,0.26,15.26,0.39,0.0,0.0,5.15,0.29,0.78,0.33,12.5,0.21,"💬 chat models (RLHF, DPO, IFT, ...)",Gemma2ForCausalLM,Original,bfloat16,True,gemma,27,8,True,d5cbf18b2eb90b77f5ddbb74cfcaeedfa692c90c,True,True,2024-09-21,2024-08-13,True,False,AALF/gemma-2-27b-it-SimPO-37K-100steps,2,google/gemma-2-27b 🔶,sequelbox/Llama3.1-8B-PlumCode,9.77,20.45,0.2,8.5,0.34,2.42,0.02,3.47,0.28,8.97,0.38,14.84,0.23,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,,8,0,False,171cd599d574000607491f08e6cf7b7eb199e33d,True,True,2024-10-03,2024-10-02,False,False,sequelbox/Llama3.1-8B-PlumCode,1,sequelbox/Llama3.1-8B-PlumCode (Merge) -💬,cognitivecomputations/dolphin-2.9.4-gemma2-2b,9.71,8.96,0.09,17.37,0.41,4.15,0.04,4.59,0.28,10.91,0.42,12.28,0.21,"💬 chat models (RLHF, DPO, IFT, ...)",Gemma2ForCausalLM,Original,bfloat16,True,gemma,2,23,True,5c0854beb88a6711221771d1b13d51f733e6ca06,True,True,2024-08-25,2024-08-24,True,True,cognitivecomputations/dolphin-2.9.4-gemma2-2b,1,google/gemma-2-2b +💬,cognitivecomputations/dolphin-2.9.4-gemma2-2b,9.71,8.96,0.09,17.37,0.41,4.15,0.04,4.59,0.28,10.91,0.42,12.28,0.21,"💬 chat models (RLHF, DPO, IFT, ...)",Gemma2ForCausalLM,Original,bfloat16,True,gemma,2,24,True,5c0854beb88a6711221771d1b13d51f733e6ca06,True,True,2024-08-25,2024-08-24,True,True,cognitivecomputations/dolphin-2.9.4-gemma2-2b,1,google/gemma-2-2b 🔶,uukuguy/speechless-coder-ds-6.7b,9.64,25.05,0.25,15.9,0.4,1.66,0.02,1.9,0.26,5.34,0.38,7.99,0.17,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,6,5,True,c813a5268c6dfe267a720ad3b51773f1ab0feb59,True,True,2024-06-26,2023-12-30,False,False,uukuguy/speechless-coder-ds-6.7b,0,uukuguy/speechless-coder-ds-6.7b 🔶,NotASI/FineTome-Llama3.2-1B-0929,9.56,39.91,0.4,5.74,0.32,1.28,0.01,3.02,0.27,2.66,0.35,4.76,0.14,🔶 fine-tuned on 
domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama3.2,1,1,True,61c8742238d0cfe68a0a3f61326b84cd6624ad02,True,True,2024-10-04,2024-09-29,True,False,NotASI/FineTome-Llama3.2-1B-0929,2,meta-llama/Llama-3.2-1B-Instruct 🔶,teknium/OpenHermes-7B,9.48,18.13,0.18,12.08,0.36,1.06,0.01,2.57,0.27,12.68,0.43,10.37,0.19,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,mit,7,13,True,9f55d6eb15f1edd52ee1fd863a220aa682e78a00,True,True,2024-06-12,2023-09-14,False,True,teknium/OpenHermes-7B,1,NousResearch/Llama-2-7b-hf @@ -1193,15 +1204,16 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🟢,ai21labs/Jamba-v0.1,9.12,20.26,0.2,10.72,0.36,0.98,0.01,2.46,0.27,3.71,0.36,16.57,0.25,🟢 pretrained,JambaForCausalLM,Original,bfloat16,True,apache-2.0,51,1171,True,ce13f3fe99555a2606d1892665bb67649032ff2d,True,False,2024-09-16,2024-03-28,True,True,ai21labs/Jamba-v0.1,0,ai21labs/Jamba-v0.1 💬,SenseLLM/ReflectionCoder-DS-33B,9.03,37.87,0.38,8.34,0.34,2.04,0.02,3.24,0.27,0.46,0.33,2.24,0.12,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,33,4,True,07ae97a21fbef0503294e1eb258ce0a308b8dc35,True,True,2024-09-15,2024-05-28,True,False,SenseLLM/ReflectionCoder-DS-33B,0,SenseLLM/ReflectionCoder-DS-33B 💬,Qwen/Qwen1.5-1.8B-Chat,9.01,20.19,0.2,5.91,0.33,0.45,0.0,6.38,0.3,12.18,0.43,8.93,0.18,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,other,1,44,True,e482ee3f73c375a627a16fdf66fd0c8279743ca6,True,True,2024-06-12,2024-01-30,True,True,Qwen/Qwen1.5-1.8B-Chat,0,Qwen/Qwen1.5-1.8B-Chat +🔶,NotASI/FineTome-v1.5-Llama3.2-1B-1007,8.94,39.24,0.39,5.8,0.32,1.36,0.01,0.0,0.25,2.5,0.35,4.74,0.14,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,,1,1,False,5e329d987e9f74dd2703a4fefa56ab8c72b5702b,True,True,2024-10-07,2024-10-07,True,False,NotASI/FineTome-v1.5-Llama3.2-1B-1007,1,NotASI/FineTome-v1.5-Llama3.2-1B-1007 (Merge) 🟢,CortexLM/btlm-7b-base-v0.2,8.84,14.83,0.15,16.19,0.4,1.06,0.01,0.45,0.25,5.54,0.38,15.0,0.23,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,mit,6,1,True,eda8b4298365a26c8981316e09427c237b11217f,True,True,2024-06-26,2024-06-13,False,False,CortexLM/btlm-7b-base-v0.2,0,CortexLM/btlm-7b-base-v0.2 💬,0-hero/Matter-0.2-7B-DPO,8.81,33.03,0.33,10.06,0.36,0.83,0.01,1.23,0.26,5.87,0.38,1.82,0.12,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,3,True,26a66f0d862e2024ce4ad0a09c37052ac36e8af6,True,True,2024-08-05,2024-04-13,True,False,0-hero/Matter-0.2-7B-DPO,0,0-hero/Matter-0.2-7B-DPO 🔶,TencentARC/LLaMA-Pro-8B,8.78,22.77,0.23,9.29,0.35,1.66,0.02,1.34,0.26,8.59,0.4,9.01,0.18,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama2,8,171,True,7115e7179060e0623d1ee9ff4476faed7e478d8c,True,True,2024-06-12,2024-01-05,False,True,TencentARC/LLaMA-Pro-8B,0,TencentARC/LLaMA-Pro-8B 🤝,wave-on-discord/qwent-7b,8.73,20.15,0.2,18.07,0.42,0.0,0.0,2.01,0.27,5.47,0.38,6.7,0.16,🤝 base merges and moerges,Qwen2ForCausalLM,Original,bfloat16,True,,7,0,False,40000e76d2a4d0ad054aff9fe873c5beb0e4925e,True,True,2024-09-30,2024-09-30,False,False,wave-on-discord/qwent-7b,1,wave-on-discord/qwent-7b (Merge) 🟢,meta-llama/Llama-2-7b-hf,8.72,25.19,0.25,10.35,0.35,1.21,0.01,2.24,0.27,3.76,0.37,9.57,0.19,🟢 pretrained,LlamaForCausalLM,Original,float16,True,llama2,6,1710,True,01c7f73d771dfac7d292323805ebc428287df4f9,True,True,2024-06-12,2023-07-13,False,True,meta-llama/Llama-2-7b-hf,0,meta-llama/Llama-2-7b-hf 
🟢,awnr/Mistral-7B-v0.1-signtensors-1-over-4,8.71,21.33,0.21,9.23,0.35,2.27,0.02,2.68,0.27,2.19,0.35,14.56,0.23,🟢 pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,1,True,b288ab9d8adfd2963a44a7935bb47649f55bcbee,True,True,2024-07-29,2024-07-29,False,False,awnr/Mistral-7B-v0.1-signtensors-1-over-4,0,awnr/Mistral-7B-v0.1-signtensors-1-over-4 -🔶,Replete-AI/Replete-LLM-V2.5-Qwen-0.5b,8.67,28.47,0.28,8.41,0.33,2.49,0.02,2.24,0.27,0.78,0.32,9.62,0.19,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,,0,0,False,aae2e55548c8090ce357c64ca78e8b9ef6baf118,True,True,2024-09-29,,False,False,Replete-AI/Replete-LLM-V2.5-Qwen-0.5b,0,Removed +🔶,rombodawg/Rombos-LLM-V2.5-Qwen-0.5b,8.67,28.47,0.28,8.41,0.33,2.49,0.02,2.24,0.27,0.78,0.32,9.62,0.19,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,0,1,True,aae2e55548c8090ce357c64ca78e8b9ef6baf118,True,True,2024-09-29,2024-10-06,False,False,rombodawg/Rombos-LLM-V2.5-Qwen-0.5b,1,rombodawg/Rombos-LLM-V2.5-Qwen-0.5b (Merge) 💬,stabilityai/stablelm-2-1_6b-chat,8.63,30.6,0.31,7.49,0.34,1.06,0.01,0.0,0.25,5.71,0.36,6.91,0.16,"💬 chat models (RLHF, DPO, IFT, ...)",StableLmForCausalLM,Original,bfloat16,True,other,1,30,True,f3fe67057c2789ae1bb1fe42b038da99840d4f13,True,True,2024-06-12,2024-04-08,True,True,stabilityai/stablelm-2-1_6b-chat,0,stabilityai/stablelm-2-1_6b-chat -🟢,meta-llama/Llama-3.2-3B,8.58,13.37,0.13,14.23,0.39,1.21,0.01,2.35,0.27,3.81,0.36,16.53,0.25,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3.2,3,155,True,95c102307f55fbd6d18ddf28bfbcb537ffdc2806,True,True,2024-09-27,2024-09-18,False,True,meta-llama/Llama-3.2-3B,0,meta-llama/Llama-3.2-3B +🟢,meta-llama/Llama-3.2-3B,8.58,13.37,0.13,14.23,0.39,1.21,0.01,2.35,0.27,3.81,0.36,16.53,0.25,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3.2,3,159,True,95c102307f55fbd6d18ddf28bfbcb537ffdc2806,True,True,2024-09-27,2024-09-18,False,True,meta-llama/Llama-3.2-3B,0,meta-llama/Llama-3.2-3B 🟢,internlm/internlm2-1_8b,8.58,21.98,0.22,13.63,0.39,1.13,0.01,0.0,0.25,8.23,0.38,6.54,0.16,🟢 pretrained,InternLM2ForCausalLM,Original,bfloat16,True,other,8,28,True,c24f301c7374ad9f9b58d1ea80f68b5f57cbca13,True,True,2024-06-12,2024-01-30,False,True,internlm/internlm2-1_8b,0,internlm/internlm2-1_8b 🤝,TeeZee/DoubleBagel-57B-v1.0,8.54,23.36,0.23,5.52,0.33,0.0,0.0,3.47,0.28,13.6,0.43,5.31,0.15,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,56,1,True,6e10dc1fb5223d1b045dc2a19c9c267a574e520f,True,True,2024-08-10,2024-08-05,True,False,TeeZee/DoubleBagel-57B-v1.0,1,TeeZee/DoubleBagel-57B-v1.0 (Merge) 💬,LenguajeNaturalAI/leniachat-qwen2-1.5B-v0,8.54,22.21,0.22,12.77,0.37,1.06,0.01,1.57,0.26,3.87,0.37,9.78,0.19,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,1,19,True,031a2efebb3cc1150e46f42ba0bea9fa7b855436,True,True,2024-09-30,2024-06-16,True,False,LenguajeNaturalAI/leniachat-qwen2-1.5B-v0,1,Qwen/Qwen2-1.5B @@ -1214,9 +1226,9 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,Nexusflow/NexusRaven-V2-13B,8.3,17.91,0.18,15.34,0.39,1.81,0.02,1.34,0.26,3.71,0.37,9.69,0.19,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,other,13,459,True,cdab7132db4a4fd64513123374ea1451d85a7ace,True,True,2024-06-12,2023-12-04,False,True,Nexusflow/NexusRaven-V2-13B,1,codellama/CodeLlama-13b-Instruct-hf 🟢,nlpguy/Mistral-NeMo-Minitron-Upscale-v2,8.22,15.73,0.16,14.38,0.39,0.53,0.01,3.13,0.27,5.25,0.38,10.29,0.19,🟢 
pretrained,MistralForCausalLM,Original,bfloat16,False,other,12,0,True,4ac077e496705687fdcbe51f3b915be42e91bf79,True,True,2024-09-29,2024-09-29,False,False,nlpguy/Mistral-NeMo-Minitron-Upscale-v2,1,nlpguy/Mistral-NeMo-Minitron-Upscale-v2 (Merge) 🟩,yam-peleg/Hebrew-Mistral-7B-200K,8.21,17.7,0.18,7.67,0.34,2.04,0.02,0.45,0.25,4.42,0.37,16.99,0.25,🟩 continuously pretrained,MistralForCausalLM,Original,bfloat16,True,apache-2.0,7,15,True,7b51c7b31e3d9e29ea964c579a45233cfad255fe,True,True,2024-08-06,2024-05-05,True,False,yam-peleg/Hebrew-Mistral-7B-200K,0,yam-peleg/Hebrew-Mistral-7B-200K -🟢,bigcode/starcoder2-7b,8.21,22.09,0.22,11.4,0.37,2.57,0.03,0.22,0.25,5.82,0.38,7.14,0.16,🟢 pretrained,Starcoder2ForCausalLM,Original,bfloat16,True,bigcode-openrail-m,7,156,True,a3d33687b51284b528abeb17830776ffd24892a9,True,True,2024-06-09,2024-02-20,False,True,bigcode/starcoder2-7b,0,bigcode/starcoder2-7b +🟢,bigcode/starcoder2-7b,8.21,22.09,0.22,11.4,0.37,2.57,0.03,0.22,0.25,5.82,0.38,7.14,0.16,🟢 pretrained,Starcoder2ForCausalLM,Original,bfloat16,True,bigcode-openrail-m,7,157,True,a3d33687b51284b528abeb17830776ffd24892a9,True,True,2024-06-09,2024-02-20,False,True,bigcode/starcoder2-7b,0,bigcode/starcoder2-7b 🔶,togethercomputer/Llama-2-7B-32K-Instruct,8.17,21.3,0.21,8.56,0.34,1.06,0.01,0.22,0.25,9.2,0.41,8.68,0.18,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,llama2,7,160,True,d27380af003252f5eb0d218e104938b4e673e3f3,True,True,2024-06-12,2023-08-08,False,True,togethercomputer/Llama-2-7B-32K-Instruct,0,togethercomputer/Llama-2-7B-32K-Instruct -💬,Qwen/Qwen2.5-0.5B-Instruct,8.14,30.71,0.31,8.43,0.33,0.0,0.0,1.01,0.26,0.94,0.33,7.75,0.17,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,0,54,True,a8b602d9dafd3a75d382e62757d83d89fca3be54,True,True,2024-09-19,2024-09-16,True,True,Qwen/Qwen2.5-0.5B-Instruct,1,Qwen/Qwen2.5-0.5B +💬,Qwen/Qwen2.5-0.5B-Instruct,8.14,30.71,0.31,8.43,0.33,0.0,0.0,1.01,0.26,0.94,0.33,7.75,0.17,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,0,56,True,a8b602d9dafd3a75d382e62757d83d89fca3be54,True,True,2024-09-19,2024-09-16,True,True,Qwen/Qwen2.5-0.5B-Instruct,1,Qwen/Qwen2.5-0.5B 🟢,deepseek-ai/deepseek-llm-7b-base,8.1,21.79,0.22,9.77,0.35,1.21,0.01,3.13,0.27,3.76,0.37,8.96,0.18,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,other,7,35,True,7683fea62db869066ddaff6a41d032262c490d4f,True,True,2024-06-12,2023-11-29,False,True,deepseek-ai/deepseek-llm-7b-base,0,deepseek-ai/deepseek-llm-7b-base 💬,meditsolutions/Llama-3.2-SUN-2.4B-checkpoint-34800,8.04,25.01,0.25,5.47,0.32,0.15,0.0,4.81,0.29,8.85,0.4,3.97,0.14,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,2,0,False,ef65f05f577a69a1992349c8d33c96cd099844f7,True,True,2024-10-05,,True,False,meditsolutions/Llama-3.2-SUN-2.4B-checkpoint-34800,0,Removed 💬,meditsolutions/Llama-3.2-SUN-2.4B-checkpoint-26000,7.94,28.14,0.28,2.9,0.3,0.6,0.01,3.69,0.28,8.49,0.41,3.83,0.13,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,,2,0,False,1300885555ca8bbed20a57cf0ec9f7ae014200c3,True,True,2024-10-04,,True,False,meditsolutions/Llama-3.2-SUN-2.4B-checkpoint-26000,0,Removed @@ -1230,7 +1242,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,VAGOsolutions/SauerkrautLM-Gemma-2b,7.58,24.75,0.25,9.13,0.34,1.96,0.02,0.89,0.26,3.51,0.37,5.21,0.15,🔶 fine-tuned on domain-specific 
datasets,GemmaForCausalLM,Original,bfloat16,True,other,2,8,True,f9d5575c23da96f33ce77dea3b0776746b9469bc,True,True,2024-06-26,2024-03-06,True,False,VAGOsolutions/SauerkrautLM-Gemma-2b,0,VAGOsolutions/SauerkrautLM-Gemma-2b 🔶,KingNish/Qwen2.5-0.5b-Test-ft,7.46,26.71,0.27,6.06,0.32,1.13,0.01,1.79,0.26,1.43,0.34,7.65,0.17,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,float16,True,apache-2.0,0,3,True,f905bb1d37c7853fb5c7157d8d3ad0f062b65c0f,True,True,2024-09-29,2024-09-26,False,False,KingNish/Qwen2.5-0.5b-Test-ft,1,KingNish/Qwen2.5-0.5b-Test-ft (Merge) 🟢,deepseek-ai/deepseek-moe-16b-base,7.37,24.5,0.24,8.36,0.34,1.81,0.02,0.56,0.25,3.36,0.37,5.61,0.15,🟢 pretrained,DeepseekForCausalLM,Original,bfloat16,True,other,16,80,True,521d2bc4fb69a3f3ae565310fcc3b65f97af2580,True,False,2024-06-12,2024-01-08,False,True,deepseek-ai/deepseek-moe-16b-base,0,deepseek-ai/deepseek-moe-16b-base -🟢,google/gemma-2b,7.31,20.38,0.2,8.47,0.34,2.72,0.03,0.67,0.26,7.56,0.4,4.06,0.14,🟢 pretrained,GemmaForCausalLM,Original,bfloat16,True,gemma,2,894,True,2ac59a5d7bf4e1425010f0d457dde7d146658953,True,True,2024-06-12,2024-02-08,False,True,google/gemma-2b,0,google/gemma-2b +🟢,google/gemma-2b,7.31,20.38,0.2,8.47,0.34,2.72,0.03,0.67,0.26,7.56,0.4,4.06,0.14,🟢 pretrained,GemmaForCausalLM,Original,bfloat16,True,gemma,2,895,True,2ac59a5d7bf4e1425010f0d457dde7d146658953,True,True,2024-06-12,2024-02-08,False,True,google/gemma-2b,0,google/gemma-2b 🟢,stabilityai/stablelm-3b-4e1t,7.26,22.03,0.22,9.01,0.35,0.68,0.01,0.0,0.24,4.42,0.38,7.43,0.17,🟢 pretrained,StableLmForCausalLM,Original,bfloat16,True,cc-by-sa-4.0,2,307,True,fa4a6a92fca83c3b4223a3c9bf792887090ebfba,True,True,2024-08-10,2023-09-29,False,True,stabilityai/stablelm-3b-4e1t,0,stabilityai/stablelm-3b-4e1t 🔶,euclaise/ReMask-3B,7.26,24.19,0.24,8.74,0.35,1.74,0.02,2.24,0.27,2.66,0.33,3.97,0.14,🔶 fine-tuned on domain-specific datasets,StableLmForCausalLM,Original,bfloat16,True,cc-by-sa-4.0,2,14,True,e094dae96097c2bc6f758101ee269c089b65a2cf,True,True,2024-08-10,2024-03-28,True,False,euclaise/ReMask-3B,0,euclaise/ReMask-3B 💬,google/gemma-2b-it,7.22,26.9,0.27,5.21,0.32,0.45,0.0,3.8,0.28,3.03,0.33,3.92,0.14,"💬 chat models (RLHF, DPO, IFT, ...)",GemmaForCausalLM,Original,bfloat16,True,gemma,2,661,True,de144fb2268dee1066f515465df532c05e699d48,True,True,2024-06-12,2024-02-08,True,True,google/gemma-2b-it,0,google/gemma-2b-it @@ -1247,7 +1259,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🟢,google/recurrentgemma-2b,6.94,30.17,0.3,4.82,0.32,1.59,0.02,0.0,0.25,3.1,0.34,1.96,0.12,🟢 pretrained,RecurrentGemmaForCausalLM,Original,bfloat16,True,gemma,2,92,True,195f13c55b371fc721eda0662c00c64642c70e17,True,True,2024-06-13,2024-04-06,False,True,google/recurrentgemma-2b,0,google/recurrentgemma-2b 🟢,databricks/dolly-v1-6b,6.89,22.24,0.22,4.78,0.32,1.36,0.01,1.9,0.26,8.12,0.4,2.95,0.13,🟢 pretrained,GPTJForCausalLM,Original,bfloat16,True,cc-by-nc-4.0,6,310,True,c9a85b3a322b402e20c839c702c725afe0cb454d,True,True,2024-06-12,2023-03-23,False,True,databricks/dolly-v1-6b,0,databricks/dolly-v1-6b 🟩,shivam9980/NEPALI-LLM,6.88,4.17,0.04,13.13,0.38,0.6,0.01,1.57,0.26,9.99,0.41,11.83,0.21,🟩 continuously pretrained,Gemma2ForCausalLM,Original,bfloat16,True,apache-2.0,10,0,True,5fe146065b53bfd6d8e242cffbe9176bc245551d,True,True,2024-09-24,2024-09-17,False,False,shivam9980/NEPALI-LLM,1,unsloth/gemma-2-9b-bnb-4bit -🟢,HuggingFaceTB/SmolLM-135M,6.84,21.25,0.21,3.29,0.3,0.68,0.01,1.12,0.26,13.34,0.44,1.36,0.11,🟢 
pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,0,159,True,eec6e461571fba3e197a57c298f60b75422eae02,True,True,2024-07-18,2024-07-14,False,False,HuggingFaceTB/SmolLM-135M,0,HuggingFaceTB/SmolLM-135M +🟢,HuggingFaceTB/SmolLM-135M,6.84,21.25,0.21,3.29,0.3,0.68,0.01,1.12,0.26,13.34,0.44,1.36,0.11,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,0,160,True,eec6e461571fba3e197a57c298f60b75422eae02,True,True,2024-07-18,2024-07-14,False,False,HuggingFaceTB/SmolLM-135M,0,HuggingFaceTB/SmolLM-135M 🔶,togethercomputer/GPT-JT-6B-v1,6.83,20.61,0.21,7.32,0.33,0.76,0.01,1.45,0.26,3.87,0.37,6.95,0.16,🔶 fine-tuned on domain-specific datasets,GPTJForCausalLM,Original,float16,True,apache-2.0,6,302,True,f34aa35f906895602c1f86f5685e598afdea8051,True,True,2024-06-12,2022-11-24,False,True,togethercomputer/GPT-JT-6B-v1,0,togethercomputer/GPT-JT-6B-v1 🤝,Solshine/Llama-3-1-big-thoughtful-passthrough-merge-2,6.78,25.47,0.25,5.01,0.32,0.15,0.0,1.23,0.26,6.75,0.39,2.06,0.12,🤝 base merges and moerges,LlamaForCausalLM,Original,float16,True,,18,1,False,d48047d6577e22fdda73a1be8e18971912db66d2,True,True,2024-09-24,2024-09-19,True,False,Solshine/Llama-3-1-big-thoughtful-passthrough-merge-2,1,Solshine/Llama-3-1-big-thoughtful-passthrough-merge-2 (Merge) 🟢,allenai/OLMo-7B-hf,6.78,27.19,0.27,5.76,0.33,0.68,0.01,3.02,0.27,2.08,0.35,1.92,0.12,🟢 pretrained,OlmoForCausalLM,Original,bfloat16,True,apache-2.0,6,9,True,687d934d36a05417048d0fe7482f24f389fef6aa,True,True,2024-06-27,2024-04-12,False,True,allenai/OLMo-7B-hf,0,allenai/OLMo-7B-hf @@ -1276,7 +1288,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🟢,princeton-nlp/Sheared-LLaMA-2.7B,6.31,24.17,0.24,5.66,0.33,0.53,0.01,3.36,0.28,2.09,0.36,2.08,0.12,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,2,59,True,2f157a0306b75d37694ae05f6a4067220254d540,True,True,2024-07-29,2023-10-10,False,False,princeton-nlp/Sheared-LLaMA-2.7B,0,princeton-nlp/Sheared-LLaMA-2.7B 🟢,openai-community/gpt2,6.3,17.8,0.18,2.82,0.3,0.3,0.0,1.12,0.26,13.91,0.44,1.84,0.12,🟢 pretrained,GPT2LMHeadModel,Original,float16,True,mit,0,2272,True,607a30d783dfa663caf39e06633721c8d4cfcd7e,True,True,2024-08-12,2022-03-02,False,True,openai-community/gpt2,0,openai-community/gpt2 🟢,google/flan-t5-base,6.24,18.91,0.19,11.34,0.35,0.0,0.0,0.0,0.24,3.22,0.37,3.97,0.14,🟢 pretrained,T5ForConditionalGeneration,Original,float16,True,apache-2.0,0,784,True,7bcac572ce56db69c1ea7c8af255c5d7c9672fc2,True,True,2024-08-14,2022-10-21,False,True,google/flan-t5-base,0,google/flan-t5-base -🟢,Qwen/Qwen2.5-0.5B,6.22,16.27,0.16,6.95,0.33,1.96,0.02,0.0,0.25,2.08,0.34,10.06,0.19,🟢 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,0,60,True,2630d3d2321bc1f1878f702166d1b2af019a7310,True,True,2024-09-19,2024-09-15,False,True,Qwen/Qwen2.5-0.5B,0,Qwen/Qwen2.5-0.5B +🟢,Qwen/Qwen2.5-0.5B,6.22,16.27,0.16,6.95,0.33,1.96,0.02,0.0,0.25,2.08,0.34,10.06,0.19,🟢 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,0,61,True,2630d3d2321bc1f1878f702166d1b2af019a7310,True,True,2024-09-19,2024-09-15,False,True,Qwen/Qwen2.5-0.5B,0,Qwen/Qwen2.5-0.5B 🟢,facebook/opt-30b,6.2,24.53,0.25,3.5,0.31,0.6,0.01,2.57,0.27,4.19,0.36,1.82,0.12,🟢 pretrained,OPTForCausalLM,Original,float16,True,other,30,133,True,ceea0a90ac0f6fae7c2c34bcb40477438c152546,True,True,2024-06-12,2022-05-11,False,True,facebook/opt-30b,0,facebook/opt-30b 🟢,HuggingFaceTB/SmolLM-360M,6.15,21.34,0.21,3.28,0.31,0.45,0.0,2.35,0.27,8.09,0.4,1.37,0.11,🟢 
pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,0,52,True,318cc630b73730bfd712e5873063156ffb8936b5,True,True,2024-07-18,2024-07-14,False,False,HuggingFaceTB/SmolLM-360M,0,HuggingFaceTB/SmolLM-360M 🔶,LeroyDyer/_Spydaz_Web_AI_ChatQA_003,6.13,22.09,0.22,4.29,0.32,0.3,0.0,2.8,0.27,5.83,0.38,1.48,0.11,🔶 fine-tuned on domain-specific datasets,MistralForCausalLM,Original,bfloat16,True,,7,0,False,,True,True,2024-09-14,,False,False,LeroyDyer/_Spydaz_Web_AI_ChatQA_003,0,Removed @@ -1317,11 +1329,11 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🟢,EleutherAI/gpt-neo-1.3B,5.33,20.79,0.21,3.02,0.3,0.68,0.01,0.78,0.26,4.87,0.38,1.82,0.12,🟢 pretrained,GPTNeoForCausalLM,Original,bfloat16,True,mit,1,257,True,dbe59a7f4a88d01d1ba9798d78dbe3fe038792c8,True,True,2024-06-12,2022-03-02,False,True,EleutherAI/gpt-neo-1.3B,0,EleutherAI/gpt-neo-1.3B 🔶,realtreetune/rho-1b-sft-MATH,5.3,21.21,0.21,4.2,0.31,1.89,0.02,0.34,0.25,2.9,0.35,1.3,0.11,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,,1,0,False,b5f93df6af679a860caac9a9598e0f70c326b4fb,True,True,2024-10-05,2024-06-06,False,False,realtreetune/rho-1b-sft-MATH,1,realtreetune/rho-1b-sft-MATH (Merge) 🟢,facebook/opt-1.3b,5.25,23.83,0.24,3.65,0.31,0.76,0.01,0.0,0.24,2.08,0.34,1.19,0.11,🟢 pretrained,OPTForCausalLM,Original,float16,True,other,1,148,True,3f5c25d0bc631cb57ac65913f76e22c2dfb61d62,True,True,2024-06-12,2022-05-11,False,True,facebook/opt-1.3b,0,facebook/opt-1.3b -💬,microsoft/DialoGPT-medium,5.25,14.79,0.15,2.56,0.3,0.0,0.0,0.56,0.25,12.28,0.43,1.32,0.11,"💬 chat models (RLHF, DPO, IFT, ...)",GPT2LMHeadModel,Original,bfloat16,True,mit,0,322,True,7b40bb0f92c45fefa957d088000d8648e5c7fa33,True,True,2024-06-13,2022-03-02,True,True,microsoft/DialoGPT-medium,0,microsoft/DialoGPT-medium +💬,microsoft/DialoGPT-medium,5.25,14.79,0.15,2.56,0.3,0.0,0.0,0.56,0.25,12.28,0.43,1.32,0.11,"💬 chat models (RLHF, DPO, IFT, ...)",GPT2LMHeadModel,Original,bfloat16,True,mit,0,323,True,7b40bb0f92c45fefa957d088000d8648e5c7fa33,True,True,2024-06-13,2022-03-02,True,True,microsoft/DialoGPT-medium,0,microsoft/DialoGPT-medium 🟢,stabilityai/stablelm-2-1_6b,5.22,11.57,0.12,8.63,0.34,0.15,0.0,0.0,0.25,5.79,0.39,5.15,0.15,🟢 pretrained,StableLmForCausalLM,Original,float16,True,other,1,181,True,8879812cccd176fbbe9ceb747b815bcc7d6499f8,True,True,2024-06-12,2024-01-18,False,True,stabilityai/stablelm-2-1_6b,0,stabilityai/stablelm-2-1_6b 🔶,Youlln/ECE-PRYMMAL0.5-FT,5.2,18.51,0.19,5.15,0.31,0.0,0.0,0.78,0.26,1.43,0.33,5.3,0.15,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,0,0,True,56b9fd5f26e5b6379fe4aa62e0f66b87b5c6f8e8,True,True,2024-10-02,2024-10-02,False,False,Youlln/ECE-PRYMMAL0.5-FT,1,Youlln/ECE-PRYMMAL0.5-FT (Merge) 🔶,google/mt5-xl,5.19,19.6,0.2,3.28,0.3,0.0,0.0,1.9,0.26,5.04,0.38,1.33,0.11,🔶 fine-tuned on domain-specific datasets,MT5ForConditionalGeneration,Original,float16,True,apache-2.0,3,20,True,63fc6450d80515b48e026b69ef2fbbd426433e84,True,True,2024-09-06,2022-03-02,False,True,google/mt5-xl,0,google/mt5-xl -🟢,amd/AMD-Llama-135m,5.19,19.18,0.19,2.54,0.3,0.53,0.01,1.12,0.26,5.9,0.38,1.87,0.12,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,0,98,True,8f9c39b5ed86d422ab332ed1ecf042fdaeb57903,True,True,2024-10-01,2024-07-19,False,True,amd/AMD-Llama-135m,0,amd/AMD-Llama-135m +🟢,amd/AMD-Llama-135m,5.19,19.18,0.19,2.54,0.3,0.53,0.01,1.12,0.26,5.9,0.38,1.87,0.12,🟢 
pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,0,100,True,8f9c39b5ed86d422ab332ed1ecf042fdaeb57903,True,True,2024-10-01,2024-07-19,False,True,amd/AMD-Llama-135m,0,amd/AMD-Llama-135m 🔶,sakhan10/quantized_open_llama_3b_v2,5.14,18.72,0.19,2.81,0.3,0.0,0.0,3.58,0.28,4.69,0.37,1.06,0.11,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,,3,0,False,e8d51ad5204806edf9c2eeb8c56139a440a70265,True,True,2024-08-28,2024-08-23,False,False,sakhan10/quantized_open_llama_3b_v2,1,openlm-research/open_llama_3b_v2 💬,HuggingFaceTB/SmolLM-1.7B-Instruct,5.14,23.48,0.23,2.08,0.29,0.0,0.0,1.34,0.26,2.08,0.35,1.85,0.12,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,1,97,True,0ad161e59935a9a691dfde2818df8b98786f30a7,True,True,2024-07-18,2024-07-15,True,False,HuggingFaceTB/SmolLM-1.7B-Instruct,1,HuggingFaceTB/SmolLM-1.7B 🟢,Qwen/Qwen1.5-0.5B,5.14,17.06,0.17,5.04,0.32,0.45,0.0,0.56,0.25,4.3,0.36,3.41,0.13,🟢 pretrained,Qwen2ForCausalLM,Original,bfloat16,True,other,0,142,True,8f445e3628f3500ee69f24e1303c9f10f5342a39,True,True,2024-06-13,2024-01-22,False,True,Qwen/Qwen1.5-0.5B,0,Qwen/Qwen1.5-0.5B @@ -1340,7 +1352,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 💬,monsterapi/Llama-3_1-8B-Instruct-orca-ORPO,4.83,22.73,0.23,1.34,0.29,0.0,0.0,0.0,0.25,3.06,0.34,1.86,0.12,"💬 chat models (RLHF, DPO, IFT, ...)",?,Adapter,bfloat16,True,apache-2.0,16,1,True,5206a32e0bd3067aef1ce90f5528ade7d866253f,True,True,2024-08-30,2024-08-01,True,False,monsterapi/Llama-3_1-8B-Instruct-orca-ORPO,2,meta-llama/Meta-Llama-3.1-8B 💬,BEE-spoke-data/smol_llama-220M-openhermes,4.76,15.55,0.16,3.11,0.3,0.0,0.0,2.35,0.27,6.22,0.38,1.34,0.11,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,0,5,True,fb4bcd4b7eee363baacb4176a26cea2aaeb173f4,True,True,2024-09-21,2023-12-30,False,False,BEE-spoke-data/smol_llama-220M-openhermes,1,BEE-spoke-data/smol_llama-220M-GQA 💬,CoolSpring/Qwen2-0.5B-Abyme,4.76,19.15,0.19,2.28,0.29,1.51,0.02,0.45,0.25,1.48,0.35,3.7,0.13,"💬 chat models (RLHF, DPO, IFT, ...)",Qwen2ForCausalLM,Original,bfloat16,True,apache-2.0,0,0,True,a48b7c04b854e5c60fe3464f96904bfc53c8640c,True,True,2024-09-04,2024-07-18,True,False,CoolSpring/Qwen2-0.5B-Abyme,1,Qwen/Qwen2-0.5B -🔶,amd/AMD-Llama-135m,4.76,18.42,0.18,2.49,0.3,0.53,0.01,0.34,0.25,4.91,0.38,1.87,0.12,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,0,98,True,8f9c39b5ed86d422ab332ed1ecf042fdaeb57903,True,True,2024-09-29,2024-07-19,False,True,amd/AMD-Llama-135m,0,amd/AMD-Llama-135m +🔶,amd/AMD-Llama-135m,4.76,18.42,0.18,2.49,0.3,0.53,0.01,0.34,0.25,4.91,0.38,1.87,0.12,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,0,100,True,8f9c39b5ed86d422ab332ed1ecf042fdaeb57903,True,True,2024-09-29,2024-07-19,False,True,amd/AMD-Llama-135m,0,amd/AMD-Llama-135m 🔶,togethercomputer/RedPajama-INCITE-Chat-3B-v1,4.75,16.52,0.17,5.16,0.32,0.3,0.0,0.0,0.24,5.09,0.37,1.41,0.11,🔶 fine-tuned on domain-specific datasets,GPTNeoXForCausalLM,Original,float16,True,apache-2.0,3,150,True,f0e0995eba801096ed04cb87931d96a8316871af,True,True,2024-06-13,2023-05-05,False,True,togethercomputer/RedPajama-INCITE-Chat-3B-v1,0,togethercomputer/RedPajama-INCITE-Chat-3B-v1 🔶,HuggingFaceTB/SmolLM-360M-Instruct,4.71,19.52,0.2,2.08,0.29,0.0,0.0,1.9,0.26,2.9,0.35,1.85,0.12,🔶 fine-tuned on domain-specific 
datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,0,65,True,8e951de8c220295ea4f85d078c4e320df7137535,True,True,2024-08-20,2024-07-15,True,False,HuggingFaceTB/SmolLM-360M-Instruct,1,HuggingFaceTB/SmolLM-360M 🟢,TinyLlama/TinyLlama_v1.1,4.7,20.01,0.2,3.21,0.3,0.45,0.0,0.0,0.25,3.98,0.37,0.54,0.1,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,1,70,True,ff3c701f2424c7625fdefb9dd470f45ef18b02d6,True,True,2024-06-12,2024-03-09,False,True,TinyLlama/TinyLlama_v1.1,0,TinyLlama/TinyLlama_v1.1 @@ -1365,14 +1377,14 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,tangledgroup/tangled-llama-pints-1.5b-v0.1-instruct,4.18,15.09,0.15,3.84,0.31,0.08,0.0,0.0,0.24,4.85,0.38,1.21,0.11,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,1,0,True,3e1429f20007740877c51e44ed63b870a57a2e17,True,True,2024-08-29,2024-08-27,True,False,tangledgroup/tangled-llama-pints-1.5b-v0.1-instruct,1,pints-ai/1.5-Pints-16K-v0.1 🟢,pints-ai/1.5-Pints-16K-v0.1,4.15,16.36,0.16,3.66,0.31,0.83,0.01,0.0,0.24,2.73,0.36,1.32,0.11,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,mit,1,12,True,7862a52f250be68fad593f3a4030f00d658ede56,True,True,2024-09-09,2024-08-07,True,False,pints-ai/1.5-Pints-16K-v0.1,0,pints-ai/1.5-Pints-16K-v0.1 💬,Felladrin/Llama-160M-Chat-v1,4.1,15.75,0.16,3.17,0.3,0.0,0.0,1.01,0.26,3.17,0.37,1.51,0.11,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,float16,True,apache-2.0,0,14,True,e7f50665676821867ee7dfad32d0ca9fb68fc6bc,True,True,2024-07-23,2023-12-20,True,False,Felladrin/Llama-160M-Chat-v1,1,JackFram/llama-160m -🟢,meta-llama/Llama-3.2-1B,4.03,14.78,0.15,4.37,0.31,0.23,0.0,0.0,0.23,2.56,0.34,2.26,0.12,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3.2,1,355,True,a7c18587d7f473bfea02aa5639aa349403307b54,True,True,2024-09-23,2024-09-18,False,True,meta-llama/Llama-3.2-1B,0,meta-llama/Llama-3.2-1B +🟢,meta-llama/Llama-3.2-1B,4.03,14.78,0.15,4.37,0.31,0.23,0.0,0.0,0.23,2.56,0.34,2.26,0.12,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,llama3.2,1,362,True,a7c18587d7f473bfea02aa5639aa349403307b54,True,True,2024-09-23,2024-09-18,False,True,meta-llama/Llama-3.2-1B,0,meta-llama/Llama-3.2-1B 💬,davidkim205/Rhea-72b-v0.5,4.02,1.45,0.01,3.67,0.31,5.51,0.06,0.34,0.25,11.32,0.42,1.85,0.12,"💬 chat models (RLHF, DPO, IFT, ...)",LlamaForCausalLM,Original,bfloat16,True,apache-2.0,72,132,True,bc3806efb23d2713e6630a748d9747fd76b27169,True,True,2024-09-15,2024-03-22,False,False,davidkim205/Rhea-72b-v0.5,0,davidkim205/Rhea-72b-v0.5 🟢,bigscience/bloom-1b7,3.97,10.44,0.1,4.4,0.31,0.08,0.0,1.12,0.26,6.84,0.39,0.96,0.11,🟢 pretrained,BloomForCausalLM,Original,bfloat16,True,bigscience-bloom-rail-1.0,1,116,True,cc72a88036c2fb937d65efeacc57a0c2ef5d6fe5,True,True,2024-06-13,2022-05-19,False,True,bigscience/bloom-1b7,0,bigscience/bloom-1b7 🔶,togethercomputer/RedPajama-INCITE-7B-Chat,3.96,15.58,0.16,4.5,0.32,0.15,0.0,0.34,0.25,1.86,0.34,1.35,0.11,🔶 fine-tuned on domain-specific datasets,GPTNeoXForCausalLM,Original,float16,True,apache-2.0,7,92,True,47b94a739e2f3164b438501c8684acc5d5acc146,True,True,2024-06-13,2023-05-04,False,True,togethercomputer/RedPajama-INCITE-7B-Chat,0,togethercomputer/RedPajama-INCITE-7B-Chat 🟢,bigscience/bloom-1b1,3.96,13.73,0.14,4.04,0.31,0.15,0.0,1.23,0.26,3.42,0.37,1.2,0.11,🟢 pretrained,BloomForCausalLM,Original,bfloat16,True,bigscience-bloom-rail-1.0,1,59,True,eb3dd7399312f5f94fd13f41d2f318117d3eb1e4,True,True,2024-06-13,2022-05-19,False,True,bigscience/bloom-1b1,0,bigscience/bloom-1b1 
🟢,BEE-spoke-data/smol_llama-101M-GQA,3.92,13.84,0.14,3.2,0.3,0.0,0.0,1.01,0.26,4.28,0.37,1.19,0.11,🟢 pretrained,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,0,26,True,bb26643db413bada7e0c3c50752bf9da82403dba,True,True,2024-07-06,2023-10-26,False,False,BEE-spoke-data/smol_llama-101M-GQA,0,BEE-spoke-data/smol_llama-101M-GQA 🔶,IDEA-CCNL/Ziya-LLaMA-13B-v1,3.91,16.97,0.17,1.46,0.29,0.0,0.0,0.0,0.25,3.88,0.38,1.12,0.11,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,gpl-3.0,13,272,True,64d931f346e1a49ea3bbca07a83137075bab1c66,True,True,2024-06-12,2023-05-16,False,True,IDEA-CCNL/Ziya-LLaMA-13B-v1,0,IDEA-CCNL/Ziya-LLaMA-13B-v1 -🟢,distilbert/distilgpt2,3.9,6.11,0.06,2.84,0.3,0.0,0.0,1.23,0.26,11.16,0.42,2.08,0.12,🟢 pretrained,GPT2LMHeadModel,Original,bfloat16,True,apache-2.0,0,427,True,2290a62682d06624634c1f46a6ad5be0f47f38aa,True,True,2024-06-12,2022-03-02,False,True,distilbert/distilgpt2,0,distilbert/distilgpt2 +🟢,distilbert/distilgpt2,3.9,6.11,0.06,2.84,0.3,0.0,0.0,1.23,0.26,11.16,0.42,2.08,0.12,🟢 pretrained,GPT2LMHeadModel,Original,bfloat16,True,apache-2.0,0,428,True,2290a62682d06624634c1f46a6ad5be0f47f38aa,True,True,2024-06-12,2022-03-02,False,True,distilbert/distilgpt2,0,distilbert/distilgpt2 🔶,RWKV/rwkv-raven-14b,3.89,7.68,0.08,6.76,0.33,0.0,0.0,0.0,0.23,7.19,0.4,1.67,0.12,🔶 fine-tuned on domain-specific datasets,RwkvForCausalLM,Original,float16,True,,14,55,False,359c0649b4f1d10a26ebea32908035bc00d152ee,True,True,2024-07-08,2023-05-05,False,False,RWKV/rwkv-raven-14b,0,RWKV/rwkv-raven-14b 🟩,Locutusque/TinyMistral-248M-v2.5,3.87,13.36,0.13,3.18,0.3,0.0,0.0,0.11,0.25,5.07,0.38,1.5,0.11,🟩 continuously pretrained,MistralForCausalLM,Original,float16,False,apache-2.0,0,26,True,214e48aabc01235e25c67477898756f1bebef215,True,True,2024-09-17,2024-01-24,True,False,Locutusque/TinyMistral-248M-v2.5,0,Locutusque/TinyMistral-248M-v2.5 💬,Felladrin/Minueza-32M-UltraChat,3.85,13.76,0.14,2.44,0.29,0.0,0.0,0.78,0.26,4.64,0.37,1.48,0.11,"💬 chat models (RLHF, DPO, IFT, ...)",MistralForCausalLM,Original,float16,True,apache-2.0,0,4,True,28506b99c5902d2215eb378ec91d4226a7396c49,True,True,2024-07-23,2024-02-27,True,False,Felladrin/Minueza-32M-UltraChat,1,Felladrin/Minueza-32M-Base @@ -1386,6 +1398,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🔶,Replete-AI/Replete-LLM-Qwen2-7b_Beta-Preview,3.58,8.58,0.09,1.97,0.29,0.0,0.0,0.0,0.25,7.76,0.4,3.17,0.13,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,,7,0,False,fe4c3fc2314db69083527ddd0c9a658fcbc54f15,True,True,2024-07-26,,True,False,Replete-AI/Replete-LLM-Qwen2-7b_Beta-Preview,0,Removed 🔶,google/mt5-base,3.57,16.45,0.16,1.3,0.29,0.0,0.0,0.0,0.24,2.87,0.37,0.77,0.11,🔶 fine-tuned on domain-specific datasets,MT5ForConditionalGeneration,Original,float16,True,apache-2.0,0,184,True,2eb15465c5dd7f72a8f7984306ad05ebc3dd1e1f,True,True,2024-09-06,2022-03-02,False,True,google/mt5-base,0,google/mt5-base 🔶,Replete-AI/Replete-LLM-Qwen2-7b,3.51,9.32,0.09,2.72,0.3,0.0,0.0,0.0,0.25,7.26,0.39,1.74,0.12,🔶 fine-tuned on domain-specific datasets,Qwen2ForCausalLM,Original,bfloat16,True,,7,0,False,5b75b6180b45d83124e04a00766dc19d2ad52622,True,True,2024-08-13,,True,False,Replete-AI/Replete-LLM-Qwen2-7b,0,Removed +🤝,Youlln/ECE-PRYMMAL0.5B-Youri,3.51,14.46,0.14,1.5,0.28,0.0,0.0,0.0,0.24,4.01,0.37,1.06,0.11,🤝 base merges and 
moerges,Qwen2ForCausalLM,Original,bfloat16,True,,0,0,False,1477d3deff98f35f523aa222bc0442278d464566,True,True,2024-10-07,2024-10-07,False,False,Youlln/ECE-PRYMMAL0.5B-Youri,1,Youlln/ECE-PRYMMAL0.5B-Youri (Merge) 🔶,carsenk/phi3.5_mini_exp_825_uncensored,3.47,13.64,0.14,1.83,0.3,0.0,0.0,0.0,0.25,3.39,0.36,1.95,0.12,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,apache-2.0,3,1,True,6b208dc3df02e0d5ef0c3fe5899f9f31618f2e94,True,True,2024-08-29,2024-08-29,True,False,carsenk/phi3.5_mini_exp_825_uncensored,1,unsloth/phi-3.5-mini-instruct-bnb-4bit 🟢,bigscience/bloom-560m,3.46,6.2,0.06,2.89,0.3,0.08,0.0,1.57,0.26,8.19,0.4,1.83,0.12,🟢 pretrained,BloomForCausalLM,Original,bfloat16,True,bigscience-bloom-rail-1.0,0,343,True,ac2ae5fab2ce3f9f40dc79b5ca9f637430d24971,True,True,2024-06-13,2022-05-19,False,True,bigscience/bloom-560m,0,bigscience/bloom-560m 🔶,google/umt5-base,3.44,17.46,0.17,0.81,0.28,0.0,0.0,0.56,0.25,0.94,0.34,0.87,0.11,🔶 fine-tuned on domain-specific datasets,UMT5ForConditionalGeneration,Original,float16,True,apache-2.0,-1,12,True,0de9394d54f8975e71838d309de1cb496c894ab9,True,True,2024-09-06,2023-07-02,False,True,google/umt5-base,0,google/umt5-base @@ -1395,7 +1408,7 @@ T,Model,Average ⬆️,IFEval,IFEval Raw,BBH,BBH Raw,MATH Lvl 5,MATH Lvl 5 Raw,G 🤝,LilRg/PRYMMAL-6B-slerp,3.23,11.53,0.12,2.21,0.29,0.0,0.0,0.0,0.25,4.45,0.37,1.2,0.11,🤝 base merges and moerges,LlamaForCausalLM,Original,bfloat16,False,apache-2.0,3,0,True,1ce0f5fdaae6a7866eda77df18378e9b5621af65,True,True,2024-09-24,2024-09-24,False,False,LilRg/PRYMMAL-6B-slerp,1,LilRg/PRYMMAL-6B-slerp (Merge) 🔶,pankajmathur/orca_mini_3b,3.07,7.42,0.07,4.69,0.32,0.53,0.01,0.0,0.25,4.2,0.33,1.61,0.11,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,cc-by-nc-sa-4.0,3,158,True,31e1a7bc3f7ea2f247b432d60036d975b8d590e9,True,True,2024-06-26,2023-06-22,False,False,pankajmathur/orca_mini_3b,0,pankajmathur/orca_mini_3b 🟢,instruction-pretrain/InstructLM-500M,2.85,10.28,0.1,2.32,0.29,0.0,0.0,0.89,0.26,2.07,0.35,1.57,0.11,🟢 pretrained,MistralForCausalLM,Original,float16,True,apache-2.0,0,34,True,e9d33823c76303dfaff6a8397a8b70d0118ea350,True,True,2024-06-27,2024-06-18,False,False,instruction-pretrain/InstructLM-500M,0,instruction-pretrain/InstructLM-500M -🔶,TinyLlama/TinyLlama-1.1B-Chat-v1.0,2.71,5.96,0.06,4.01,0.31,0.83,0.01,0.0,0.25,4.31,0.35,1.12,0.11,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,1,1071,True,fe8a4ea1ffedaf415f4da2f062534de366a451e6,True,True,2024-08-04,2023-12-30,False,True,TinyLlama/TinyLlama-1.1B-Chat-v1.0,0,TinyLlama/TinyLlama-1.1B-Chat-v1.0 +🔶,TinyLlama/TinyLlama-1.1B-Chat-v1.0,2.71,5.96,0.06,4.01,0.31,0.83,0.01,0.0,0.25,4.31,0.35,1.12,0.11,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,apache-2.0,1,1072,True,fe8a4ea1ffedaf415f4da2f062534de366a451e6,True,True,2024-08-04,2023-12-30,False,True,TinyLlama/TinyLlama-1.1B-Chat-v1.0,0,TinyLlama/TinyLlama-1.1B-Chat-v1.0 🔶,Josephgflowers/TinyLlama_v1.1_math_code-world-test-1,1.83,0.78,0.01,4.16,0.31,0.91,0.01,0.0,0.23,3.64,0.35,1.47,0.11,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,float16,True,mit,1,0,True,6f7c2aaf0b8723bc6a1dc23a4a1ff0ec24dc11ec,True,True,2024-09-09,2024-06-23,False,False,Josephgflowers/TinyLlama_v1.1_math_code-world-test-1,0,Josephgflowers/TinyLlama_v1.1_math_code-world-test-1 🟢,NucleusAI/nucleus-22B-token-500B,1.63,2.57,0.03,1.89,0.29,0.0,0.0,0.0,0.25,3.55,0.35,1.8,0.12,🟢 
pretrained,LlamaForCausalLM,Original,bfloat16,True,mit,21,25,True,49bb1a47c0d32b4bfa6630a4eff04a857adcd4ca,True,True,2024-06-26,2023-10-06,False,False,NucleusAI/nucleus-22B-token-500B,0,NucleusAI/nucleus-22B-token-500B 🔶,pankajmathur/orca_mini_v6_8b,1.41,1.11,0.01,3.22,0.3,0.0,0.0,0.0,0.24,2.77,0.36,1.38,0.11,🔶 fine-tuned on domain-specific datasets,LlamaForCausalLM,Original,bfloat16,True,llama3,8,1,True,e95dc8e4c6b6ca5957b657cc2d905683142eaf3e,True,True,2024-06-26,2024-06-02,True,False,pankajmathur/orca_mini_v6_8b,0,pankajmathur/orca_mini_v6_8b