{
  "config": {
    "model_dtype": "bfloat16",
    "model_sha": "main",
    "model_name": "Locutusque/Hercules-4.0-Mistral-v0.2-7B"
  },
  "results": {
    "logiqa": {
      "delta_abs": 0.06070287539936098,
      "delta_rel": 0.20765027322404356
    },
    "logiqa2": {
      "delta_abs": 0.06870229007633588,
      "delta_rel": 0.2168674698795181
    },
    "lsat-ar": {
      "delta_abs": 0.026086956521739146,
      "delta_rel": 0.12500000000000008
    },
    "lsat-rc": {
      "delta_abs": 0.10408921933085502,
      "delta_rel": 0.34146341463414637
    },
    "lsat-lr": {
      "delta_abs": 0.10392156862745097,
      "delta_rel": 0.49999999999999994
    }
  }
}