{
"config": {
"model_dtype": "bfloat16",
"model_sha": "main",
"model_name": "teknium/OpenHermes-2.5-Mistral-7B"
},
"results": {
"logiqa": {
"delta_abs": 0.03194888178913741,
"delta_rel": 0.10101010101010109
},
"logiqa2": {
"delta_abs": 0.030534351145038163,
"delta_rel": 0.0793388429752066
},
"lsat-ar": {
"delta_abs": 0.03913043478260869,
"delta_rel": 0.19148936170212763
},
"lsat-rc": {
"delta_abs": 0.09293680297397772,
"delta_rel": 0.23364485981308417
},
"lsat-lr": {
"delta_abs": 0.05686274509803918,
"delta_rel": 0.18471337579617822
}
}
}