{
"results": {
"ko_eqbench": {
"alias": " - ko_eqbench",
"eqbench,none": 40.486043489707605,
"eqbench_stderr,none": 2.786761117161259,
"percent_parseable,none": 88.88888888888889,
"percent_parseable_stderr,none": 2.4103384202072893
},
"ko_gpqa_diamond_zeroshot": {
"alias": " - ko_gpqa_diamond_zeroshot",
"acc_norm,none": 0.22727272727272727,
"acc_norm_stderr,none": 0.0298575156733864
},
"ko_gsm8k": {
"alias": " - ko_gsm8k",
"exact_match,strict-match": 0.22744503411675512,
"exact_match_stderr,strict-match": 0.011546363312548094,
"exact_match,flexible-extract": 0.4829416224412434,
"exact_match_stderr,flexible-extract": 0.013764467123761318
},
"ko_ifeval": {
"alias": " - ko_ifeval",
"prompt_level_strict_acc,none": 0.35020242914979755,
"prompt_level_strict_acc_stderr,none": 0.021484495459914727,
"inst_level_strict_acc,none": 0.45103857566765576,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.4048582995951417,
"prompt_level_loose_acc_stderr,none": 0.02210742979218474,
"inst_level_loose_acc,none": 0.5014836795252225,
"inst_level_loose_acc_stderr,none": "N/A"
},
"ko_winogrande": {
"alias": " - ko_winogrande",
"acc,none": 0.6353591160220995,
"acc_stderr,none": 0.013527746622429846
},
"kornat_common": {
"alias": " - kornat_common",
"acc_norm,none": 0.24267643142476697,
"acc_norm_stderr,none": 0.005531280978381988
},
"kornat_harmless": {
"alias": " - kornat_harmless",
"acc_norm,none": 0.64036666666669,
"acc_norm_stderr,none": 0.0020979206674973713
},
"kornat_helpful": {
"alias": " - kornat_helpful",
"acc_norm,none": 0.514,
"acc_norm_stderr,none": 0.00723209178424799
},
"kornat_social": {
"alias": " - kornat_social",
"A-SVA,none": 0.5176153736039818,
"A-SVA_stderr,none": 0.0037152012096515496
}
},
"versions": {
"all": 2,
"ko_eqbench": 2,
"ko_gpqa_diamond_zeroshot": 2,
"ko_gsm8k": 2,
"ko_ifeval": 2,
"ko_winogrande": 2,
"kornat_common": 2,
"kornat_harmless": 2,
"kornat_helpful": 2,
"kornat_social": 2
},
"config_general": {
"model_name": "Danielbrdz/Barcenas-Llama3-8b-ORPO",
"model_sha": "66c848c4526d3db1ec41468c0f73ac4448c6abe9",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}