{
  "results": {
    "ko_eqbench": {
      "alias": " - ko_eqbench",
      "eqbench,none": -6.274270308412872,
      "eqbench_stderr,none": 2.1764191206043764,
      "percent_parseable,none": 19.29824561403509,
      "percent_parseable_stderr,none": 3.0267457554898445
    },
    "ko_gpqa_diamond_zeroshot": {
      "alias": " - ko_gpqa_diamond_zeroshot",
      "acc_norm,none": 0.21212121212121213,
      "acc_norm_stderr,none": 0.02912652283458682
    },
    "ko_gsm8k": {
      "alias": " - ko_gsm8k",
      "exact_match,strict-match": 0.1379833206974981,
      "exact_match_stderr,strict-match": 0.009499777327746846,
      "exact_match,flexible-extract": 0.14480667172100076,
      "exact_match_stderr,flexible-extract": 0.009693234799052708
    },
    "ko_ifeval": {
      "alias": " - ko_ifeval",
      "prompt_level_strict_acc,none": 0.21052631578947367,
      "prompt_level_strict_acc_stderr,none": 0.01836109444859075,
      "inst_level_strict_acc,none": 0.27299703264094954,
      "inst_level_strict_acc_stderr,none": "N/A",
      "prompt_level_loose_acc,none": 0.2165991902834008,
      "prompt_level_loose_acc_stderr,none": 0.018552266010973918,
      "inst_level_loose_acc,none": 0.2789317507418398,
      "inst_level_loose_acc_stderr,none": "N/A"
    },
    "ko_winogrande": {
      "alias": " - ko_winogrande",
      "acc,none": 0.6448303078137332,
      "acc_stderr,none": 0.013450047479569254
    },
    "kornat_common": {
      "alias": " - kornat_common",
      "acc_norm,none": 0.2331890812250333,
      "acc_norm_stderr,none": 0.0054559380922563185
    },
    "kornat_harmless": {
      "alias": " - kornat_harmless",
      "acc_norm,none": 0.6186000000000198,
      "acc_norm_stderr,none": 0.0020522190278016586
    },
    "kornat_helpful": {
      "alias": " - kornat_helpful",
      "acc_norm,none": 0.44575,
      "acc_norm_stderr,none": 0.007026069425773491
    },
    "kornat_social": {
      "alias": " - kornat_social",
      "A-SVA,none": 0.5097210562353294,
      "A-SVA_stderr,none": 0.003763936183672199
    }
  },
  "versions": {
    "all": 2,
    "ko_eqbench": 2,
    "ko_gpqa_diamond_zeroshot": 2,
    "ko_gsm8k": 2,
    "ko_ifeval": 2,
    "ko_winogrande": 2,
    "kornat_common": 2,
    "kornat_harmless": 2,
    "kornat_helpful": 2,
    "kornat_social": 2
  },
  "config_general": {
    "model_name": "GAI-LLM/llama-2-koen-13b-mixed-v9",
    "model_sha": "0d3fe7df627660f041bd73a62362898e05b67196",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}