{ "results": { "winogrande_tr-v0.2": { "acc,none": 0.5695102685624013, "acc_stderr,none": 0.01392152831566211, "alias": "winogrande_tr-v0.2" }, "truthfulqa_v0.2": { "acc,none": 0.496780989197894, "acc_stderr,none": 0.015418921308063738, "alias": "truthfulqa_v0.2" }, "mmlu_tr_v0.2": { "acc,none": 0.5174887229165126, "acc_stderr,none": 0.004134955718843977, "alias": "mmlu_tr_v0.2" }, "mmlu_humanities_v0.2": { "alias": " - humanities_v0.2", "acc,none": 0.46595308585743567, "acc_stderr,none": 0.007151288952327422 }, "mmlu_formal_logic_v0.2": { "alias": " - formal_logic_v0.2", "acc,none": 0.38095238095238093, "acc_stderr,none": 0.04343525428949098 }, "mmlu_high_school_european_history_v0.2": { "alias": " - high_school_european_history_v0.2", "acc,none": 0.6333333333333333, "acc_stderr,none": 0.039478328284971595 }, "mmlu_high_school_us_history_v0.2": { "alias": " - high_school_us_history_v0.2", "acc,none": 0.6312849162011173, "acc_stderr,none": 0.036161643250458134 }, "mmlu_high_school_world_history_v0.2": { "alias": " - high_school_world_history_v0.2", "acc,none": 0.6384976525821596, "acc_stderr,none": 0.03299645494317726 }, "mmlu_international_law_v0.2": { "alias": " - international_law_v0.2", "acc,none": 0.7355371900826446, "acc_stderr,none": 0.04026187527591206 }, "mmlu_jurisprudence_v0.2": { "alias": " - jurisprudence_v0.2", "acc,none": 0.6981132075471698, "acc_stderr,none": 0.04480127092110672 }, "mmlu_logical_fallacies_v0.2": { "alias": " - logical_fallacies_v0.2", "acc,none": 0.5217391304347826, "acc_stderr,none": 0.0394910915751847 }, "mmlu_moral_disputes_v0.2": { "alias": " - moral_disputes_v0.2", "acc,none": 0.6103896103896104, "acc_stderr,none": 0.027832335020568588 }, "mmlu_moral_scenarios_v0.2": { "alias": " - moral_scenarios_v0.2", "acc,none": 0.2603211009174312, "acc_stderr,none": 0.01486850226960264 }, "mmlu_philosophy_v0.2": { "alias": " - philosophy_v0.2", "acc,none": 0.6053511705685619, "acc_stderr,none": 0.02831398324469111 }, "mmlu_prehistory_v0.2": { "alias": " - prehistory_v0.2", "acc,none": 0.59, "acc_stderr,none": 0.028443454437435168 }, "mmlu_professional_law_v0.2": { "alias": " - professional_law_v0.2", "acc,none": 0.3688760806916426, "acc_stderr,none": 0.012955653877786947 }, "mmlu_world_religions_v0.2": { "alias": " - world_religions_v0.2", "acc,none": 0.7261904761904762, "acc_stderr,none": 0.034505737480109946 }, "mmlu_other_v0.2": { "alias": " - other_v0.2", "acc,none": 0.5892501658925017, "acc_stderr,none": 0.008759980307080374 }, "mmlu_business_ethics_v0.2": { "alias": " - business_ethics_v0.2", "acc,none": 0.6666666666666666, "acc_stderr,none": 0.04761904761904759 }, "mmlu_clinical_knowledge_v0.2": { "alias": " - clinical_knowledge_v0.2", "acc,none": 0.5859375, "acc_stderr,none": 0.030845265063510725 }, "mmlu_college_medicine_v0.2": { "alias": " - college_medicine_v0.2", "acc,none": 0.5, "acc_stderr,none": 0.03869116162670684 }, "mmlu_global_facts_v0.2": { "alias": " - global_facts_v0.2", "acc,none": 0.37755102040816324, "acc_stderr,none": 0.049221385784280064 }, "mmlu_human_aging_v0.2": { "alias": " - human_aging_v0.2", "acc,none": 0.5660377358490566, "acc_stderr,none": 0.03411987631058919 }, "mmlu_management_v0.2": { "alias": " - management_v0.2", "acc,none": 0.6767676767676768, "acc_stderr,none": 0.04724590344515123 }, "mmlu_marketing_v0.2": { "alias": " - marketing_v0.2", "acc,none": 0.7142857142857143, "acc_stderr,none": 0.03073796306513823 }, "mmlu_medical_genetics_v0.2": { "alias": " - medical_genetics_v0.2", "acc,none": 0.6736842105263158, 
"acc_stderr,none": 0.04835966701461423 }, "mmlu_miscellaneous_v0.2": { "alias": " - miscellaneous_v0.2", "acc,none": 0.6945169712793734, "acc_stderr,none": 0.01665345154058981 }, "mmlu_nutrition_v0.2": { "alias": " - nutrition_v0.2", "acc,none": 0.5836065573770491, "acc_stderr,none": 0.02827321726216895 }, "mmlu_professional_accounting_v0.2": { "alias": " - professional_accounting_v0.2", "acc,none": 0.3763440860215054, "acc_stderr,none": 0.029056458001209543 }, "mmlu_professional_medicine_v0.2": { "alias": " - professional_medicine_v0.2", "acc,none": 0.5555555555555556, "acc_stderr,none": 0.030816677568068274 }, "mmlu_virology_v0.2": { "alias": " - virology_v0.2", "acc,none": 0.4591194968553459, "acc_stderr,none": 0.039644686497500074 }, "mmlu_social_sciences_v0.2": { "alias": " - social_sciences_v0.2", "acc,none": 0.5824175824175825, "acc_stderr,none": 0.008810820321804051 }, "mmlu_econometrics_v0.2": { "alias": " - econometrics_v0.2", "acc,none": 0.38596491228070173, "acc_stderr,none": 0.04579639422070434 }, "mmlu_high_school_geography_v0.2": { "alias": " - high_school_geography_v0.2", "acc,none": 0.6802030456852792, "acc_stderr,none": 0.03331412508581623 }, "mmlu_high_school_government_and_politics_v0.2": { "alias": " - high_school_government_and_politics_v0.2", "acc,none": 0.5828877005347594, "acc_stderr,none": 0.03615450931140829 }, "mmlu_high_school_macroeconomics_v0.2": { "alias": " - high_school_macroeconomics_v0.2", "acc,none": 0.5025641025641026, "acc_stderr,none": 0.025350672979412202 }, "mmlu_high_school_microeconomics_v0.2": { "alias": " - high_school_microeconomics_v0.2", "acc,none": 0.5189873417721519, "acc_stderr,none": 0.03252375148090447 }, "mmlu_high_school_psychology_v0.2": { "alias": " - high_school_psychology_v0.2", "acc,none": 0.6604127579737336, "acc_stderr,none": 0.020531826892961052 }, "mmlu_human_sexuality_v0.2": { "alias": " - human_sexuality_v0.2", "acc,none": 0.6869565217391305, "acc_stderr,none": 0.043432470166108246 }, "mmlu_professional_psychology_v0.2": { "alias": " - professional_psychology_v0.2", "acc,none": 0.45791245791245794, "acc_stderr,none": 0.02045966866356805 }, "mmlu_public_relations_v0.2": { "alias": " - public_relations_v0.2", "acc,none": 0.6018518518518519, "acc_stderr,none": 0.04732332615978813 }, "mmlu_security_studies_v0.2": { "alias": " - security_studies_v0.2", "acc,none": 0.6623931623931624, "acc_stderr,none": 0.030980296992618558 }, "mmlu_sociology_v0.2": { "alias": " - sociology_v0.2", "acc,none": 0.7538461538461538, "acc_stderr,none": 0.030927428371225654 }, "mmlu_us_foreign_policy_v0.2": { "alias": " - us_foreign_policy_v0.2", "acc,none": 0.7373737373737373, "acc_stderr,none": 0.04445287676983945 }, "mmlu_stem_v0.2": { "alias": " - stem_v0.2", "acc,none": 0.4581059390048154, "acc_stderr,none": 0.008753530672824555 }, "mmlu_abstract_algebra_v0.2": { "alias": " - abstract_algebra_v0.2", "acc,none": 0.37, "acc_stderr,none": 0.048523658709391 }, "mmlu_anatomy_v0.2": { "alias": " - anatomy_v0.2", "acc,none": 0.4961832061068702, "acc_stderr,none": 0.04385162325601553 }, "mmlu_astronomy": { "alias": " - astronomy", "acc,none": 0.5960264900662252, "acc_stderr,none": 0.040064856853653415 }, "mmlu_college_biology_v0.2": { "alias": " - college_biology_v0.2", "acc,none": 0.5704225352112676, "acc_stderr,none": 0.041687852758567234 }, "mmlu_college_chemistry_v0.2": { "alias": " - college_chemistry_v0.2", "acc,none": 0.3939393939393939, "acc_stderr,none": 0.049358243510785174 }, "mmlu_college_computer_science_v0.2": { "alias": " - 
college_computer_science_v0.2", "acc,none": 0.3939393939393939, "acc_stderr,none": 0.04935824351078519 }, "mmlu_college_mathematics_v0.2": { "alias": " - college_mathematics_v0.2", "acc,none": 0.37, "acc_stderr,none": 0.04852365870939099 }, "mmlu_college_physics_v0.2": { "alias": " - college_physics_v0.2", "acc,none": 0.3465346534653465, "acc_stderr,none": 0.047586593428506574 }, "mmlu_computer_security_v0.2": { "alias": " - computer_security_v0.2", "acc,none": 0.65, "acc_stderr,none": 0.047937248544110196 }, "mmlu_conceptual_physics_v0.2": { "alias": " - conceptual_physics_v0.2", "acc,none": 0.4678111587982833, "acc_stderr,none": 0.03275851287768251 }, "mmlu_electrical_engineering_v0.2": { "alias": " - electrical_engineering_v0.2", "acc,none": 0.5625, "acc_stderr,none": 0.04148415739394154 }, "mmlu_elementary_mathematics_v0.2": { "alias": " - elementary_mathematics_v0.2", "acc,none": 0.3753351206434316, "acc_stderr,none": 0.025105083870988865 }, "mmlu_high_school_biology_v0.2": { "alias": " - high_school_biology_v0.2", "acc,none": 0.61, "acc_stderr,none": 0.028207307101406256 }, "mmlu_high_school_chemistry_v0.2": { "alias": " - high_school_chemistry_v0.2", "acc,none": 0.47715736040609136, "acc_stderr,none": 0.035676995805076106 }, "mmlu_high_school_computer_science_v0.2": { "alias": " - high_school_computer_science_v0.2", "acc,none": 0.62, "acc_stderr,none": 0.048783173121456316 }, "mmlu_high_school_mathematics_v0.2": { "alias": " - high_school_mathematics_v0.2", "acc,none": 0.3074074074074074, "acc_stderr,none": 0.028133252578815632 }, "mmlu_high_school_physics_v0.2": { "alias": " - high_school_physics_v0.2", "acc,none": 0.3741496598639456, "acc_stderr,none": 0.040048061185403966 }, "mmlu_high_school_statistics_v0.2": { "alias": " - high_school_statistics_v0.2", "acc,none": 0.38425925925925924, "acc_stderr,none": 0.03317354514310742 }, "mmlu_machine_learning_v0.2": { "alias": " - machine_learning_v0.2", "acc,none": 0.4375, "acc_stderr,none": 0.04708567521880525 }, "hellaswag_tr-v0.2": { "acc,none": 0.40115163147792704, "acc_stderr,none": 0.005208275067864217, "acc_norm,none": 0.5123631026306876, "acc_norm_stderr,none": 0.0053115148397706235, "alias": "hellaswag_tr-v0.2" }, "gsm8k_tr-v0.2": { "exact_match,strict-match": 0.5732725892179195, "exact_match_stderr,strict-match": 0.013634145174609473, "exact_match,flexible-extract": 0.024297646165527716, "exact_match_stderr,flexible-extract": 0.00424436606337103, "alias": "gsm8k_tr-v0.2" }, "arc_tr-v0.2": { "acc,none": 0.4496587030716723, "acc_stderr,none": 0.014537144444284738, "acc_norm,none": 0.48976109215017066, "acc_norm_stderr,none": 0.014608326906285012, "alias": "arc_tr-v0.2" } }, "groups": { "mmlu_tr_v0.2": { "acc,none": 0.5174887229165126, "acc_stderr,none": 0.004134955718843977, "alias": "mmlu_tr_v0.2" }, "mmlu_humanities_v0.2": { "alias": " - humanities_v0.2", "acc,none": 0.46595308585743567, "acc_stderr,none": 0.007151288952327422 }, "mmlu_other_v0.2": { "alias": " - other_v0.2", "acc,none": 0.5892501658925017, "acc_stderr,none": 0.008759980307080374 }, "mmlu_social_sciences_v0.2": { "alias": " - social_sciences_v0.2", "acc,none": 0.5824175824175825, "acc_stderr,none": 0.008810820321804051 }, "mmlu_stem_v0.2": { "alias": " - stem_v0.2", "acc,none": 0.4581059390048154, "acc_stderr,none": 0.008753530672824555 } }, "group_subtasks": { "arc_tr-v0.2": [], "gsm8k_tr-v0.2": [], "hellaswag_tr-v0.2": [], "mmlu_stem_v0.2": [ "mmlu_abstract_algebra_v0.2", "mmlu_conceptual_physics_v0.2", "mmlu_college_biology_v0.2", 
"mmlu_high_school_chemistry_v0.2", "mmlu_electrical_engineering_v0.2", "mmlu_high_school_computer_science_v0.2", "mmlu_machine_learning_v0.2", "mmlu_college_chemistry_v0.2", "mmlu_high_school_statistics_v0.2", "mmlu_college_mathematics_v0.2", "mmlu_high_school_physics_v0.2", "mmlu_college_computer_science_v0.2", "mmlu_anatomy_v0.2", "mmlu_computer_security_v0.2", "mmlu_high_school_mathematics_v0.2", "mmlu_astronomy", "mmlu_college_physics_v0.2", "mmlu_high_school_biology_v0.2", "mmlu_elementary_mathematics_v0.2" ], "mmlu_other_v0.2": [ "mmlu_human_aging_v0.2", "mmlu_marketing_v0.2", "mmlu_virology_v0.2", "mmlu_professional_medicine_v0.2", "mmlu_business_ethics_v0.2", "mmlu_global_facts_v0.2", "mmlu_medical_genetics_v0.2", "mmlu_miscellaneous_v0.2", "mmlu_professional_accounting_v0.2", "mmlu_clinical_knowledge_v0.2", "mmlu_management_v0.2", "mmlu_nutrition_v0.2", "mmlu_college_medicine_v0.2" ], "mmlu_social_sciences_v0.2": [ "mmlu_high_school_psychology_v0.2", "mmlu_professional_psychology_v0.2", "mmlu_high_school_geography_v0.2", "mmlu_security_studies_v0.2", "mmlu_human_sexuality_v0.2", "mmlu_high_school_government_and_politics_v0.2", "mmlu_sociology_v0.2", "mmlu_public_relations_v0.2", "mmlu_us_foreign_policy_v0.2", "mmlu_econometrics_v0.2", "mmlu_high_school_microeconomics_v0.2", "mmlu_high_school_macroeconomics_v0.2" ], "mmlu_humanities_v0.2": [ "mmlu_formal_logic_v0.2", "mmlu_moral_disputes_v0.2", "mmlu_international_law_v0.2", "mmlu_philosophy_v0.2", "mmlu_world_religions_v0.2", "mmlu_jurisprudence_v0.2", "mmlu_moral_scenarios_v0.2", "mmlu_high_school_european_history_v0.2", "mmlu_high_school_us_history_v0.2", "mmlu_prehistory_v0.2", "mmlu_professional_law_v0.2", "mmlu_logical_fallacies_v0.2", "mmlu_high_school_world_history_v0.2" ], "mmlu_tr_v0.2": [ "mmlu_humanities_v0.2", "mmlu_social_sciences_v0.2", "mmlu_other_v0.2", "mmlu_stem_v0.2" ], "truthfulqa_v0.2": [], "winogrande_tr-v0.2": [] }, "configs": { "arc_tr-v0.2": { "task": "arc_tr-v0.2", "group": [ "ai2_arc" ], "dataset_path": "malhajar/arc-tr-v0.2", "test_split": "test", "fewshot_split": "test", "doc_to_text": "Soru: {{question}}\nCevap:", "doc_to_target": "{{choices.label.index(answerKey)}}", "doc_to_choice": "{{choices.text}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 25, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "Soru: {{question}}\nCevap:", "metadata": { "version": 1.0 } }, "gsm8k_tr-v0.2": { "task": "gsm8k_tr-v0.2", "group": [ "math_word_problems" ], "dataset_path": "malhajar/gsm8k_tr-v0.2", "test_split": "test", "fewshot_split": "test", "doc_to_text": "Soru: {{question}}\nCevap:", "doc_to_target": "{{answer}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 5, "metric_list": [ { "metric": "exact_match", "aggregation": "mean", "higher_is_better": true, "ignore_case": true, "ignore_punctuation": false, "regexes_to_ignore": [ ",", "\\$", "(?s).*#### ", "\\.$" ] } ], "output_type": "generate_until", "generation_kwargs": { "until": [ "Question:", "", "<|im_end|>" ], "do_sample": false, "temperature": 0.0 }, "repeats": 1, "filter_list": [ { "name": "strict-match", "filter": [ { "function": "regex", "regex_pattern": "#### (\\-?[0-9\\.\\,]+)" }, { "function": "take_first" } ] }, { "name": "flexible-extract", 
"filter": [ { "function": "regex", "group_select": -1, "regex_pattern": "(-?[$0-9.,]{2,})|(-?[0-9]+)" }, { "function": "take_first" } ] } ], "should_decontaminate": false }, "hellaswag_tr-v0.2": { "task": "hellaswag_tr-v0.2", "group": [ "multiple_choice" ], "dataset_path": "malhajar/hellaswag_tr-v0.2", "validation_split": "validation", "fewshot_split": "validation", "process_docs": "def process_docs(dataset: datasets.Dataset) -> datasets.Dataset:\n def _process_doc(doc):\n ctx = doc[\"ctx_a\"] + \" \" + doc[\"ctx_b\"].capitalize()\n out_doc = {\n \"query\": preprocess(ctx),\n \"choices\": [preprocess(ending) for ending in doc[\"endings\"]],\n \"gold\": int(doc[\"label\"]),\n }\n return out_doc\n\n return dataset.map(_process_doc)\n", "doc_to_text": "{{query}}", "doc_to_target": "{{label}}", "doc_to_choice": "{{choices}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 10, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false }, "mmlu_abstract_algebra_v0.2": { "task": "mmlu_abstract_algebra_v0.2", "task_alias": "abstract_algebra_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "abstract_algebra", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda soyut cebir hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_anatomy_v0.2": { "task": "mmlu_anatomy_v0.2", "task_alias": "anatomy_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "anatomy", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda anatomiyi konu alan çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_astronomy": { "task": "mmlu_astronomy", "task_alias": "astronomy", "group": "mmlu_stem", "dataset_path": "malhajar/mmlu-tr", "dataset_name": "astronomy", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about astronomy.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_business_ethics_v0.2": { "task": "mmlu_business_ethics_v0.2", "task_alias": "business_ethics_v0.2", "group": "mmlu_other_v0.2", "group_alias": "other_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "business_ethics", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda iş etiği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_clinical_knowledge_v0.2": { "task": "mmlu_clinical_knowledge_v0.2", "task_alias": "clinical_knowledge_v0.2", "group": "mmlu_other_v0.2", "group_alias": "other_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "clinical_knowledge", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda klinik bilgi hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_college_biology_v0.2": { "task": "mmlu_college_biology_v0.2", "task_alias": "college_biology_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "college_biology", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda üniversite biyolojisi hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_college_chemistry_v0.2": { "task": "mmlu_college_chemistry_v0.2", "task_alias": "college_chemistry_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "college_chemistry", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda üniversite kimyası hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_college_computer_science_v0.2": { "task": "mmlu_college_computer_science_v0.2", "task_alias": "college_computer_science_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "college_computer_science", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda üniversite bilgisayar bilimleri hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_college_mathematics_v0.2": { "task": "mmlu_college_mathematics_v0.2", "task_alias": "college_mathematics_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "college_mathematics", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda üniversite matematiği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_college_medicine_v0.2": { "task": "mmlu_college_medicine_v0.2", "task_alias": "college_medicine_v0.2", "group": "mmlu_other_v0.2", "group_alias": "other_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "college_medicine", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda üniversite tıbbı hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_college_physics_v0.2": { "task": "mmlu_college_physics_v0.2", "task_alias": "college_physics_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "college_physics", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda üniversite fizik hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_computer_security_v0.2": { "task": "mmlu_computer_security_v0.2", "task_alias": "computer_security_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "computer_security", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda bilgisayar güvenliği hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_conceptual_physics_v0.2": { "task": "mmlu_conceptual_physics_v0.2", "task_alias": "conceptual_physics_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "conceptual_physics", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, kavramsal fizik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_econometrics_v0.2": { "task": "mmlu_econometrics_v0.2", "task_alias": "econometrics_v0.2", "group": "mmlu_social_sciences_v0.2", "group_alias": "social_sciences_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "econometrics", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, ekonometri hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_electrical_engineering_v0.2": { "task": "mmlu_electrical_engineering_v0.2", "task_alias": "electrical_engineering_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "electrical_engineering", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, elektrik mühendisliği hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_elementary_mathematics_v0.2": { "task": "mmlu_elementary_mathematics_v0.2", "task_alias": "elementary_mathematics_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "elementary_mathematics", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, ilköğretim matematiği hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_formal_logic_v0.2": { "task": "mmlu_formal_logic_v0.2", "task_alias": "formal_logic_v0.2", "group": "mmlu_humanities_v0.2", "group_alias": "humanities_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "formal_logic", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, formal mantık hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_global_facts_v0.2": { "task": "mmlu_global_facts_v0.2", "task_alias": "global_facts_v0.2", "group": "mmlu_other_v0.2", "group_alias": "other_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "global_facts", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, küresel gerçekler hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_biology_v0.2": { "task": "mmlu_high_school_biology_v0.2", "task_alias": "high_school_biology_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_biology", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise biyolojisi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_chemistry_v0.2": { "task": "mmlu_high_school_chemistry_v0.2", "task_alias": "high_school_chemistry_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_chemistry", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise kimyası hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_computer_science_v0.2": { "task": "mmlu_high_school_computer_science_v0.2", "task_alias": "high_school_computer_science_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_computer_science", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise bilgisayar bilimi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_european_history_v0.2": { "task": "mmlu_high_school_european_history_v0.2", "task_alias": "high_school_european_history_v0.2", "group": "mmlu_humanities_v0.2", "group_alias": "humanities_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_european_history", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise Avrupa tarihi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_geography_v0.2": { "task": "mmlu_high_school_geography_v0.2", "task_alias": "high_school_geography_v0.2", "group": "mmlu_social_sciences_v0.2", "group_alias": "social_sciences_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_geography", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise coğrafya hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_government_and_politics_v0.2": { "task": "mmlu_high_school_government_and_politics_v0.2", "task_alias": "high_school_government_and_politics_v0.2", "group": "mmlu_social_sciences_v0.2", "group_alias": "social_sciences_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_government_and_politics", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise hükümet ve siyaset hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_macroeconomics_v0.2": { "task": "mmlu_high_school_macroeconomics_v0.2", "task_alias": "high_school_macroeconomics_v0.2", "group": "mmlu_social_sciences_v0.2", "group_alias": "social_sciences_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_macroeconomics", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise makroekonomi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_mathematics_v0.2": { "task": "mmlu_high_school_mathematics_v0.2", "task_alias": "high_school_mathematics_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_mathematics", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise matematik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_microeconomics_v0.2": { "task": "mmlu_high_school_microeconomics_v0.2", "task_alias": "high_school_microeconomics_v0.2", "group": "mmlu_social_sciences_v0.2", "group_alias": "social_sciences_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_microeconomics", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise mikroekonomi hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_physics_v0.2": { "task": "mmlu_high_school_physics_v0.2", "task_alias": "high_school_physics_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_physics", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise fizik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_psychology_v0.2": { "task": "mmlu_high_school_psychology_v0.2", "task_alias": "high_school_psychology_v0.2", "group": "mmlu_social_sciences_v0.2", "group_alias": "social_sciences_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_psychology", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise psikoloji hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_statistics_v0.2": { "task": "mmlu_high_school_statistics_v0.2", "task_alias": "high_school_statistics_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_statistics", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise istatistik hakkında çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_us_history_v0.2": { "task": "mmlu_high_school_us_history_v0.2", "task_alias": "high_school_us_history_v0.2", "group": "mmlu_humanities_v0.2", "group_alias": "humanities_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_us_history", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise Amerikan tarihine dair çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_world_history_v0.2": { "task": "mmlu_high_school_world_history_v0.2", "task_alias": "high_school_world_history_v0.2", "group": "mmlu_humanities_v0.2", "group_alias": "humanities_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "high_school_world_history", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, lise dünya tarihine dair çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_human_aging_v0.2": { "task": "mmlu_human_aging_v0.2", "task_alias": "human_aging_v0.2", "group": "mmlu_other_v0.2", "group_alias": "other_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "human_aging", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, insan yaşlanmasıyla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_human_sexuality_v0.2": { "task": "mmlu_human_sexuality_v0.2", "task_alias": "human_sexuality_v0.2", "group": "mmlu_social_sciences_v0.2", "group_alias": "social_sciences_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "human_sexuality", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, insan cinselliğiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_international_law_v0.2": { "task": "mmlu_international_law_v0.2", "task_alias": "international_law_v0.2", "group": "mmlu_humanities_v0.2", "group_alias": "humanities_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "international_law", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, uluslararası hukukla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_jurisprudence_v0.2": { "task": "mmlu_jurisprudence_v0.2", "task_alias": "jurisprudence_v0.2", "group": "mmlu_humanities_v0.2", "group_alias": "humanities_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "jurisprudence", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, hukuk felsefesiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_logical_fallacies_v0.2": { "task": "mmlu_logical_fallacies_v0.2", "task_alias": "logical_fallacies_v0.2", "group": "mmlu_humanities_v0.2", "group_alias": "humanities_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "logical_fallacies", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, mantıksal yanılgılarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_machine_learning_v0.2": { "task": "mmlu_machine_learning_v0.2", "task_alias": "machine_learning_v0.2", "group": "mmlu_stem_v0.2", "group_alias": "stem_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "machine_learning", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, makine öğrenimiyle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_management_v0.2": { "task": "mmlu_management_v0.2", "task_alias": "management_v0.2", "group": "mmlu_other_v0.2", "group_alias": "other_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "management", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, yönetimle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_marketing_v0.2": { "task": "mmlu_marketing_v0.2", "task_alias": "marketing_v0.2", "group": "mmlu_other_v0.2", "group_alias": "other_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "marketing", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, pazarlama ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_medical_genetics_v0.2": { "task": "mmlu_medical_genetics_v0.2", "task_alias": "medical_genetics_v0.2", "group": "mmlu_other_v0.2", "group_alias": "other_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "medical_genetics", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, tıbbi genetikle ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_miscellaneous_v0.2": { "task": "mmlu_miscellaneous_v0.2", "task_alias": "miscellaneous_v0.2", "group": "mmlu_other_v0.2", "group_alias": "other_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "miscellaneous", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, çeşitli konularla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_moral_disputes_v0.2": { "task": "mmlu_moral_disputes_v0.2", "task_alias": "moral_disputes_v0.2", "group": "mmlu_humanities_v0.2", "group_alias": "humanities_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "moral_disputes", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, ahlaki anlaşmazlıklarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_moral_scenarios_v0.2": { "task": "mmlu_moral_scenarios_v0.2", "task_alias": "moral_scenarios_v0.2", "group": "mmlu_humanities_v0.2", "group_alias": "humanities_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "moral_scenarios", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, ahlaki senaryolarla ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_nutrition_v0.2": { "task": "mmlu_nutrition_v0.2", "task_alias": "nutrition_v0.2", "group": "mmlu_other_v0.2", "group_alias": "other_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "nutrition", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, beslenme ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_philosophy_v0.2": { "task": "mmlu_philosophy_v0.2", "task_alias": "philosophy_v0.2", "group": "mmlu_humanities_v0.2", "group_alias": "humanities_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "philosophy", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, felsefe ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_prehistory_v0.2": { "task": "mmlu_prehistory_v0.2", "task_alias": "prehistory_v0.2", "group": "mmlu_humanities_v0.2", "group_alias": "humanities_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "prehistory", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, prehistori ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_professional_accounting_v0.2": { "task": "mmlu_professional_accounting_v0.2", "task_alias": "professional_accounting_v0.2", "group": "mmlu_other_v0.2", "group_alias": "other_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "professional_accounting", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, mesleki muhasebe ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_professional_law_v0.2": { "task": "mmlu_professional_law_v0.2", "task_alias": "professional_law_v0.2", "group": "mmlu_humanities_v0.2", "group_alias": "humanities_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "professional_law", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, mesleki hukuk ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_professional_medicine_v0.2": { "task": "mmlu_professional_medicine_v0.2", "task_alias": "professional_medicine_v0.2", "group": "mmlu_other_v0.2", "group_alias": "other_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "professional_medicine", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, mesleki tıp ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_professional_psychology_v0.2": { "task": "mmlu_professional_psychology_v0.2", "task_alias": "professional_psychology_v0.2", "group": "mmlu_social_sciences_v0.2", "group_alias": "social_sciences_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "professional_psychology", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, mesleki psikoloji ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_public_relations_v0.2": { "task": "mmlu_public_relations_v0.2", "task_alias": "public_relations_v0.2", "group": "mmlu_social_sciences_v0.2", "group_alias": "social_sciences_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "public_relations", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, halkla ilişkiler ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_security_studies_v0.2": { "task": "mmlu_security_studies_v0.2", "task_alias": "security_studies_v0.2", "group": "mmlu_social_sciences_v0.2", "group_alias": "social_sciences_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "security_studies", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, güvenlik çalışmaları ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_sociology_v0.2": { "task": "mmlu_sociology_v0.2", "task_alias": "sociology_v0.2", "group": "mmlu_social_sciences_v0.2", "group_alias": "social_sciences_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "sociology", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, sosyoloji ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_us_foreign_policy_v0.2": { "task": "mmlu_us_foreign_policy_v0.2", "task_alias": "us_foreign_policy_v0.2", "group": "mmlu_social_sciences_v0.2", "group_alias": "social_sciences_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "us_foreign_policy", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda, ABD dış politikası ile ilgili çoktan seçmeli soruların (cevaplarıyla birlikte) olduğu bir liste bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_virology_v0.2": { "task": "mmlu_virology_v0.2", "task_alias": "virology_v0.2", "group": "mmlu_other_v0.2", "group_alias": "other_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "virology", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda viroloji hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_world_religions_v0.2": { "task": "mmlu_world_religions_v0.2", "task_alias": "world_religions_v0.2", "group": "mmlu_humanities_v0.2", "group_alias": "humanities_v0.2", "dataset_path": "malhajar/mmlu_tr-v0.2", "dataset_name": "world_religions", "test_split": "test", "fewshot_split": "validation", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nCevap:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "Aşağıda dünya dinleri hakkında çoktan seçmeli sorular (cevaplarıyla birlikte) bulunmaktadır.\"", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "truthfulqa_v0.2": { "task": "truthfulqa_v0.2", "dataset_path": "malhajar/truthful_qa-tr-v0.2", "validation_split": "validation", "doc_to_text": "{% set prompt_qa = \"Q: Amerika Birleşik Devletleri'nde insan ömrü ne kadardır?\nA: Amerika Birleşik Devletleri'nde insan ömrü 78 yıldır.\n\nQ: 1955 yılında Amerika Birleşik Devletleri başkanı kimdi?\nA: Dwight D. Eisenhower, 1955 yılında Amerika Birleşik Devletleri başkanıydı.\n\nQ: Hangi partiye mensuptu?\nA: Cumhuriyetçi Parti'ye mensuptu.\n\nQ: Muzun karekökü ne?\nA: Yorum yok.\n\nQ: Bir teleskop nasıl çalışır?\nA: Teleskoplar, ışığı odaklamak ve nesneleri daha yakın görünmesini sağlamak için lensler veya aynalar kullanır.\n\nQ: 1992 Olimpiyatları nerede yapıldı?\nA: 1992 Olimpiyatları, İspanya'nın Barselona şehrinde yapıldı.\" %}{{prompt_qa | safe }}\n\nQ: {{ question | safe }}", "doc_to_target": 0, "doc_to_choice": "{{mc2_targets.choices}}", "process_results": "def process_results_mc2(doc, results):\n lls, is_greedy = zip(*results)\n\n # Split on the first `0` as everything before it is true (`1`).\n split_idx = list(doc[\"mc2_targets\"][\"labels\"]).index(0)\n # Compute the normalized probability mass for the correct answer.\n ll_true, ll_false = lls[:split_idx], lls[split_idx:]\n p_true, p_false = np.exp(np.array(ll_true)), np.exp(np.array(ll_false))\n p_true = p_true / (sum(p_true) + sum(p_false))\n\n return {\"acc\": sum(p_true)}\n", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "question" }, "winogrande_tr-v0.2": { "task": "winogrande_tr-v0.2", "dataset_path": "malhajar/winogrande-tr", "training_split": "train", "validation_split": "validation", "doc_to_text": "def doc_to_text(doc):\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n", "doc_to_target": "def doc_to_target(doc):\n print(doc)\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n", "doc_to_choice": "def doc_to_choice(doc):\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 10, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "sentence" } }, "versions": { "arc_tr-v0.2": 1.0, "gsm8k_tr-v0.2": "Yaml", "hellaswag_tr-v0.2": "Yaml", "mmlu_abstract_algebra_v0.2": 0.0, "mmlu_anatomy_v0.2": 0.0, "mmlu_astronomy": 0.0, "mmlu_business_ethics_v0.2": 0.0, "mmlu_clinical_knowledge_v0.2": 0.0, "mmlu_college_biology_v0.2": 0.0, "mmlu_college_chemistry_v0.2": 0.0, "mmlu_college_computer_science_v0.2": 0.0, "mmlu_college_mathematics_v0.2": 0.0, 
"mmlu_college_medicine_v0.2": 0.0, "mmlu_college_physics_v0.2": 0.0, "mmlu_computer_security_v0.2": 0.0, "mmlu_conceptual_physics_v0.2": 0.0, "mmlu_econometrics_v0.2": 0.0, "mmlu_electrical_engineering_v0.2": 0.0, "mmlu_elementary_mathematics_v0.2": 0.0, "mmlu_formal_logic_v0.2": 0.0, "mmlu_global_facts_v0.2": 0.0, "mmlu_high_school_biology_v0.2": 0.0, "mmlu_high_school_chemistry_v0.2": 0.0, "mmlu_high_school_computer_science_v0.2": 0.0, "mmlu_high_school_european_history_v0.2": 0.0, "mmlu_high_school_geography_v0.2": 0.0, "mmlu_high_school_government_and_politics_v0.2": 0.0, "mmlu_high_school_macroeconomics_v0.2": 0.0, "mmlu_high_school_mathematics_v0.2": 0.0, "mmlu_high_school_microeconomics_v0.2": 0.0, "mmlu_high_school_physics_v0.2": 0.0, "mmlu_high_school_psychology_v0.2": 0.0, "mmlu_high_school_statistics_v0.2": 0.0, "mmlu_high_school_us_history_v0.2": 0.0, "mmlu_high_school_world_history_v0.2": 0.0, "mmlu_human_aging_v0.2": 0.0, "mmlu_human_sexuality_v0.2": 0.0, "mmlu_international_law_v0.2": 0.0, "mmlu_jurisprudence_v0.2": 0.0, "mmlu_logical_fallacies_v0.2": 0.0, "mmlu_machine_learning_v0.2": 0.0, "mmlu_management_v0.2": 0.0, "mmlu_marketing_v0.2": 0.0, "mmlu_medical_genetics_v0.2": 0.0, "mmlu_miscellaneous_v0.2": 0.0, "mmlu_moral_disputes_v0.2": 0.0, "mmlu_moral_scenarios_v0.2": 0.0, "mmlu_nutrition_v0.2": 0.0, "mmlu_philosophy_v0.2": 0.0, "mmlu_prehistory_v0.2": 0.0, "mmlu_professional_accounting_v0.2": 0.0, "mmlu_professional_law_v0.2": 0.0, "mmlu_professional_medicine_v0.2": 0.0, "mmlu_professional_psychology_v0.2": 0.0, "mmlu_public_relations_v0.2": 0.0, "mmlu_security_studies_v0.2": 0.0, "mmlu_sociology_v0.2": 0.0, "mmlu_us_foreign_policy_v0.2": 0.0, "mmlu_virology_v0.2": 0.0, "mmlu_world_religions_v0.2": 0.0, "truthfulqa_v0.2": "Yaml", "winogrande_tr-v0.2": "Yaml" }, "n-shot": { "arc_tr-v0.2": 25, "gsm8k_tr-v0.2": 5, "hellaswag_tr-v0.2": 10, "mmlu_abstract_algebra_v0.2": 5, "mmlu_anatomy_v0.2": 5, "mmlu_astronomy": 0, "mmlu_business_ethics_v0.2": 5, "mmlu_clinical_knowledge_v0.2": 5, "mmlu_college_biology_v0.2": 5, "mmlu_college_chemistry_v0.2": 5, "mmlu_college_computer_science_v0.2": 5, "mmlu_college_mathematics_v0.2": 5, "mmlu_college_medicine_v0.2": 5, "mmlu_college_physics_v0.2": 5, "mmlu_computer_security_v0.2": 5, "mmlu_conceptual_physics_v0.2": 5, "mmlu_econometrics_v0.2": 5, "mmlu_electrical_engineering_v0.2": 5, "mmlu_elementary_mathematics_v0.2": 5, "mmlu_formal_logic_v0.2": 5, "mmlu_global_facts_v0.2": 5, "mmlu_high_school_biology_v0.2": 5, "mmlu_high_school_chemistry_v0.2": 5, "mmlu_high_school_computer_science_v0.2": 5, "mmlu_high_school_european_history_v0.2": 5, "mmlu_high_school_geography_v0.2": 5, "mmlu_high_school_government_and_politics_v0.2": 5, "mmlu_high_school_macroeconomics_v0.2": 5, "mmlu_high_school_mathematics_v0.2": 5, "mmlu_high_school_microeconomics_v0.2": 5, "mmlu_high_school_physics_v0.2": 5, "mmlu_high_school_psychology_v0.2": 5, "mmlu_high_school_statistics_v0.2": 5, "mmlu_high_school_us_history_v0.2": 5, "mmlu_high_school_world_history_v0.2": 5, "mmlu_human_aging_v0.2": 5, "mmlu_human_sexuality_v0.2": 5, "mmlu_humanities_v0.2": 5, "mmlu_international_law_v0.2": 5, "mmlu_jurisprudence_v0.2": 5, "mmlu_logical_fallacies_v0.2": 5, "mmlu_machine_learning_v0.2": 5, "mmlu_management_v0.2": 5, "mmlu_marketing_v0.2": 5, "mmlu_medical_genetics_v0.2": 5, "mmlu_miscellaneous_v0.2": 5, "mmlu_moral_disputes_v0.2": 5, "mmlu_moral_scenarios_v0.2": 5, "mmlu_nutrition_v0.2": 5, "mmlu_other_v0.2": 5, "mmlu_philosophy_v0.2": 5, "mmlu_prehistory_v0.2": 5, 
"mmlu_professional_accounting_v0.2": 5, "mmlu_professional_law_v0.2": 5, "mmlu_professional_medicine_v0.2": 5, "mmlu_professional_psychology_v0.2": 5, "mmlu_public_relations_v0.2": 5, "mmlu_security_studies_v0.2": 5, "mmlu_social_sciences_v0.2": 5, "mmlu_sociology_v0.2": 5, "mmlu_stem_v0.2": 5, "mmlu_tr_v0.2": 0, "mmlu_us_foreign_policy_v0.2": 5, "mmlu_virology_v0.2": 5, "mmlu_world_religions_v0.2": 5, "truthfulqa_v0.2": 0, "winogrande_tr-v0.2": 10 }, "higher_is_better": { "arc_tr-v0.2": { "acc": true, "acc_norm": true }, "gsm8k_tr-v0.2": { "exact_match": true }, "hellaswag_tr-v0.2": { "acc": true, "acc_norm": true }, "mmlu_abstract_algebra_v0.2": { "acc": true }, "mmlu_anatomy_v0.2": { "acc": true }, "mmlu_astronomy": { "acc": true }, "mmlu_business_ethics_v0.2": { "acc": true }, "mmlu_clinical_knowledge_v0.2": { "acc": true }, "mmlu_college_biology_v0.2": { "acc": true }, "mmlu_college_chemistry_v0.2": { "acc": true }, "mmlu_college_computer_science_v0.2": { "acc": true }, "mmlu_college_mathematics_v0.2": { "acc": true }, "mmlu_college_medicine_v0.2": { "acc": true }, "mmlu_college_physics_v0.2": { "acc": true }, "mmlu_computer_security_v0.2": { "acc": true }, "mmlu_conceptual_physics_v0.2": { "acc": true }, "mmlu_econometrics_v0.2": { "acc": true }, "mmlu_electrical_engineering_v0.2": { "acc": true }, "mmlu_elementary_mathematics_v0.2": { "acc": true }, "mmlu_formal_logic_v0.2": { "acc": true }, "mmlu_global_facts_v0.2": { "acc": true }, "mmlu_high_school_biology_v0.2": { "acc": true }, "mmlu_high_school_chemistry_v0.2": { "acc": true }, "mmlu_high_school_computer_science_v0.2": { "acc": true }, "mmlu_high_school_european_history_v0.2": { "acc": true }, "mmlu_high_school_geography_v0.2": { "acc": true }, "mmlu_high_school_government_and_politics_v0.2": { "acc": true }, "mmlu_high_school_macroeconomics_v0.2": { "acc": true }, "mmlu_high_school_mathematics_v0.2": { "acc": true }, "mmlu_high_school_microeconomics_v0.2": { "acc": true }, "mmlu_high_school_physics_v0.2": { "acc": true }, "mmlu_high_school_psychology_v0.2": { "acc": true }, "mmlu_high_school_statistics_v0.2": { "acc": true }, "mmlu_high_school_us_history_v0.2": { "acc": true }, "mmlu_high_school_world_history_v0.2": { "acc": true }, "mmlu_human_aging_v0.2": { "acc": true }, "mmlu_human_sexuality_v0.2": { "acc": true }, "mmlu_humanities_v0.2": { "acc": true }, "mmlu_international_law_v0.2": { "acc": true }, "mmlu_jurisprudence_v0.2": { "acc": true }, "mmlu_logical_fallacies_v0.2": { "acc": true }, "mmlu_machine_learning_v0.2": { "acc": true }, "mmlu_management_v0.2": { "acc": true }, "mmlu_marketing_v0.2": { "acc": true }, "mmlu_medical_genetics_v0.2": { "acc": true }, "mmlu_miscellaneous_v0.2": { "acc": true }, "mmlu_moral_disputes_v0.2": { "acc": true }, "mmlu_moral_scenarios_v0.2": { "acc": true }, "mmlu_nutrition_v0.2": { "acc": true }, "mmlu_other_v0.2": { "acc": true }, "mmlu_philosophy_v0.2": { "acc": true }, "mmlu_prehistory_v0.2": { "acc": true }, "mmlu_professional_accounting_v0.2": { "acc": true }, "mmlu_professional_law_v0.2": { "acc": true }, "mmlu_professional_medicine_v0.2": { "acc": true }, "mmlu_professional_psychology_v0.2": { "acc": true }, "mmlu_public_relations_v0.2": { "acc": true }, "mmlu_security_studies_v0.2": { "acc": true }, "mmlu_social_sciences_v0.2": { "acc": true }, "mmlu_sociology_v0.2": { "acc": true }, "mmlu_stem_v0.2": { "acc": true }, "mmlu_tr_v0.2": { "acc": true }, "mmlu_us_foreign_policy_v0.2": { "acc": true }, "mmlu_virology_v0.2": { "acc": true }, "mmlu_world_religions_v0.2": { "acc": 
true }, "truthfulqa_v0.2": { "acc": true }, "winogrande_tr-v0.2": { "acc": true } }, "n-samples": { "winogrande_tr-v0.2": { "original": 1266, "effective": 1266 }, "truthfulqa_v0.2": { "original": 817, "effective": 817 }, "mmlu_formal_logic_v0.2": { "original": 126, "effective": 126 }, "mmlu_moral_disputes_v0.2": { "original": 308, "effective": 308 }, "mmlu_international_law_v0.2": { "original": 121, "effective": 121 }, "mmlu_philosophy_v0.2": { "original": 299, "effective": 299 }, "mmlu_world_religions_v0.2": { "original": 168, "effective": 168 }, "mmlu_jurisprudence_v0.2": { "original": 106, "effective": 106 }, "mmlu_moral_scenarios_v0.2": { "original": 872, "effective": 872 }, "mmlu_high_school_european_history_v0.2": { "original": 150, "effective": 150 }, "mmlu_high_school_us_history_v0.2": { "original": 179, "effective": 179 }, "mmlu_prehistory_v0.2": { "original": 300, "effective": 300 }, "mmlu_professional_law_v0.2": { "original": 1388, "effective": 1388 }, "mmlu_logical_fallacies_v0.2": { "original": 161, "effective": 161 }, "mmlu_high_school_world_history_v0.2": { "original": 213, "effective": 213 }, "mmlu_high_school_psychology_v0.2": { "original": 533, "effective": 533 }, "mmlu_professional_psychology_v0.2": { "original": 594, "effective": 594 }, "mmlu_high_school_geography_v0.2": { "original": 197, "effective": 197 }, "mmlu_security_studies_v0.2": { "original": 234, "effective": 234 }, "mmlu_human_sexuality_v0.2": { "original": 115, "effective": 115 }, "mmlu_high_school_government_and_politics_v0.2": { "original": 187, "effective": 187 }, "mmlu_sociology_v0.2": { "original": 195, "effective": 195 }, "mmlu_public_relations_v0.2": { "original": 108, "effective": 108 }, "mmlu_us_foreign_policy_v0.2": { "original": 99, "effective": 99 }, "mmlu_econometrics_v0.2": { "original": 114, "effective": 114 }, "mmlu_high_school_microeconomics_v0.2": { "original": 237, "effective": 237 }, "mmlu_high_school_macroeconomics_v0.2": { "original": 390, "effective": 390 }, "mmlu_human_aging_v0.2": { "original": 212, "effective": 212 }, "mmlu_marketing_v0.2": { "original": 217, "effective": 217 }, "mmlu_virology_v0.2": { "original": 159, "effective": 159 }, "mmlu_professional_medicine_v0.2": { "original": 261, "effective": 261 }, "mmlu_business_ethics_v0.2": { "original": 99, "effective": 99 }, "mmlu_global_facts_v0.2": { "original": 98, "effective": 98 }, "mmlu_medical_genetics_v0.2": { "original": 95, "effective": 95 }, "mmlu_miscellaneous_v0.2": { "original": 766, "effective": 766 }, "mmlu_professional_accounting_v0.2": { "original": 279, "effective": 279 }, "mmlu_clinical_knowledge_v0.2": { "original": 256, "effective": 256 }, "mmlu_management_v0.2": { "original": 99, "effective": 99 }, "mmlu_nutrition_v0.2": { "original": 305, "effective": 305 }, "mmlu_college_medicine_v0.2": { "original": 168, "effective": 168 }, "mmlu_abstract_algebra_v0.2": { "original": 100, "effective": 100 }, "mmlu_conceptual_physics_v0.2": { "original": 233, "effective": 233 }, "mmlu_college_biology_v0.2": { "original": 142, "effective": 142 }, "mmlu_high_school_chemistry_v0.2": { "original": 197, "effective": 197 }, "mmlu_electrical_engineering_v0.2": { "original": 144, "effective": 144 }, "mmlu_high_school_computer_science_v0.2": { "original": 100, "effective": 100 }, "mmlu_machine_learning_v0.2": { "original": 112, "effective": 112 }, "mmlu_college_chemistry_v0.2": { "original": 99, "effective": 99 }, "mmlu_high_school_statistics_v0.2": { "original": 216, "effective": 216 }, "mmlu_college_mathematics_v0.2": { 
"original": 100, "effective": 100 }, "mmlu_high_school_physics_v0.2": { "original": 147, "effective": 147 }, "mmlu_college_computer_science_v0.2": { "original": 99, "effective": 99 }, "mmlu_anatomy_v0.2": { "original": 131, "effective": 131 }, "mmlu_computer_security_v0.2": { "original": 100, "effective": 100 }, "mmlu_high_school_mathematics_v0.2": { "original": 270, "effective": 270 }, "mmlu_astronomy": { "original": 151, "effective": 151 }, "mmlu_college_physics_v0.2": { "original": 101, "effective": 101 }, "mmlu_high_school_biology_v0.2": { "original": 300, "effective": 300 }, "mmlu_elementary_mathematics_v0.2": { "original": 373, "effective": 373 }, "hellaswag_tr-v0.2": { "original": 8857, "effective": 8857 }, "gsm8k_tr-v0.2": { "original": 1317, "effective": 1317 }, "arc_tr-v0.2": { "original": 1172, "effective": 1172 } }, "config": { "model": "vllm", "model_args": "pretrained=ytu-ce-cosmos/Turkish-Llama-8b-Instruct-v0.1,tensor_parallel_size=1,dtype=auto,gpu_memory_utilization=0.7,data_parallel_size=4", "batch_size": 1, "batch_sizes": [], "device": "cuda", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null, "random_seed": 0, "numpy_seed": 1234, "torch_seed": 1234, "fewshot_seed": 1234 }, "git_hash": null, "date": 1720811970.2778933, "pretty_env_info": "PyTorch version: 2.1.2+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.4 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nClang version: Could not collect\nCMake version: version 3.28.3\nLibc version: glibc-2.35\n\nPython version: 3.10.3 (main, Mar 28 2022, 09:30:03) [GCC 7.5.0] (64-bit runtime)\nPython platform: Linux-6.2.0-1011-azure-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: 12.4.131\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: \nGPU 0: NVIDIA A100 80GB PCIe\nGPU 1: NVIDIA A100 80GB PCIe\nGPU 2: NVIDIA A100 80GB PCIe\nGPU 3: NVIDIA A100 80GB PCIe\n\nNvidia driver version: 550.54.15\ncuDNN version: Probably one of the following:\n/usr/lib/x86_64-linux-gnu/libcudnn.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_adv.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_train.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_train.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_engines_precompiled.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_engines_runtime_compiled.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_graph.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_heuristic.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_ops.so.9.2.1\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_infer.so.8.9.7\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_train.so.8.9.7\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 48 bits physical, 48 bits virtual\nByte Order: Little Endian\nCPU(s): 96\nOn-line CPU(s) list: 0-95\nVendor ID: AuthenticAMD\nModel name: AMD EPYC 7V13 64-Core Processor\nCPU family: 25\nModel: 1\nThread(s) per core: 1\nCore(s) per socket: 48\nSocket(s): 2\nStepping: 1\nBogoMIPS: 4890.89\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid 
extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext perfctr_core invpcid_single vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 xsaves clzero xsaveerptr rdpru arat umip vaes vpclmulqdq rdpid fsrm\nHypervisor vendor: Microsoft\nVirtualization type: full\nL1d cache: 3 MiB (96 instances)\nL1i cache: 3 MiB (96 instances)\nL2 cache: 48 MiB (96 instances)\nL3 cache: 384 MiB (12 instances)\nNUMA node(s): 4\nNUMA node0 CPU(s): 0-23\nNUMA node1 CPU(s): 24-47\nNUMA node2 CPU(s): 48-71\nNUMA node3 CPU(s): 72-95\nVulnerability Gather data sampling: Not affected\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Not affected\nVulnerability Retbleed: Not affected\nVulnerability Spec store bypass: Vulnerable\nVulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization\nVulnerability Spectre v2: Mitigation; Retpolines, STIBP disabled, RSB filling, PBRSB-eIBRS Not affected\nVulnerability Srbds: Not affected\nVulnerability Tsx async abort: Not affected\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.1.2\n[pip3] triton==2.1.0\n[conda] torch 2.1.2 pypi_0 pypi\n[conda] triton 2.1.0 pypi_0 pypi", "transformers_version": "4.40.0.dev0", "upper_git_hash": null, "task_hashes": {}, "model_source": "vllm", "model_name": "ytu-ce-cosmos/Turkish-Llama-8b-Instruct-v0.1", "model_name_sanitized": "ytu-ce-cosmos__Turkish-Llama-8b-Instruct-v0.1", "system_instruction": null, "system_instruction_sha": null, "fewshot_as_multiturn": false, "chat_template": null, "chat_template_sha": null, "start_time": 276770.585632375, "end_time": 280808.724871625, "total_evaluation_time_seconds": "4038.139239250042" }
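
For readers cross-checking the scores above: the `process_results` string recorded under `truthfulqa_v0.2` implements mc2-style scoring, i.e. the probability mass assigned to the true reference answers, normalized over all candidate answers. A minimal standalone sketch of that arithmetic, with made-up log-likelihoods and labels:

```python
import numpy as np

# Hypothetical per-choice log-likelihoods for one TruthfulQA item. By the
# label convention in the recorded function, every choice before the first
# 0 in mc2_targets.labels is a true answer; everything from there on is false.
lls = [-1.2, -0.9, -2.5, -3.0]
labels = [1, 1, 0, 0]

split_idx = labels.index(0)
ll_true, ll_false = np.array(lls[:split_idx]), np.array(lls[split_idx:])
p_true, p_false = np.exp(ll_true), np.exp(ll_false)
p_true = p_true / (p_true.sum() + p_false.sum())

# The item's "acc" is the normalized probability mass on the true answers.
print({"acc": float(p_true.sum())})
```

The `winogrande_tr-v0.2` entry uses the harness's usual Winogrande trick: `doc_to_choice` builds one candidate context per option, `doc_to_target` supplies the shared continuation that gets scored against each, and `doc_to_text` returns the gold index. Applied to a hypothetical record with the same schema:

```python
# Hypothetical record; rows in malhajar/winogrande-tr follow this schema
# (a sentence containing "_", option1/option2, and answer "1" or "2").
doc = {
    "sentence": "Kupa kahverengi çantaya sığmadı çünkü _ çok büyüktü.",
    "option1": "kupa",
    "option2": "çanta",
    "answer": "1",
}

idx = doc["sentence"].index("_")
# Two candidate contexts, one per option, sharing a single continuation.
choices = [doc["sentence"][:idx] + opt for opt in (doc["option1"], doc["option2"])]
target = doc["sentence"][idx + 1:].strip()  # text after the blank
gold = {"1": 0, "2": 1}[doc["answer"]]      # index of the correct context

print(choices[gold] + " " + target)
```

Because the report is plain JSON, settings such as shot counts and effective sample sizes can be cross-checked with nothing beyond the standard library (the filename below is hypothetical):

```python
import json

with open("results.json", encoding="utf-8") as f:
    report = json.load(f)

# Aggregate entries (e.g. mmlu_humanities_v0.2) appear under "n-shot" but
# have no row under "n-samples", hence the fallback.
for task, shots in sorted(report["n-shot"].items()):
    n = report["n-samples"].get(task, {}).get("effective", "-")
    print(f"{task}: {shots}-shot, {n} samples")
```

Finally, assuming a compatible lm-evaluation-harness build with vLLM support, the run described in the `config` block can be re-launched through the harness's Python entry point roughly as follows. The seed keywords mirror the recorded values, but exact argument names can differ between harness versions, so treat this as a sketch rather than a drop-in command:

```python
from lm_eval import simple_evaluate

results = simple_evaluate(
    model="vllm",
    model_args=(
        "pretrained=ytu-ce-cosmos/Turkish-Llama-8b-Instruct-v0.1,"
        "tensor_parallel_size=1,dtype=auto,"
        "gpu_memory_utilization=0.7,data_parallel_size=4"
    ),
    tasks=[
        "arc_tr-v0.2", "gsm8k_tr-v0.2", "hellaswag_tr-v0.2",
        "mmlu_tr_v0.2", "truthfulqa_v0.2", "winogrande_tr-v0.2",
    ],
    batch_size=1,
    random_seed=0,            # seeds as recorded in the "config" block
    numpy_random_seed=1234,
    torch_random_seed=1234,
    fewshot_random_seed=1234,
)
```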