Top-level fields, each a dict, in the order their contents appear below:

config_general
results
versions
config_tasks
summary_tasks
summary_general
{ "lighteval_sha": "?", "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null, "job_id": "", "start_time": 632.338974387, "end_time": 30421.902729126, "total_evaluation_time_secondes": "29789.563754739", "model_name": "tanliboy/lambda-qwen2.5-14b-dpo-test", "model_sha": "96607eea3c67f14f73e576580610dba7530c5dd9", "model_dtype": "torch.bfloat16", "model_size": "27.51 GB", "config": null }
{ "community|acva:Algeria|0": { "acc_norm": 0.7384615384615385, "acc_norm_stderr": 0.031552288027427614 }, "community|acva:Ancient_Egypt|0": { "acc_norm": 0.1111111111111111, "acc_norm_stderr": 0.01773526409928044 }, "community|acva:Arab_Empire|0": { "acc_norm": 0.3660377358490566, "acc_norm_stderr": 0.029647813539365245 }, "community|acva:Arabic_Architecture|0": { "acc_norm": 0.6205128205128205, "acc_norm_stderr": 0.03483959266365359 }, "community|acva:Arabic_Art|0": { "acc_norm": 0.36923076923076925, "acc_norm_stderr": 0.03464841141863756 }, "community|acva:Arabic_Astronomy|0": { "acc_norm": 0.47692307692307695, "acc_norm_stderr": 0.0358596530894741 }, "community|acva:Arabic_Calligraphy|0": { "acc_norm": 0.5098039215686274, "acc_norm_stderr": 0.031366758767809214 }, "community|acva:Arabic_Ceremony|0": { "acc_norm": 0.5567567567567567, "acc_norm_stderr": 0.036622239513304726 }, "community|acva:Arabic_Clothing|0": { "acc_norm": 0.48205128205128206, "acc_norm_stderr": 0.0358747709877383 }, "community|acva:Arabic_Culture|0": { "acc_norm": 0.29743589743589743, "acc_norm_stderr": 0.03282001717838809 }, "community|acva:Arabic_Food|0": { "acc_norm": 0.5692307692307692, "acc_norm_stderr": 0.0355521325205876 }, "community|acva:Arabic_Funeral|0": { "acc_norm": 0.4, "acc_norm_stderr": 0.050529115263991134 }, "community|acva:Arabic_Geography|0": { "acc_norm": 0.7241379310344828, "acc_norm_stderr": 0.037245636197746325 }, "community|acva:Arabic_History|0": { "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03384487217112063 }, "community|acva:Arabic_Language_Origin|0": { "acc_norm": 0.631578947368421, "acc_norm_stderr": 0.049753325624911644 }, "community|acva:Arabic_Literature|0": { "acc_norm": 0.6551724137931034, "acc_norm_stderr": 0.03960933549451208 }, "community|acva:Arabic_Math|0": { "acc_norm": 0.4, "acc_norm_stderr": 0.035172622905632896 }, "community|acva:Arabic_Medicine|0": { "acc_norm": 0.7379310344827587, "acc_norm_stderr": 0.036646663372252565 }, "community|acva:Arabic_Music|0": { "acc_norm": 0.2446043165467626, "acc_norm_stderr": 0.03659146222520568 }, "community|acva:Arabic_Ornament|0": { "acc_norm": 0.5076923076923077, "acc_norm_stderr": 0.03589365940635213 }, "community|acva:Arabic_Philosophy|0": { "acc_norm": 0.6275862068965518, "acc_norm_stderr": 0.04028731532947558 }, "community|acva:Arabic_Physics_and_Chemistry|0": { "acc_norm": 0.6974358974358974, "acc_norm_stderr": 0.032980708700856204 }, "community|acva:Arabic_Wedding|0": { "acc_norm": 0.4153846153846154, "acc_norm_stderr": 0.03538013280575029 }, "community|acva:Bahrain|0": { "acc_norm": 0.5111111111111111, "acc_norm_stderr": 0.07535922203472523 }, "community|acva:Comoros|0": { "acc_norm": 0.6, "acc_norm_stderr": 0.07385489458759965 }, "community|acva:Egypt_modern|0": { "acc_norm": 0.5894736842105263, "acc_norm_stderr": 0.05073863564551209 }, "community|acva:InfluenceFromAncientEgypt|0": { "acc_norm": 0.6051282051282051, "acc_norm_stderr": 0.03509545602262038 }, "community|acva:InfluenceFromByzantium|0": { "acc_norm": 0.7103448275862069, "acc_norm_stderr": 0.03780019230438015 }, "community|acva:InfluenceFromChina|0": { "acc_norm": 0.2512820512820513, "acc_norm_stderr": 0.031141461571214363 }, "community|acva:InfluenceFromGreece|0": { "acc_norm": 0.6410256410256411, "acc_norm_stderr": 0.03444042881521375 }, "community|acva:InfluenceFromIslam|0": { "acc_norm": 0.3448275862068966, "acc_norm_stderr": 0.039609335494512087 }, "community|acva:InfluenceFromPersia|0": { "acc_norm": 0.6971428571428572, "acc_norm_stderr": 
0.03483414676585985 }, "community|acva:InfluenceFromRome|0": { "acc_norm": 0.5846153846153846, "acc_norm_stderr": 0.03538013280575029 }, "community|acva:Iraq|0": { "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.04628210543937906 }, "community|acva:Islam_Education|0": { "acc_norm": 0.5435897435897435, "acc_norm_stderr": 0.03576123096991214 }, "community|acva:Islam_branches_and_schools|0": { "acc_norm": 0.5314285714285715, "acc_norm_stderr": 0.037829946546821834 }, "community|acva:Islamic_law_system|0": { "acc_norm": 0.5282051282051282, "acc_norm_stderr": 0.03584074674920833 }, "community|acva:Jordan|0": { "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.0752101433090355 }, "community|acva:Kuwait|0": { "acc_norm": 0.4, "acc_norm_stderr": 0.07385489458759964 }, "community|acva:Lebanon|0": { "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.0752101433090355 }, "community|acva:Libya|0": { "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.06666666666666668 }, "community|acva:Mauritania|0": { "acc_norm": 0.7111111111111111, "acc_norm_stderr": 0.06832943242540508 }, "community|acva:Mesopotamia_civilization|0": { "acc_norm": 0.6129032258064516, "acc_norm_stderr": 0.03925051588729523 }, "community|acva:Morocco|0": { "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.07446027270295806 }, "community|acva:Oman|0": { "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.07309112127323451 }, "community|acva:Palestine|0": { "acc_norm": 0.6352941176470588, "acc_norm_stderr": 0.052519327704200834 }, "community|acva:Qatar|0": { "acc_norm": 0.6888888888888889, "acc_norm_stderr": 0.06979205927323111 }, "community|acva:Saudi_Arabia|0": { "acc_norm": 0.8, "acc_norm_stderr": 0.028718326344709496 }, "community|acva:Somalia|0": { "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.0752101433090355 }, "community|acva:Sudan|0": { "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.07491109582924915 }, "community|acva:Syria|0": { "acc_norm": 0.6222222222222222, "acc_norm_stderr": 0.07309112127323451 }, "community|acva:Tunisia|0": { "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.07491109582924915 }, "community|acva:United_Arab_Emirates|0": { "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.05214148590752461 }, "community|acva:Yemen|0": { "acc_norm": 0.5, "acc_norm_stderr": 0.16666666666666666 }, "community|acva:communication|0": { "acc_norm": 0.4835164835164835, "acc_norm_stderr": 0.026228929294059487 }, "community|acva:computer_and_phone|0": { "acc_norm": 0.4542372881355932, "acc_norm_stderr": 0.029038197586234566 }, "community|acva:daily_life|0": { "acc_norm": 0.2878338278931751, "acc_norm_stderr": 0.024699715357282315 }, "community|acva:entertainment|0": { "acc_norm": 0.25084745762711863, "acc_norm_stderr": 0.02528228458238144 }, "community|alghafa:mcq_exams_test_ar|0": { "acc_norm": 0.4649910233393178, "acc_norm_stderr": 0.021152681155344338 }, "community|alghafa:meta_ar_dialects|0": { "acc_norm": 0.5169601482854495, "acc_norm_stderr": 0.006804003725617026 }, "community|alghafa:meta_ar_msa|0": { "acc_norm": 0.553072625698324, "acc_norm_stderr": 0.016628030039647614 }, "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.05799451149344531 }, "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.03622779862191887 }, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { "acc_norm": 0.5533333333333333, "acc_norm_stderr": 
0.04072790343023465 }, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { "acc_norm": 0.793746091307067, "acc_norm_stderr": 0.004525429688507848 }, "community|alghafa:multiple_choice_rating_sentiment_task|0": { "acc_norm": 0.5567973311092577, "acc_norm_stderr": 0.0064163993571565445 }, "community|alghafa:multiple_choice_sentiment_task|0": { "acc_norm": 0.4191860465116279, "acc_norm_stderr": 0.011901014688814287 }, "community|arabic_exams|0": { "acc_norm": 0.553072625698324, "acc_norm_stderr": 0.021474702941383872 }, "community|arabic_mmlu:abstract_algebra|0": { "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "community|arabic_mmlu:anatomy|0": { "acc_norm": 0.45925925925925926, "acc_norm_stderr": 0.04304979692464242 }, "community|arabic_mmlu:astronomy|0": { "acc_norm": 0.75, "acc_norm_stderr": 0.03523807393012047 }, "community|arabic_mmlu:business_ethics|0": { "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "community|arabic_mmlu:clinical_knowledge|0": { "acc_norm": 0.6716981132075471, "acc_norm_stderr": 0.02890159361241178 }, "community|arabic_mmlu:college_biology|0": { "acc_norm": 0.6597222222222222, "acc_norm_stderr": 0.039621355734862175 }, "community|arabic_mmlu:college_chemistry|0": { "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "community|arabic_mmlu:college_computer_science|0": { "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "community|arabic_mmlu:college_mathematics|0": { "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "community|arabic_mmlu:college_medicine|0": { "acc_norm": 0.5202312138728323, "acc_norm_stderr": 0.03809342081273957 }, "community|arabic_mmlu:college_physics|0": { "acc_norm": 0.46078431372549017, "acc_norm_stderr": 0.049598599663841815 }, "community|arabic_mmlu:computer_security|0": { "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145633 }, "community|arabic_mmlu:conceptual_physics|0": { "acc_norm": 0.6723404255319149, "acc_norm_stderr": 0.030683020843231008 }, "community|arabic_mmlu:econometrics|0": { "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "community|arabic_mmlu:electrical_engineering|0": { "acc_norm": 0.5103448275862069, "acc_norm_stderr": 0.04165774775728762 }, "community|arabic_mmlu:elementary_mathematics|0": { "acc_norm": 0.6772486772486772, "acc_norm_stderr": 0.024078943243597016 }, "community|arabic_mmlu:formal_logic|0": { "acc_norm": 0.5158730158730159, "acc_norm_stderr": 0.044698818540726076 }, "community|arabic_mmlu:global_facts|0": { "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "community|arabic_mmlu:high_school_biology|0": { "acc_norm": 0.6903225806451613, "acc_norm_stderr": 0.026302774983517418 }, "community|arabic_mmlu:high_school_chemistry|0": { "acc_norm": 0.6059113300492611, "acc_norm_stderr": 0.034381579670365446 }, "community|arabic_mmlu:high_school_computer_science|0": { "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932261 }, "community|arabic_mmlu:high_school_european_history|0": { "acc_norm": 0.2909090909090909, "acc_norm_stderr": 0.03546563019624336 }, "community|arabic_mmlu:high_school_geography|0": { "acc_norm": 0.7424242424242424, "acc_norm_stderr": 0.031156269519646836 }, "community|arabic_mmlu:high_school_government_and_politics|0": { "acc_norm": 0.6787564766839378, "acc_norm_stderr": 0.033699508685490674 }, "community|arabic_mmlu:high_school_macroeconomics|0": { "acc_norm": 0.6974358974358974, "acc_norm_stderr": 0.023290888053772732 }, "community|arabic_mmlu:high_school_mathematics|0": { 
"acc_norm": 0.5148148148148148, "acc_norm_stderr": 0.03047215324932859 }, "community|arabic_mmlu:high_school_microeconomics|0": { "acc_norm": 0.7394957983193278, "acc_norm_stderr": 0.02851025151234193 }, "community|arabic_mmlu:high_school_physics|0": { "acc_norm": 0.4370860927152318, "acc_norm_stderr": 0.04050035722230636 }, "community|arabic_mmlu:high_school_psychology|0": { "acc_norm": 0.6770642201834862, "acc_norm_stderr": 0.020048115923415325 }, "community|arabic_mmlu:high_school_statistics|0": { "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.03324708911809117 }, "community|arabic_mmlu:high_school_us_history|0": { "acc_norm": 0.29901960784313725, "acc_norm_stderr": 0.03213325717373616 }, "community|arabic_mmlu:high_school_world_history|0": { "acc_norm": 0.3628691983122363, "acc_norm_stderr": 0.031299208255302136 }, "community|arabic_mmlu:human_aging|0": { "acc_norm": 0.5874439461883408, "acc_norm_stderr": 0.03304062175449297 }, "community|arabic_mmlu:human_sexuality|0": { "acc_norm": 0.6335877862595419, "acc_norm_stderr": 0.04225875451969638 }, "community|arabic_mmlu:international_law|0": { "acc_norm": 0.8264462809917356, "acc_norm_stderr": 0.0345727283691767 }, "community|arabic_mmlu:jurisprudence|0": { "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.044531975073749834 }, "community|arabic_mmlu:logical_fallacies|0": { "acc_norm": 0.5950920245398773, "acc_norm_stderr": 0.03856672163548914 }, "community|arabic_mmlu:machine_learning|0": { "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "community|arabic_mmlu:management|0": { "acc_norm": 0.6699029126213593, "acc_norm_stderr": 0.0465614711001235 }, "community|arabic_mmlu:marketing|0": { "acc_norm": 0.811965811965812, "acc_norm_stderr": 0.025598193686652265 }, "community|arabic_mmlu:medical_genetics|0": { "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "community|arabic_mmlu:miscellaneous|0": { "acc_norm": 0.6934865900383141, "acc_norm_stderr": 0.016486952893041508 }, "community|arabic_mmlu:moral_disputes|0": { "acc_norm": 0.6358381502890174, "acc_norm_stderr": 0.025906632631016113 }, "community|arabic_mmlu:moral_scenarios|0": { "acc_norm": 0.48379888268156424, "acc_norm_stderr": 0.016713720729501023 }, "community|arabic_mmlu:nutrition|0": { "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.02736359328468496 }, "community|arabic_mmlu:philosophy|0": { "acc_norm": 0.6237942122186495, "acc_norm_stderr": 0.02751392568354943 }, "community|arabic_mmlu:prehistory|0": { "acc_norm": 0.6327160493827161, "acc_norm_stderr": 0.026822801759507887 }, "community|arabic_mmlu:professional_accounting|0": { "acc_norm": 0.4326241134751773, "acc_norm_stderr": 0.02955545423677887 }, "community|arabic_mmlu:professional_law|0": { "acc_norm": 0.3546284224250326, "acc_norm_stderr": 0.012218576439090165 }, "community|arabic_mmlu:professional_medicine|0": { "acc_norm": 0.33455882352941174, "acc_norm_stderr": 0.028661996202335307 }, "community|arabic_mmlu:professional_psychology|0": { "acc_norm": 0.5751633986928104, "acc_norm_stderr": 0.01999797303545834 }, "community|arabic_mmlu:public_relations|0": { "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.044612721759105085 }, "community|arabic_mmlu:security_studies|0": { "acc_norm": 0.6979591836734694, "acc_norm_stderr": 0.0293936093198798 }, "community|arabic_mmlu:sociology|0": { "acc_norm": 0.7014925373134329, "acc_norm_stderr": 0.03235743789355043 }, "community|arabic_mmlu:us_foreign_policy|0": { "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, 
"community|arabic_mmlu:virology|0": { "acc_norm": 0.4578313253012048, "acc_norm_stderr": 0.0387862677100236 }, "community|arabic_mmlu:world_religions|0": { "acc_norm": 0.6783625730994152, "acc_norm_stderr": 0.03582529442573122 }, "community|arc_challenge_okapi_ar|0": { "acc_norm": 0.621551724137931, "acc_norm_stderr": 0.014246237665660657 }, "community|arc_easy_ar|0": { "acc_norm": 0.6053299492385786, "acc_norm_stderr": 0.010054982191001633 }, "community|boolq_ar|0": { "acc_norm": 0.7773006134969325, "acc_norm_stderr": 0.007288058253930421 }, "community|copa_ext_ar|0": { "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.051674686932038624 }, "community|hellaswag_okapi_ar|0": { "acc_norm": 0.4711590884309236, "acc_norm_stderr": 0.005212686916328874 }, "community|openbook_qa_ext_ar|0": { "acc_norm": 0.5616161616161616, "acc_norm_stderr": 0.022324595132484144 }, "community|piqa_ar|0": { "acc_norm": 0.7490452809601745, "acc_norm_stderr": 0.010129522765571167 }, "community|race_ar|0": { "acc_norm": 0.5532562385879489, "acc_norm_stderr": 0.007082018526851182 }, "community|sciq_ar|0": { "acc_norm": 0.6763819095477387, "acc_norm_stderr": 0.01483949750163061 }, "community|toxigen_ar|0": { "acc_norm": 0.8160427807486631, "acc_norm_stderr": 0.012677728399148798 }, "lighteval|xstory_cloze:ar|0": { "acc": 0.7054930509596293, "acc_stderr": 0.011730206832052016 }, "community|acva:_average|0": { "acc_norm": 0.5283962858817328, "acc_norm_stderr": 0.047305264934042635 }, "community|alghafa:_average|0": { "acc_norm": 0.5694170295834494, "acc_norm_stderr": 0.02248641913340961 }, "community|arabic_mmlu:_average|0": { "acc_norm": 0.5866146754947349, "acc_norm_stderr": 0.03558128501066703 }, "all": { "acc_norm": 0.5644788284012293, "acc_norm_stderr": 0.03815719334229043, "acc": 0.7054930509596293, "acc_stderr": 0.011730206832052016 } }
{ "community|acva:Algeria|0": 0, "community|acva:Ancient_Egypt|0": 0, "community|acva:Arab_Empire|0": 0, "community|acva:Arabic_Architecture|0": 0, "community|acva:Arabic_Art|0": 0, "community|acva:Arabic_Astronomy|0": 0, "community|acva:Arabic_Calligraphy|0": 0, "community|acva:Arabic_Ceremony|0": 0, "community|acva:Arabic_Clothing|0": 0, "community|acva:Arabic_Culture|0": 0, "community|acva:Arabic_Food|0": 0, "community|acva:Arabic_Funeral|0": 0, "community|acva:Arabic_Geography|0": 0, "community|acva:Arabic_History|0": 0, "community|acva:Arabic_Language_Origin|0": 0, "community|acva:Arabic_Literature|0": 0, "community|acva:Arabic_Math|0": 0, "community|acva:Arabic_Medicine|0": 0, "community|acva:Arabic_Music|0": 0, "community|acva:Arabic_Ornament|0": 0, "community|acva:Arabic_Philosophy|0": 0, "community|acva:Arabic_Physics_and_Chemistry|0": 0, "community|acva:Arabic_Wedding|0": 0, "community|acva:Bahrain|0": 0, "community|acva:Comoros|0": 0, "community|acva:Egypt_modern|0": 0, "community|acva:InfluenceFromAncientEgypt|0": 0, "community|acva:InfluenceFromByzantium|0": 0, "community|acva:InfluenceFromChina|0": 0, "community|acva:InfluenceFromGreece|0": 0, "community|acva:InfluenceFromIslam|0": 0, "community|acva:InfluenceFromPersia|0": 0, "community|acva:InfluenceFromRome|0": 0, "community|acva:Iraq|0": 0, "community|acva:Islam_Education|0": 0, "community|acva:Islam_branches_and_schools|0": 0, "community|acva:Islamic_law_system|0": 0, "community|acva:Jordan|0": 0, "community|acva:Kuwait|0": 0, "community|acva:Lebanon|0": 0, "community|acva:Libya|0": 0, "community|acva:Mauritania|0": 0, "community|acva:Mesopotamia_civilization|0": 0, "community|acva:Morocco|0": 0, "community|acva:Oman|0": 0, "community|acva:Palestine|0": 0, "community|acva:Qatar|0": 0, "community|acva:Saudi_Arabia|0": 0, "community|acva:Somalia|0": 0, "community|acva:Sudan|0": 0, "community|acva:Syria|0": 0, "community|acva:Tunisia|0": 0, "community|acva:United_Arab_Emirates|0": 0, "community|acva:Yemen|0": 0, "community|acva:communication|0": 0, "community|acva:computer_and_phone|0": 0, "community|acva:daily_life|0": 0, "community|acva:entertainment|0": 0, "community|alghafa:mcq_exams_test_ar|0": 0, "community|alghafa:meta_ar_dialects|0": 0, "community|alghafa:meta_ar_msa|0": 0, "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, "community|alghafa:multiple_choice_rating_sentiment_task|0": 0, "community|alghafa:multiple_choice_sentiment_task|0": 0, "community|arabic_exams|0": 0, "community|arabic_mmlu:abstract_algebra|0": 0, "community|arabic_mmlu:anatomy|0": 0, "community|arabic_mmlu:astronomy|0": 0, "community|arabic_mmlu:business_ethics|0": 0, "community|arabic_mmlu:clinical_knowledge|0": 0, "community|arabic_mmlu:college_biology|0": 0, "community|arabic_mmlu:college_chemistry|0": 0, "community|arabic_mmlu:college_computer_science|0": 0, "community|arabic_mmlu:college_mathematics|0": 0, "community|arabic_mmlu:college_medicine|0": 0, "community|arabic_mmlu:college_physics|0": 0, "community|arabic_mmlu:computer_security|0": 0, "community|arabic_mmlu:conceptual_physics|0": 0, "community|arabic_mmlu:econometrics|0": 0, "community|arabic_mmlu:electrical_engineering|0": 0, "community|arabic_mmlu:elementary_mathematics|0": 0, "community|arabic_mmlu:formal_logic|0": 0, 
"community|arabic_mmlu:global_facts|0": 0, "community|arabic_mmlu:high_school_biology|0": 0, "community|arabic_mmlu:high_school_chemistry|0": 0, "community|arabic_mmlu:high_school_computer_science|0": 0, "community|arabic_mmlu:high_school_european_history|0": 0, "community|arabic_mmlu:high_school_geography|0": 0, "community|arabic_mmlu:high_school_government_and_politics|0": 0, "community|arabic_mmlu:high_school_macroeconomics|0": 0, "community|arabic_mmlu:high_school_mathematics|0": 0, "community|arabic_mmlu:high_school_microeconomics|0": 0, "community|arabic_mmlu:high_school_physics|0": 0, "community|arabic_mmlu:high_school_psychology|0": 0, "community|arabic_mmlu:high_school_statistics|0": 0, "community|arabic_mmlu:high_school_us_history|0": 0, "community|arabic_mmlu:high_school_world_history|0": 0, "community|arabic_mmlu:human_aging|0": 0, "community|arabic_mmlu:human_sexuality|0": 0, "community|arabic_mmlu:international_law|0": 0, "community|arabic_mmlu:jurisprudence|0": 0, "community|arabic_mmlu:logical_fallacies|0": 0, "community|arabic_mmlu:machine_learning|0": 0, "community|arabic_mmlu:management|0": 0, "community|arabic_mmlu:marketing|0": 0, "community|arabic_mmlu:medical_genetics|0": 0, "community|arabic_mmlu:miscellaneous|0": 0, "community|arabic_mmlu:moral_disputes|0": 0, "community|arabic_mmlu:moral_scenarios|0": 0, "community|arabic_mmlu:nutrition|0": 0, "community|arabic_mmlu:philosophy|0": 0, "community|arabic_mmlu:prehistory|0": 0, "community|arabic_mmlu:professional_accounting|0": 0, "community|arabic_mmlu:professional_law|0": 0, "community|arabic_mmlu:professional_medicine|0": 0, "community|arabic_mmlu:professional_psychology|0": 0, "community|arabic_mmlu:public_relations|0": 0, "community|arabic_mmlu:security_studies|0": 0, "community|arabic_mmlu:sociology|0": 0, "community|arabic_mmlu:us_foreign_policy|0": 0, "community|arabic_mmlu:virology|0": 0, "community|arabic_mmlu:world_religions|0": 0, "community|arc_challenge_okapi_ar|0": 0, "community|arc_easy_ar|0": 0, "community|boolq_ar|0": 0, "community|copa_ext_ar|0": 0, "community|hellaswag_okapi_ar|0": 0, "community|openbook_qa_ext_ar|0": 0, "community|piqa_ar|0": 0, "community|race_ar|0": 0, "community|sciq_ar|0": 0, "community|toxigen_ar|0": 0, "lighteval|xstory_cloze:ar|0": 0 }
{ "community|acva:Algeria": { "name": "acva:Algeria", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Algeria", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Ancient_Egypt": { "name": "acva:Ancient_Egypt", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Ancient_Egypt", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 315, "effective_num_docs": 315, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arab_Empire": { "name": "acva:Arab_Empire", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arab_Empire", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 265, "effective_num_docs": 265, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Architecture": { "name": "acva:Arabic_Architecture", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Architecture", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Art": { "name": "acva:Arabic_Art", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Art", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Astronomy": { "name": "acva:Arabic_Astronomy", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Astronomy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, 
"community|acva:Arabic_Calligraphy": { "name": "acva:Arabic_Calligraphy", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Calligraphy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 255, "effective_num_docs": 255, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Ceremony": { "name": "acva:Arabic_Ceremony", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Ceremony", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 185, "effective_num_docs": 185, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Clothing": { "name": "acva:Arabic_Clothing", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Clothing", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Culture": { "name": "acva:Arabic_Culture", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Culture", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Food": { "name": "acva:Arabic_Food", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Food", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Funeral": { "name": "acva:Arabic_Funeral", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Funeral", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 95, "effective_num_docs": 95, "trust_dataset": null, "must_remove_duplicate_docs": null, 
"version": 0 }, "community|acva:Arabic_Geography": { "name": "acva:Arabic_Geography", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Geography", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_History": { "name": "acva:Arabic_History", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_History", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Language_Origin": { "name": "acva:Arabic_Language_Origin", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Language_Origin", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 95, "effective_num_docs": 95, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Literature": { "name": "acva:Arabic_Literature", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Literature", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Math": { "name": "acva:Arabic_Math", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Math", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Medicine": { "name": "acva:Arabic_Medicine", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Medicine", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, 
"must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Music": { "name": "acva:Arabic_Music", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Music", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 139, "effective_num_docs": 139, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Ornament": { "name": "acva:Arabic_Ornament", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Ornament", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Philosophy": { "name": "acva:Arabic_Philosophy", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Philosophy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Physics_and_Chemistry": { "name": "acva:Arabic_Physics_and_Chemistry", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Physics_and_Chemistry", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Wedding": { "name": "acva:Arabic_Wedding", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Wedding", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Bahrain": { "name": "acva:Bahrain", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Bahrain", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, 
"effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Comoros": { "name": "acva:Comoros", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Comoros", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Egypt_modern": { "name": "acva:Egypt_modern", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Egypt_modern", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 95, "effective_num_docs": 95, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromAncientEgypt": { "name": "acva:InfluenceFromAncientEgypt", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromAncientEgypt", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromByzantium": { "name": "acva:InfluenceFromByzantium", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromByzantium", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromChina": { "name": "acva:InfluenceFromChina", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromChina", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromGreece": { "name": "acva:InfluenceFromGreece", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromGreece", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": 
false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromIslam": { "name": "acva:InfluenceFromIslam", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromIslam", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromPersia": { "name": "acva:InfluenceFromPersia", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromPersia", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 175, "effective_num_docs": 175, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromRome": { "name": "acva:InfluenceFromRome", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromRome", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Iraq": { "name": "acva:Iraq", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Iraq", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 85, "effective_num_docs": 85, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Islam_Education": { "name": "acva:Islam_Education", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Islam_Education", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Islam_branches_and_schools": { "name": "acva:Islam_branches_and_schools", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Islam_branches_and_schools", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, 
"output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 175, "effective_num_docs": 175, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Islamic_law_system": { "name": "acva:Islamic_law_system", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Islamic_law_system", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Jordan": { "name": "acva:Jordan", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Jordan", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Kuwait": { "name": "acva:Kuwait", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Kuwait", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Lebanon": { "name": "acva:Lebanon", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Lebanon", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Libya": { "name": "acva:Libya", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Libya", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Mauritania": { "name": "acva:Mauritania", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Mauritania", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], 
"original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Mesopotamia_civilization": { "name": "acva:Mesopotamia_civilization", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Mesopotamia_civilization", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 155, "effective_num_docs": 155, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Morocco": { "name": "acva:Morocco", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Morocco", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Oman": { "name": "acva:Oman", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Oman", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Palestine": { "name": "acva:Palestine", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Palestine", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 85, "effective_num_docs": 85, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Qatar": { "name": "acva:Qatar", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Qatar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Saudi_Arabia": { "name": "acva:Saudi_Arabia", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Saudi_Arabia", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, 
"must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Somalia": { "name": "acva:Somalia", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Somalia", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Sudan": { "name": "acva:Sudan", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Sudan", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Syria": { "name": "acva:Syria", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Syria", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Tunisia": { "name": "acva:Tunisia", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Tunisia", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:United_Arab_Emirates": { "name": "acva:United_Arab_Emirates", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "United_Arab_Emirates", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 85, "effective_num_docs": 85, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Yemen": { "name": "acva:Yemen", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Yemen", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 10, "effective_num_docs": 10, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:communication": { "name": 
"acva:communication", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "communication", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 364, "effective_num_docs": 364, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:computer_and_phone": { "name": "acva:computer_and_phone", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "computer_and_phone", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 295, "effective_num_docs": 295, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:daily_life": { "name": "acva:daily_life", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "daily_life", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 337, "effective_num_docs": 337, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:entertainment": { "name": "acva:entertainment", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "entertainment", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 295, "effective_num_docs": 295, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:mcq_exams_test_ar": { "name": "alghafa:mcq_exams_test_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "mcq_exams_test_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 557, "effective_num_docs": 557, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:meta_ar_dialects": { "name": "alghafa:meta_ar_dialects", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "meta_ar_dialects", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 5395, "effective_num_docs": 5395, 
"trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:meta_ar_msa": { "name": "alghafa:meta_ar_msa", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "meta_ar_msa", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 895, "effective_num_docs": 895, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_facts_truefalse_balanced_task": { "name": "alghafa:multiple_choice_facts_truefalse_balanced_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_facts_truefalse_balanced_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 75, "effective_num_docs": 75, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_grounded_statement_soqal_task": { "name": "alghafa:multiple_choice_grounded_statement_soqal_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_grounded_statement_soqal_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 150, "effective_num_docs": 150, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { "name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 150, "effective_num_docs": 150, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { "name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 
7995, "effective_num_docs": 7995, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_rating_sentiment_task": { "name": "alghafa:multiple_choice_rating_sentiment_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_rating_sentiment_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 5995, "effective_num_docs": 5995, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_sentiment_task": { "name": "alghafa:multiple_choice_sentiment_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_sentiment_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 1720, "effective_num_docs": 1720, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_exams": { "name": "arabic_exams", "prompt_function": "arabic_exams", "hf_repo": "OALL/Arabic_EXAMS", "hf_subset": "default", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 537, "effective_num_docs": 537, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:abstract_algebra": { "name": "arabic_mmlu:abstract_algebra", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "abstract_algebra", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:anatomy": { "name": "arabic_mmlu:anatomy", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "anatomy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 135, "effective_num_docs": 135, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:astronomy": { "name": "arabic_mmlu:astronomy", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "astronomy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], 
"evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 152, "effective_num_docs": 152, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:business_ethics": { "name": "arabic_mmlu:business_ethics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "business_ethics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:clinical_knowledge": { "name": "arabic_mmlu:clinical_knowledge", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "clinical_knowledge", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 265, "effective_num_docs": 265, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_biology": { "name": "arabic_mmlu:college_biology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_biology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 144, "effective_num_docs": 144, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_chemistry": { "name": "arabic_mmlu:college_chemistry", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_chemistry", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_computer_science": { "name": "arabic_mmlu:college_computer_science", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_computer_science", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_mathematics": { "name": "arabic_mmlu:college_mathematics", "prompt_function": 
"mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_mathematics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_medicine": { "name": "arabic_mmlu:college_medicine", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_medicine", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 173, "effective_num_docs": 173, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_physics": { "name": "arabic_mmlu:college_physics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_physics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 102, "effective_num_docs": 102, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:computer_security": { "name": "arabic_mmlu:computer_security", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "computer_security", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:conceptual_physics": { "name": "arabic_mmlu:conceptual_physics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "conceptual_physics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 235, "effective_num_docs": 235, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:econometrics": { "name": "arabic_mmlu:econometrics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "econometrics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 114, "effective_num_docs": 114, "trust_dataset": null, "must_remove_duplicate_docs": 
null, "version": 0 }, "community|arabic_mmlu:electrical_engineering": { "name": "arabic_mmlu:electrical_engineering", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "electrical_engineering", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:elementary_mathematics": { "name": "arabic_mmlu:elementary_mathematics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "elementary_mathematics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 378, "effective_num_docs": 378, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:formal_logic": { "name": "arabic_mmlu:formal_logic", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "formal_logic", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 126, "effective_num_docs": 126, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:global_facts": { "name": "arabic_mmlu:global_facts", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "global_facts", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_biology": { "name": "arabic_mmlu:high_school_biology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_biology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 310, "effective_num_docs": 310, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_chemistry": { "name": "arabic_mmlu:high_school_chemistry", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_chemistry", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, 
"num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 203, "effective_num_docs": 203, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_computer_science": { "name": "arabic_mmlu:high_school_computer_science", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_computer_science", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_european_history": { "name": "arabic_mmlu:high_school_european_history", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_european_history", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 165, "effective_num_docs": 165, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_geography": { "name": "arabic_mmlu:high_school_geography", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_geography", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 198, "effective_num_docs": 198, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_government_and_politics": { "name": "arabic_mmlu:high_school_government_and_politics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_government_and_politics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 193, "effective_num_docs": 193, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_macroeconomics": { "name": "arabic_mmlu:high_school_macroeconomics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_macroeconomics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 390, "effective_num_docs": 390, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_mathematics": { "name": "arabic_mmlu:high_school_mathematics", "prompt_function": 
"mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_mathematics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 270, "effective_num_docs": 270, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_microeconomics": { "name": "arabic_mmlu:high_school_microeconomics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_microeconomics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 238, "effective_num_docs": 238, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_physics": { "name": "arabic_mmlu:high_school_physics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_physics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 151, "effective_num_docs": 151, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_psychology": { "name": "arabic_mmlu:high_school_psychology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_psychology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 545, "effective_num_docs": 545, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_statistics": { "name": "arabic_mmlu:high_school_statistics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_statistics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 216, "effective_num_docs": 216, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_us_history": { "name": "arabic_mmlu:high_school_us_history", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_us_history", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], 
"original_num_docs": 204, "effective_num_docs": 204, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_world_history": { "name": "arabic_mmlu:high_school_world_history", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_world_history", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 237, "effective_num_docs": 237, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:human_aging": { "name": "arabic_mmlu:human_aging", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "human_aging", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 223, "effective_num_docs": 223, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:human_sexuality": { "name": "arabic_mmlu:human_sexuality", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "human_sexuality", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 131, "effective_num_docs": 131, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:international_law": { "name": "arabic_mmlu:international_law", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "international_law", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 121, "effective_num_docs": 121, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:jurisprudence": { "name": "arabic_mmlu:jurisprudence", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "jurisprudence", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 108, "effective_num_docs": 108, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:logical_fallacies": { "name": "arabic_mmlu:logical_fallacies", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "logical_fallacies", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", 
"generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 163, "effective_num_docs": 163, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:machine_learning": { "name": "arabic_mmlu:machine_learning", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "machine_learning", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 112, "effective_num_docs": 112, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:management": { "name": "arabic_mmlu:management", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "management", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 103, "effective_num_docs": 103, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:marketing": { "name": "arabic_mmlu:marketing", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "marketing", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 234, "effective_num_docs": 234, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:medical_genetics": { "name": "arabic_mmlu:medical_genetics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "medical_genetics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:miscellaneous": { "name": "arabic_mmlu:miscellaneous", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "miscellaneous", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 783, "effective_num_docs": 783, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:moral_disputes": { "name": "arabic_mmlu:moral_disputes", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "moral_disputes", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ 
"test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 346, "effective_num_docs": 346, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:moral_scenarios": { "name": "arabic_mmlu:moral_scenarios", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "moral_scenarios", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 895, "effective_num_docs": 895, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:nutrition": { "name": "arabic_mmlu:nutrition", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "nutrition", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 306, "effective_num_docs": 306, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:philosophy": { "name": "arabic_mmlu:philosophy", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "philosophy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 311, "effective_num_docs": 311, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:prehistory": { "name": "arabic_mmlu:prehistory", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "prehistory", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 324, "effective_num_docs": 324, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:professional_accounting": { "name": "arabic_mmlu:professional_accounting", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "professional_accounting", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 282, "effective_num_docs": 282, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:professional_law": { "name": "arabic_mmlu:professional_law", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "professional_law", "metric": [ 
"loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 1534, "effective_num_docs": 1534, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:professional_medicine": { "name": "arabic_mmlu:professional_medicine", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "professional_medicine", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 272, "effective_num_docs": 272, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:professional_psychology": { "name": "arabic_mmlu:professional_psychology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "professional_psychology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 612, "effective_num_docs": 612, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:public_relations": { "name": "arabic_mmlu:public_relations", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "public_relations", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 110, "effective_num_docs": 110, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:security_studies": { "name": "arabic_mmlu:security_studies", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "security_studies", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 245, "effective_num_docs": 245, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:sociology": { "name": "arabic_mmlu:sociology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "sociology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 201, "effective_num_docs": 201, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:us_foreign_policy": { "name": 
"arabic_mmlu:us_foreign_policy", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "us_foreign_policy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:virology": { "name": "arabic_mmlu:virology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "virology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 166, "effective_num_docs": 166, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:world_religions": { "name": "arabic_mmlu:world_religions", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "world_religions", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 171, "effective_num_docs": 171, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arc_challenge_okapi_ar": { "name": "arc_challenge_okapi_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "arc_challenge_okapi_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 1160, "effective_num_docs": 1160, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arc_easy_ar": { "name": "arc_easy_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "arc_easy_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 2364, "effective_num_docs": 2364, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|boolq_ar": { "name": "boolq_ar", "prompt_function": "boolq_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "boolq_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 
3260, "effective_num_docs": 3260, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|copa_ext_ar": { "name": "copa_ext_ar", "prompt_function": "copa_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "copa_ext_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 90, "effective_num_docs": 90, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|hellaswag_okapi_ar": { "name": "hellaswag_okapi_ar", "prompt_function": "hellaswag_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "hellaswag_okapi_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 9171, "effective_num_docs": 9171, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|openbook_qa_ext_ar": { "name": "openbook_qa_ext_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "openbook_qa_ext_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 495, "effective_num_docs": 495, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|piqa_ar": { "name": "piqa_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "piqa_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 1833, "effective_num_docs": 1833, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|race_ar": { "name": "race_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "race_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 4929, "effective_num_docs": 4929, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|sciq_ar": { "name": "sciq_ar", "prompt_function": "sciq_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "sciq_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": 
"validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 995, "effective_num_docs": 995, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|toxigen_ar": { "name": "toxigen_ar", "prompt_function": "toxigen_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "toxigen_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 935, "effective_num_docs": 935, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "lighteval|xstory_cloze:ar": { "name": "xstory_cloze:ar", "prompt_function": "storycloze", "hf_repo": "juletxara/xstory_cloze", "hf_subset": "ar", "metric": [ "loglikelihood_acc" ], "hf_avail_splits": [ "training", "eval" ], "evaluation_splits": [ "eval" ], "few_shots_split": null, "few_shots_select": null, "generation_size": -1, "stop_sequence": [ "\n" ], "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "lighteval" ], "original_num_docs": 1511, "effective_num_docs": 1511, "trust_dataset": true, "must_remove_duplicate_docs": null, "version": 0 } }
{ "community|acva:Algeria|0": { "hashes": { "hash_examples": "da5a3003cd46f6f9", "hash_full_prompts": "da5a3003cd46f6f9", "hash_input_tokens": "e5fc3dac42dfee72", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Ancient_Egypt|0": { "hashes": { "hash_examples": "52d6f767fede195b", "hash_full_prompts": "52d6f767fede195b", "hash_input_tokens": "f081df2e883d3501", "hash_cont_tokens": "02a204d955f29ed4" }, "truncated": 0, "non_truncated": 315, "padded": 630, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arab_Empire|0": { "hashes": { "hash_examples": "8dacff6a79804a75", "hash_full_prompts": "8dacff6a79804a75", "hash_input_tokens": "e49cd4090dad143b", "hash_cont_tokens": "0be121aeaa740bc8" }, "truncated": 0, "non_truncated": 265, "padded": 530, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Architecture|0": { "hashes": { "hash_examples": "df286cd862d9f6bb", "hash_full_prompts": "df286cd862d9f6bb", "hash_input_tokens": "3b7211c0a255b1c0", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Art|0": { "hashes": { "hash_examples": "112883d764118a49", "hash_full_prompts": "112883d764118a49", "hash_input_tokens": "917ed5049127fed5", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Astronomy|0": { "hashes": { "hash_examples": "20dcdf2454bf8671", "hash_full_prompts": "20dcdf2454bf8671", "hash_input_tokens": "97f6f79b2dac0efc", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Calligraphy|0": { "hashes": { "hash_examples": "3a9f9d1ebe868a15", "hash_full_prompts": "3a9f9d1ebe868a15", "hash_input_tokens": "2fad42b484dc59aa", "hash_cont_tokens": "3a362560f15a8d81" }, "truncated": 0, "non_truncated": 255, "padded": 510, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Ceremony|0": { "hashes": { "hash_examples": "c927630f8d2f44da", "hash_full_prompts": "c927630f8d2f44da", "hash_input_tokens": "9798eb01d07c3556", "hash_cont_tokens": "219de3ed588d7bf7" }, "truncated": 0, "non_truncated": 185, "padded": 370, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Clothing|0": { "hashes": { "hash_examples": "6ad0740c2ac6ac92", "hash_full_prompts": "6ad0740c2ac6ac92", "hash_input_tokens": "18fedb803dfd2f04", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Culture|0": { "hashes": { "hash_examples": "2177bd857ad872ae", "hash_full_prompts": "2177bd857ad872ae", "hash_input_tokens": "a6a59b42464c5837", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Food|0": { "hashes": { "hash_examples": "a6ada65b71d7c9c5", "hash_full_prompts": "a6ada65b71d7c9c5", "hash_input_tokens": "614343393975f735", 
"hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Funeral|0": { "hashes": { "hash_examples": "fcee39dc29eaae91", "hash_full_prompts": "fcee39dc29eaae91", "hash_input_tokens": "104ed3483963acca", "hash_cont_tokens": "c36c7371f1293511" }, "truncated": 0, "non_truncated": 95, "padded": 190, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Geography|0": { "hashes": { "hash_examples": "d36eda7c89231c02", "hash_full_prompts": "d36eda7c89231c02", "hash_input_tokens": "777d2a562f29e686", "hash_cont_tokens": "625e58e7a01dba13" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_History|0": { "hashes": { "hash_examples": "6354ac0d6db6a5fc", "hash_full_prompts": "6354ac0d6db6a5fc", "hash_input_tokens": "9c2057e05dabdf89", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Language_Origin|0": { "hashes": { "hash_examples": "ddc967c8aca34402", "hash_full_prompts": "ddc967c8aca34402", "hash_input_tokens": "a96a67eb4d720f13", "hash_cont_tokens": "c36c7371f1293511" }, "truncated": 0, "non_truncated": 95, "padded": 190, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Literature|0": { "hashes": { "hash_examples": "4305379fd46be5d8", "hash_full_prompts": "4305379fd46be5d8", "hash_input_tokens": "f26e61f50f0bd444", "hash_cont_tokens": "625e58e7a01dba13" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Math|0": { "hashes": { "hash_examples": "dec621144f4d28be", "hash_full_prompts": "dec621144f4d28be", "hash_input_tokens": "552e82b34d3ef11d", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Medicine|0": { "hashes": { "hash_examples": "2b344cdae9495ff2", "hash_full_prompts": "2b344cdae9495ff2", "hash_input_tokens": "93ce0692f579e950", "hash_cont_tokens": "625e58e7a01dba13" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Music|0": { "hashes": { "hash_examples": "0c54624d881944ce", "hash_full_prompts": "0c54624d881944ce", "hash_input_tokens": "ec71ed53e3bc6fab", "hash_cont_tokens": "4ac287553cdf8021" }, "truncated": 0, "non_truncated": 139, "padded": 278, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Ornament|0": { "hashes": { "hash_examples": "251a4a84289d8bc1", "hash_full_prompts": "251a4a84289d8bc1", "hash_input_tokens": "c77ed1df992f23ce", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Philosophy|0": { "hashes": { "hash_examples": "3f86fb9c94c13d22", "hash_full_prompts": "3f86fb9c94c13d22", "hash_input_tokens": "8f7be7cf36d086ec", "hash_cont_tokens": "625e58e7a01dba13" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0, 
"num_truncated_few_shots": 0 }, "community|acva:Arabic_Physics_and_Chemistry|0": { "hashes": { "hash_examples": "8fec65af3695b62a", "hash_full_prompts": "8fec65af3695b62a", "hash_input_tokens": "4598dfecde892b4a", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Wedding|0": { "hashes": { "hash_examples": "9cc3477184d7a4b8", "hash_full_prompts": "9cc3477184d7a4b8", "hash_input_tokens": "6a2e782fd3e14f2d", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Bahrain|0": { "hashes": { "hash_examples": "c92e803a0fa8b9e2", "hash_full_prompts": "c92e803a0fa8b9e2", "hash_input_tokens": "8cefee797012c64a", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Comoros|0": { "hashes": { "hash_examples": "06e5d4bba8e54cae", "hash_full_prompts": "06e5d4bba8e54cae", "hash_input_tokens": "88b4ecbe7a031dab", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Egypt_modern|0": { "hashes": { "hash_examples": "c6ec369164f93446", "hash_full_prompts": "c6ec369164f93446", "hash_input_tokens": "019a315de60f65ca", "hash_cont_tokens": "c36c7371f1293511" }, "truncated": 0, "non_truncated": 95, "padded": 190, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromAncientEgypt|0": { "hashes": { "hash_examples": "b9d56d74818b9bd4", "hash_full_prompts": "b9d56d74818b9bd4", "hash_input_tokens": "ff0c4477330411dc", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromByzantium|0": { "hashes": { "hash_examples": "5316c9624e7e59b8", "hash_full_prompts": "5316c9624e7e59b8", "hash_input_tokens": "1b910a4656773c97", "hash_cont_tokens": "625e58e7a01dba13" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromChina|0": { "hashes": { "hash_examples": "87894bce95a56411", "hash_full_prompts": "87894bce95a56411", "hash_input_tokens": "9b007c9c10fc8c4f", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromGreece|0": { "hashes": { "hash_examples": "0baa78a27e469312", "hash_full_prompts": "0baa78a27e469312", "hash_input_tokens": "cf3925b9e583eef1", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromIslam|0": { "hashes": { "hash_examples": "0c2532cde6541ff2", "hash_full_prompts": "0c2532cde6541ff2", "hash_input_tokens": "340ae221253932a4", "hash_cont_tokens": "625e58e7a01dba13" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromPersia|0": { "hashes": { "hash_examples": "efcd8112dc53c6e5", "hash_full_prompts": 
"efcd8112dc53c6e5", "hash_input_tokens": "90cbe499e5227cd8", "hash_cont_tokens": "0060d8f35205c778" }, "truncated": 0, "non_truncated": 175, "padded": 350, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromRome|0": { "hashes": { "hash_examples": "9db61480e2e85fd3", "hash_full_prompts": "9db61480e2e85fd3", "hash_input_tokens": "8e18c6eeaa6a5a68", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Iraq|0": { "hashes": { "hash_examples": "96dac3dfa8d2f41f", "hash_full_prompts": "96dac3dfa8d2f41f", "hash_input_tokens": "c8dacea3e8584b51", "hash_cont_tokens": "174ee430e070c2fa" }, "truncated": 0, "non_truncated": 85, "padded": 170, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Islam_Education|0": { "hashes": { "hash_examples": "0d80355f6a4cb51b", "hash_full_prompts": "0d80355f6a4cb51b", "hash_input_tokens": "39195312d4cbcdf9", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Islam_branches_and_schools|0": { "hashes": { "hash_examples": "5cedce1be2c3ad50", "hash_full_prompts": "5cedce1be2c3ad50", "hash_input_tokens": "71c8ffecf01f9a65", "hash_cont_tokens": "0060d8f35205c778" }, "truncated": 0, "non_truncated": 175, "padded": 350, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Islamic_law_system|0": { "hashes": { "hash_examples": "c0e6db8bc84e105e", "hash_full_prompts": "c0e6db8bc84e105e", "hash_input_tokens": "a7be3511d40b8730", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Jordan|0": { "hashes": { "hash_examples": "33deb5b4e5ddd6a1", "hash_full_prompts": "33deb5b4e5ddd6a1", "hash_input_tokens": "baba1b519e66d805", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Kuwait|0": { "hashes": { "hash_examples": "eb41773346d7c46c", "hash_full_prompts": "eb41773346d7c46c", "hash_input_tokens": "47abf2c5a502136f", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Lebanon|0": { "hashes": { "hash_examples": "25932dbf4c13d34f", "hash_full_prompts": "25932dbf4c13d34f", "hash_input_tokens": "806467ce0e7e49c6", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Libya|0": { "hashes": { "hash_examples": "f2c4db63cd402926", "hash_full_prompts": "f2c4db63cd402926", "hash_input_tokens": "23c9ca0b184d1265", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Mauritania|0": { "hashes": { "hash_examples": "8723ab5fdf286b54", "hash_full_prompts": "8723ab5fdf286b54", "hash_input_tokens": "a220b5302a4477a3", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, 
"num_truncated_few_shots": 0 }, "community|acva:Mesopotamia_civilization|0": { "hashes": { "hash_examples": "c33f5502a6130ca9", "hash_full_prompts": "c33f5502a6130ca9", "hash_input_tokens": "a9c1105c3f7780a1", "hash_cont_tokens": "ac62599297c498fd" }, "truncated": 0, "non_truncated": 155, "padded": 310, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Morocco|0": { "hashes": { "hash_examples": "588a5ed27904b1ae", "hash_full_prompts": "588a5ed27904b1ae", "hash_input_tokens": "4f45ccd38df6350f", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Oman|0": { "hashes": { "hash_examples": "d447c52b94248b69", "hash_full_prompts": "d447c52b94248b69", "hash_input_tokens": "69ba916374801e59", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Palestine|0": { "hashes": { "hash_examples": "19197e076ad14ff5", "hash_full_prompts": "19197e076ad14ff5", "hash_input_tokens": "77b136de826313a6", "hash_cont_tokens": "174ee430e070c2fa" }, "truncated": 0, "non_truncated": 85, "padded": 170, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Qatar|0": { "hashes": { "hash_examples": "cf0736fa185b28f6", "hash_full_prompts": "cf0736fa185b28f6", "hash_input_tokens": "2b658a817eee80ad", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Saudi_Arabia|0": { "hashes": { "hash_examples": "69beda6e1b85a08d", "hash_full_prompts": "69beda6e1b85a08d", "hash_input_tokens": "4a1a426d70693e9b", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Somalia|0": { "hashes": { "hash_examples": "b387940c65784fbf", "hash_full_prompts": "b387940c65784fbf", "hash_input_tokens": "d083c83b69b71bff", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Sudan|0": { "hashes": { "hash_examples": "e02c32b9d2dd0c3f", "hash_full_prompts": "e02c32b9d2dd0c3f", "hash_input_tokens": "0f255f2a0fb8c129", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Syria|0": { "hashes": { "hash_examples": "60a6f8fe73bda4bb", "hash_full_prompts": "60a6f8fe73bda4bb", "hash_input_tokens": "939d04f35ee00e35", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Tunisia|0": { "hashes": { "hash_examples": "34bb15d3830c5649", "hash_full_prompts": "34bb15d3830c5649", "hash_input_tokens": "7e37207612b9765b", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:United_Arab_Emirates|0": { "hashes": { "hash_examples": "98a0ba78172718ce", "hash_full_prompts": "98a0ba78172718ce", "hash_input_tokens": "335e6dbf7b60511d", "hash_cont_tokens": "174ee430e070c2fa" 
}, "truncated": 0, "non_truncated": 85, "padded": 170, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Yemen|0": { "hashes": { "hash_examples": "18e9bcccbb4ced7a", "hash_full_prompts": "18e9bcccbb4ced7a", "hash_input_tokens": "42bd1ebf201ab4bd", "hash_cont_tokens": "96702f2356f6107c" }, "truncated": 0, "non_truncated": 10, "padded": 20, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:communication|0": { "hashes": { "hash_examples": "9ff28ab5eab5c97b", "hash_full_prompts": "9ff28ab5eab5c97b", "hash_input_tokens": "cde9900484637577", "hash_cont_tokens": "b628a89bcecf356d" }, "truncated": 0, "non_truncated": 364, "padded": 728, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:computer_and_phone|0": { "hashes": { "hash_examples": "37bac2f086aaf6c2", "hash_full_prompts": "37bac2f086aaf6c2", "hash_input_tokens": "3237cdf9e89d962b", "hash_cont_tokens": "d8ae57c62ca85f4b" }, "truncated": 0, "non_truncated": 295, "padded": 590, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:daily_life|0": { "hashes": { "hash_examples": "bf07363c1c252e2f", "hash_full_prompts": "bf07363c1c252e2f", "hash_input_tokens": "e09edb9a10702466", "hash_cont_tokens": "d598caf874354f48" }, "truncated": 0, "non_truncated": 337, "padded": 674, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:entertainment|0": { "hashes": { "hash_examples": "37077bc00f0ac56a", "hash_full_prompts": "37077bc00f0ac56a", "hash_input_tokens": "259507eedd195806", "hash_cont_tokens": "d8ae57c62ca85f4b" }, "truncated": 0, "non_truncated": 295, "padded": 590, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:mcq_exams_test_ar|0": { "hashes": { "hash_examples": "c07a5e78c5c0b8fe", "hash_full_prompts": "c07a5e78c5c0b8fe", "hash_input_tokens": "d0e658e1d3f525ca", "hash_cont_tokens": "478fa268e1fd1e4d" }, "truncated": 0, "non_truncated": 557, "padded": 2228, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:meta_ar_dialects|0": { "hashes": { "hash_examples": "c0b6081f83e14064", "hash_full_prompts": "c0b6081f83e14064", "hash_input_tokens": "eba73771657157ae", "hash_cont_tokens": "3dea80a1ee9dc316" }, "truncated": 0, "non_truncated": 5395, "padded": 21451, "non_padded": 129, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:meta_ar_msa|0": { "hashes": { "hash_examples": "64eb78a7c5b7484b", "hash_full_prompts": "64eb78a7c5b7484b", "hash_input_tokens": "794aecc1fbbe0793", "hash_cont_tokens": "b7b27059aa46f9f5" }, "truncated": 0, "non_truncated": 895, "padded": 3554, "non_padded": 26, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { "hashes": { "hash_examples": "54fc3502c1c02c06", "hash_full_prompts": "54fc3502c1c02c06", "hash_input_tokens": "3f3ed402e71042a8", "hash_cont_tokens": "30ae320a6284bd96" }, "truncated": 0, "non_truncated": 75, "padded": 150, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { "hashes": { "hash_examples": "46572d83696552ae", "hash_full_prompts": "46572d83696552ae", "hash_input_tokens": "27b35507743dcffa", "hash_cont_tokens": "2e89ca40d66b31a0" }, "truncated": 0, "non_truncated": 150, "padded": 743, "non_padded": 7, "effective_few_shots": 0, 
"num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { "hashes": { "hash_examples": "f430d97ff715bc1c", "hash_full_prompts": "f430d97ff715bc1c", "hash_input_tokens": "e9b6514c4ccbb61a", "hash_cont_tokens": "6e7f62230276d03d" }, "truncated": 0, "non_truncated": 150, "padded": 746, "non_padded": 4, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { "hashes": { "hash_examples": "6b70a7416584f98c", "hash_full_prompts": "6b70a7416584f98c", "hash_input_tokens": "3bae7e59f2386e88", "hash_cont_tokens": "cd589f2d0662aca9" }, "truncated": 0, "non_truncated": 7995, "padded": 15990, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_rating_sentiment_task|0": { "hashes": { "hash_examples": "bc2005cc9d2f436e", "hash_full_prompts": "bc2005cc9d2f436e", "hash_input_tokens": "1615e207001a79c4", "hash_cont_tokens": "43c11b648549da66" }, "truncated": 0, "non_truncated": 5995, "padded": 17843, "non_padded": 142, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_sentiment_task|0": { "hashes": { "hash_examples": "6fb0e254ea5945d8", "hash_full_prompts": "6fb0e254ea5945d8", "hash_input_tokens": "6dc4e300f2ff1bfe", "hash_cont_tokens": "040bf50c610943db" }, "truncated": 0, "non_truncated": 1720, "padded": 5061, "non_padded": 99, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_exams|0": { "hashes": { "hash_examples": "6d721df351722656", "hash_full_prompts": "6d721df351722656", "hash_input_tokens": "aca56dc046703947", "hash_cont_tokens": "f6dc9d4abb83a50a" }, "truncated": 0, "non_truncated": 537, "padded": 2116, "non_padded": 32, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:abstract_algebra|0": { "hashes": { "hash_examples": "f2ddca8f45c0a511", "hash_full_prompts": "f2ddca8f45c0a511", "hash_input_tokens": "d7042e2e14520155", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:anatomy|0": { "hashes": { "hash_examples": "dfdbc1b83107668d", "hash_full_prompts": "dfdbc1b83107668d", "hash_input_tokens": "e20f6796c6d5fb1f", "hash_cont_tokens": "b5c1c612518185a5" }, "truncated": 0, "non_truncated": 135, "padded": 532, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:astronomy|0": { "hashes": { "hash_examples": "9736a606002a848e", "hash_full_prompts": "9736a606002a848e", "hash_input_tokens": "8da405e4905c75a2", "hash_cont_tokens": "9a91066ba51b2074" }, "truncated": 0, "non_truncated": 152, "padded": 608, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:business_ethics|0": { "hashes": { "hash_examples": "735e452fbb6dc63d", "hash_full_prompts": "735e452fbb6dc63d", "hash_input_tokens": "2db4839c0303775f", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 396, "non_padded": 4, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:clinical_knowledge|0": { "hashes": { "hash_examples": "6ab0ca4da98aedcf", "hash_full_prompts": "6ab0ca4da98aedcf", "hash_input_tokens": "94cfbbf9f11b0b2a", "hash_cont_tokens": "4a838d5fa832139f" }, "truncated": 0, "non_truncated": 265, "padded": 1044, "non_padded": 16, "effective_few_shots": 0, 
"num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_biology|0": { "hashes": { "hash_examples": "17e4e390848018a4", "hash_full_prompts": "17e4e390848018a4", "hash_input_tokens": "9b43adc07f3e9d27", "hash_cont_tokens": "d95798e261e3ecf2" }, "truncated": 0, "non_truncated": 144, "padded": 568, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_chemistry|0": { "hashes": { "hash_examples": "4abb169f6dfd234b", "hash_full_prompts": "4abb169f6dfd234b", "hash_input_tokens": "85bfe55fbc8e6ac0", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 392, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_computer_science|0": { "hashes": { "hash_examples": "a369e2e941358a1e", "hash_full_prompts": "a369e2e941358a1e", "hash_input_tokens": "9f570bd02bec33c5", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_mathematics|0": { "hashes": { "hash_examples": "d7be03b8b6020bff", "hash_full_prompts": "d7be03b8b6020bff", "hash_input_tokens": "7e77bf0db39d418e", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 392, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_medicine|0": { "hashes": { "hash_examples": "0518a00f097346bf", "hash_full_prompts": "0518a00f097346bf", "hash_input_tokens": "e3f6484c467dfa1f", "hash_cont_tokens": "0a01d731701f68e5" }, "truncated": 0, "non_truncated": 173, "padded": 680, "non_padded": 12, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_physics|0": { "hashes": { "hash_examples": "5d842cd49bc70e12", "hash_full_prompts": "5d842cd49bc70e12", "hash_input_tokens": "db230e09c1538af9", "hash_cont_tokens": "77e73d9510077678" }, "truncated": 0, "non_truncated": 102, "padded": 404, "non_padded": 4, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:computer_security|0": { "hashes": { "hash_examples": "8e85d9f85be9b32f", "hash_full_prompts": "8e85d9f85be9b32f", "hash_input_tokens": "d72e2a6c23180e6f", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 392, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:conceptual_physics|0": { "hashes": { "hash_examples": "7964b55a0a49502b", "hash_full_prompts": "7964b55a0a49502b", "hash_input_tokens": "df74b1a60e8a2e7c", "hash_cont_tokens": "01df071ebfaec74d" }, "truncated": 0, "non_truncated": 235, "padded": 896, "non_padded": 44, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:econometrics|0": { "hashes": { "hash_examples": "1e192eae38347257", "hash_full_prompts": "1e192eae38347257", "hash_input_tokens": "1062cf3f569a55d7", "hash_cont_tokens": "e3fc2917921a9eaf" }, "truncated": 0, "non_truncated": 114, "padded": 432, "non_padded": 24, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:electrical_engineering|0": { "hashes": { "hash_examples": "cf97671d5c441da1", "hash_full_prompts": "cf97671d5c441da1", "hash_input_tokens": "6a55abcd2bff7101", "hash_cont_tokens": "c8a5aa308b735a83" }, "truncated": 0, "non_truncated": 145, "padded": 560, "non_padded": 20, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, 
"community|arabic_mmlu:elementary_mathematics|0": { "hashes": { "hash_examples": "6f49107ed43c40c5", "hash_full_prompts": "6f49107ed43c40c5", "hash_input_tokens": "9ddae447ded45c02", "hash_cont_tokens": "df4fdf22c42e07a2" }, "truncated": 0, "non_truncated": 378, "padded": 1488, "non_padded": 24, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:formal_logic|0": { "hashes": { "hash_examples": "7922c376008ba77b", "hash_full_prompts": "7922c376008ba77b", "hash_input_tokens": "22e82af5bbc2bf17", "hash_cont_tokens": "3a65271847fd1f2e" }, "truncated": 0, "non_truncated": 126, "padded": 496, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:global_facts|0": { "hashes": { "hash_examples": "11f9813185047d5b", "hash_full_prompts": "11f9813185047d5b", "hash_input_tokens": "66d675c248e02089", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 380, "non_padded": 20, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_biology|0": { "hashes": { "hash_examples": "2a804b1d90cbe66e", "hash_full_prompts": "2a804b1d90cbe66e", "hash_input_tokens": "62cced97dc5e2601", "hash_cont_tokens": "a6b83eb9e3d31488" }, "truncated": 0, "non_truncated": 310, "padded": 1212, "non_padded": 28, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_chemistry|0": { "hashes": { "hash_examples": "0032168adabc53b4", "hash_full_prompts": "0032168adabc53b4", "hash_input_tokens": "7761de55d380cce9", "hash_cont_tokens": "8a704d95abde0656" }, "truncated": 0, "non_truncated": 203, "padded": 796, "non_padded": 16, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_computer_science|0": { "hashes": { "hash_examples": "f2fb8740f9df980f", "hash_full_prompts": "f2fb8740f9df980f", "hash_input_tokens": "af660b922e8019f8", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 392, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_european_history|0": { "hashes": { "hash_examples": "73509021e7e66435", "hash_full_prompts": "73509021e7e66435", "hash_input_tokens": "7f49c1b6a266b0b9", "hash_cont_tokens": "bc6082a11551e6de" }, "truncated": 0, "non_truncated": 165, "padded": 576, "non_padded": 84, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_geography|0": { "hashes": { "hash_examples": "9e08d1894940ff42", "hash_full_prompts": "9e08d1894940ff42", "hash_input_tokens": "0932046495a67f10", "hash_cont_tokens": "17f69684d1b7da75" }, "truncated": 0, "non_truncated": 198, "padded": 772, "non_padded": 20, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_government_and_politics|0": { "hashes": { "hash_examples": "64b7e97817ca6c76", "hash_full_prompts": "64b7e97817ca6c76", "hash_input_tokens": "ead3c42b9bf75f5d", "hash_cont_tokens": "bfdbe0094ac4e89d" }, "truncated": 0, "non_truncated": 193, "padded": 764, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_macroeconomics|0": { "hashes": { "hash_examples": "9f582da8534bd2ef", "hash_full_prompts": "9f582da8534bd2ef", "hash_input_tokens": "7c1543a80b4ede1b", "hash_cont_tokens": "a61cb28cbed86a76" }, "truncated": 0, "non_truncated": 390, "padded": 1532, "non_padded": 28, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, 
"community|arabic_mmlu:high_school_mathematics|0": { "hashes": { "hash_examples": "fd54f1c10d423c51", "hash_full_prompts": "fd54f1c10d423c51", "hash_input_tokens": "7ed446df1d4bd0fa", "hash_cont_tokens": "ede1813083303def" }, "truncated": 0, "non_truncated": 270, "padded": 1068, "non_padded": 12, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_microeconomics|0": { "hashes": { "hash_examples": "7037896925aaf42f", "hash_full_prompts": "7037896925aaf42f", "hash_input_tokens": "d46dc174eda297e7", "hash_cont_tokens": "2ce9a16a87758ab3" }, "truncated": 0, "non_truncated": 238, "padded": 920, "non_padded": 32, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_physics|0": { "hashes": { "hash_examples": "60c3776215167dae", "hash_full_prompts": "60c3776215167dae", "hash_input_tokens": "719b0072f6357a5e", "hash_cont_tokens": "34e90e2adee42b92" }, "truncated": 0, "non_truncated": 151, "padded": 580, "non_padded": 24, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_psychology|0": { "hashes": { "hash_examples": "61176bfd5da1298f", "hash_full_prompts": "61176bfd5da1298f", "hash_input_tokens": "fc71eb5e84cf4031", "hash_cont_tokens": "2d376df2c1814495" }, "truncated": 0, "non_truncated": 545, "padded": 2140, "non_padded": 40, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_statistics|0": { "hashes": { "hash_examples": "40dfeebd1ea10f76", "hash_full_prompts": "40dfeebd1ea10f76", "hash_input_tokens": "446f3bcbe952c340", "hash_cont_tokens": "4bd9660a3b058b49" }, "truncated": 0, "non_truncated": 216, "padded": 856, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_us_history|0": { "hashes": { "hash_examples": "03daa510ba917f4d", "hash_full_prompts": "03daa510ba917f4d", "hash_input_tokens": "1acac266e203e9a5", "hash_cont_tokens": "5c36ec463d3a0755" }, "truncated": 0, "non_truncated": 204, "padded": 788, "non_padded": 28, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_world_history|0": { "hashes": { "hash_examples": "be075ffd579f43c2", "hash_full_prompts": "be075ffd579f43c2", "hash_input_tokens": "31b35ccbe47f4cff", "hash_cont_tokens": "f21688a7138caf58" }, "truncated": 0, "non_truncated": 237, "padded": 872, "non_padded": 76, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:human_aging|0": { "hashes": { "hash_examples": "caa5b69f640bd1ef", "hash_full_prompts": "caa5b69f640bd1ef", "hash_input_tokens": "4752dd4d61914b6c", "hash_cont_tokens": "22b919059cbabb52" }, "truncated": 0, "non_truncated": 223, "padded": 868, "non_padded": 24, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:human_sexuality|0": { "hashes": { "hash_examples": "5ed2e38fb25a3767", "hash_full_prompts": "5ed2e38fb25a3767", "hash_input_tokens": "b193408d5d7bea73", "hash_cont_tokens": "7033845bac78be24" }, "truncated": 0, "non_truncated": 131, "padded": 524, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:international_law|0": { "hashes": { "hash_examples": "4e3e9e28d1b96484", "hash_full_prompts": "4e3e9e28d1b96484", "hash_input_tokens": "20f8d9db6a5917e5", "hash_cont_tokens": "29048e59854cec5c" }, "truncated": 0, "non_truncated": 121, "padded": 476, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, 
"community|arabic_mmlu:jurisprudence|0": { "hashes": { "hash_examples": "e264b755366310b3", "hash_full_prompts": "e264b755366310b3", "hash_input_tokens": "6d81e19d5f151cb2", "hash_cont_tokens": "fae0419b42375fd2" }, "truncated": 0, "non_truncated": 108, "padded": 420, "non_padded": 12, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:logical_fallacies|0": { "hashes": { "hash_examples": "a4ab6965a3e38071", "hash_full_prompts": "a4ab6965a3e38071", "hash_input_tokens": "7974c5b93a8d0c5f", "hash_cont_tokens": "93c4e716b01bd87c" }, "truncated": 0, "non_truncated": 163, "padded": 636, "non_padded": 16, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:machine_learning|0": { "hashes": { "hash_examples": "b92320efa6636b40", "hash_full_prompts": "b92320efa6636b40", "hash_input_tokens": "fb54bcda51c26b7e", "hash_cont_tokens": "2e4467ffdab3254d" }, "truncated": 0, "non_truncated": 112, "padded": 432, "non_padded": 16, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:management|0": { "hashes": { "hash_examples": "c9ee4872a850fe20", "hash_full_prompts": "c9ee4872a850fe20", "hash_input_tokens": "bc69860b6c1d275a", "hash_cont_tokens": "b0936addbab0c265" }, "truncated": 0, "non_truncated": 103, "padded": 408, "non_padded": 4, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:marketing|0": { "hashes": { "hash_examples": "0c151b70f6a047e3", "hash_full_prompts": "0c151b70f6a047e3", "hash_input_tokens": "e1179d18505459de", "hash_cont_tokens": "54e2619fba846f17" }, "truncated": 0, "non_truncated": 234, "padded": 920, "non_padded": 16, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:medical_genetics|0": { "hashes": { "hash_examples": "513f6cb8fca3a24e", "hash_full_prompts": "513f6cb8fca3a24e", "hash_input_tokens": "089c3411d9789b13", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 388, "non_padded": 12, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:miscellaneous|0": { "hashes": { "hash_examples": "259a190d635331db", "hash_full_prompts": "259a190d635331db", "hash_input_tokens": "b9689e5fda8d03dd", "hash_cont_tokens": "b68c3a07a4a75876" }, "truncated": 0, "non_truncated": 783, "padded": 3088, "non_padded": 44, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:moral_disputes|0": { "hashes": { "hash_examples": "b85052c48a0b7bc3", "hash_full_prompts": "b85052c48a0b7bc3", "hash_input_tokens": "b21e7d7e1fcdc975", "hash_cont_tokens": "f54406d1e4cf99f8" }, "truncated": 0, "non_truncated": 346, "padded": 1348, "non_padded": 36, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:moral_scenarios|0": { "hashes": { "hash_examples": "28d0b069ef00dd00", "hash_full_prompts": "28d0b069ef00dd00", "hash_input_tokens": "63c5f825682abb5a", "hash_cont_tokens": "1a38cd5b4241444e" }, "truncated": 0, "non_truncated": 895, "padded": 3580, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:nutrition|0": { "hashes": { "hash_examples": "00c9bc5f1d305b2f", "hash_full_prompts": "00c9bc5f1d305b2f", "hash_input_tokens": "1136f15ad76ee823", "hash_cont_tokens": "9d443ff23bc12c12" }, "truncated": 0, "non_truncated": 306, "padded": 1192, "non_padded": 32, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:philosophy|0": { "hashes": { "hash_examples": "a458c08454a3fd5f", 
"hash_full_prompts": "a458c08454a3fd5f", "hash_input_tokens": "a356460c30a21f56", "hash_cont_tokens": "09e5454b7258b0b7" }, "truncated": 0, "non_truncated": 311, "padded": 1204, "non_padded": 40, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:prehistory|0": { "hashes": { "hash_examples": "d6a0ecbdbb670e9c", "hash_full_prompts": "d6a0ecbdbb670e9c", "hash_input_tokens": "863d9b4c1c269fa7", "hash_cont_tokens": "f20b5fcd2df4488d" }, "truncated": 0, "non_truncated": 324, "padded": 1272, "non_padded": 24, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:professional_accounting|0": { "hashes": { "hash_examples": "b4a95fe480b6540e", "hash_full_prompts": "b4a95fe480b6540e", "hash_input_tokens": "3a6934991a2799d9", "hash_cont_tokens": "a3a4b0df5e20638f" }, "truncated": 0, "non_truncated": 282, "padded": 1112, "non_padded": 16, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:professional_law|0": { "hashes": { "hash_examples": "c2be9651cdbdde3b", "hash_full_prompts": "c2be9651cdbdde3b", "hash_input_tokens": "2acdd42816b0fcc7", "hash_cont_tokens": "eb156ebe8faf1aaf" }, "truncated": 0, "non_truncated": 1534, "padded": 6080, "non_padded": 56, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:professional_medicine|0": { "hashes": { "hash_examples": "26ce92416288f273", "hash_full_prompts": "26ce92416288f273", "hash_input_tokens": "5f042c0f6ae758a6", "hash_cont_tokens": "0218217c2fa604a0" }, "truncated": 0, "non_truncated": 272, "padded": 1076, "non_padded": 12, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:professional_psychology|0": { "hashes": { "hash_examples": "71ea5f182ea9a641", "hash_full_prompts": "71ea5f182ea9a641", "hash_input_tokens": "023cedfd52e8e443", "hash_cont_tokens": "4fea6b16917c8330" }, "truncated": 0, "non_truncated": 612, "padded": 2396, "non_padded": 52, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:public_relations|0": { "hashes": { "hash_examples": "125adc21f91f8d77", "hash_full_prompts": "125adc21f91f8d77", "hash_input_tokens": "0b6a7e0523bf9f8e", "hash_cont_tokens": "fae64d3b41255dc8" }, "truncated": 0, "non_truncated": 110, "padded": 420, "non_padded": 20, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:security_studies|0": { "hashes": { "hash_examples": "3c18b216c099fb26", "hash_full_prompts": "3c18b216c099fb26", "hash_input_tokens": "da5a93baefb5fae8", "hash_cont_tokens": "f81c4b3cc61f9738" }, "truncated": 0, "non_truncated": 245, "padded": 980, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:sociology|0": { "hashes": { "hash_examples": "3f2a9634cef7417d", "hash_full_prompts": "3f2a9634cef7417d", "hash_input_tokens": "bd6665edf9dc0bba", "hash_cont_tokens": "a68d71e598a7eb7f" }, "truncated": 0, "non_truncated": 201, "padded": 768, "non_padded": 36, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:us_foreign_policy|0": { "hashes": { "hash_examples": "22249da54056475e", "hash_full_prompts": "22249da54056475e", "hash_input_tokens": "1617b010ae65dfcc", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 380, "non_padded": 20, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:virology|0": { "hashes": { "hash_examples": "9d194b9471dc624e", "hash_full_prompts": "9d194b9471dc624e", "hash_input_tokens": 
"12eec5438185bcef", "hash_cont_tokens": "6c8625e5b2b8ffeb" }, "truncated": 0, "non_truncated": 166, "padded": 664, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:world_religions|0": { "hashes": { "hash_examples": "229e5fe50082b064", "hash_full_prompts": "229e5fe50082b064", "hash_input_tokens": "b2136a5b3753ab33", "hash_cont_tokens": "e0faaa109c671b8f" }, "truncated": 0, "non_truncated": 171, "padded": 668, "non_padded": 16, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arc_challenge_okapi_ar|0": { "hashes": { "hash_examples": "ab893807673bc355", "hash_full_prompts": "ab893807673bc355", "hash_input_tokens": "3a3b08eb90768a1a", "hash_cont_tokens": "5ece5b0fdbfa8076" }, "truncated": 0, "non_truncated": 1160, "padded": 4558, "non_padded": 82, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arc_easy_ar|0": { "hashes": { "hash_examples": "acb688624acc3d04", "hash_full_prompts": "acb688624acc3d04", "hash_input_tokens": "441feeb1a5a0e817", "hash_cont_tokens": "f9e7ef6e6d49b466" }, "truncated": 0, "non_truncated": 2364, "padded": 9254, "non_padded": 202, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|boolq_ar|0": { "hashes": { "hash_examples": "48355a67867e0c32", "hash_full_prompts": "48355a67867e0c32", "hash_input_tokens": "59bb6cf5584b83f7", "hash_cont_tokens": "6cc6329be7dcb0ef" }, "truncated": 0, "non_truncated": 3260, "padded": 6474, "non_padded": 46, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|copa_ext_ar|0": { "hashes": { "hash_examples": "9bb83301bb72eecf", "hash_full_prompts": "9bb83301bb72eecf", "hash_input_tokens": "e853cb5d06649c47", "hash_cont_tokens": "48c2323d601b2b0c" }, "truncated": 0, "non_truncated": 90, "padded": 180, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|hellaswag_okapi_ar|0": { "hashes": { "hash_examples": "6e8cf57a322dfadd", "hash_full_prompts": "6e8cf57a322dfadd", "hash_input_tokens": "655f89bea7a08138", "hash_cont_tokens": "963f62478d9c5df7" }, "truncated": 0, "non_truncated": 9171, "padded": 36560, "non_padded": 124, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|openbook_qa_ext_ar|0": { "hashes": { "hash_examples": "923d41eb0aca93eb", "hash_full_prompts": "923d41eb0aca93eb", "hash_input_tokens": "aafd1a69a1bfb291", "hash_cont_tokens": "e9d7e284d35d6b14" }, "truncated": 0, "non_truncated": 495, "padded": 1949, "non_padded": 31, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|piqa_ar|0": { "hashes": { "hash_examples": "94bc205a520d3ea0", "hash_full_prompts": "94bc205a520d3ea0", "hash_input_tokens": "d63d9c52db17dbb6", "hash_cont_tokens": "6b782d2479e2c028" }, "truncated": 0, "non_truncated": 1833, "padded": 3621, "non_padded": 45, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|race_ar|0": { "hashes": { "hash_examples": "de65130bae647516", "hash_full_prompts": "de65130bae647516", "hash_input_tokens": "37a934c59d7cc420", "hash_cont_tokens": "03d651c5338bf364" }, "truncated": 0, "non_truncated": 4929, "padded": 19698, "non_padded": 18, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|sciq_ar|0": { "hashes": { "hash_examples": "23bcb7389ac0e137", "hash_full_prompts": "23bcb7389ac0e137", "hash_input_tokens": "1f8deedadd51e672", "hash_cont_tokens": "08d4e961078d048f" }, "truncated": 0, "non_truncated": 995, "padded": 3961, "non_padded": 19, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, 
"community|toxigen_ar|0": { "hashes": { "hash_examples": "1e139513004a9a2e", "hash_full_prompts": "1e139513004a9a2e", "hash_input_tokens": "58a7a107d8989597", "hash_cont_tokens": "23c85267d1c209f3" }, "truncated": 0, "non_truncated": 935, "padded": 1858, "non_padded": 12, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "lighteval|xstory_cloze:ar|0": { "hashes": { "hash_examples": "865426a22c787481", "hash_full_prompts": "865426a22c787481", "hash_input_tokens": "906ee262041a266c", "hash_cont_tokens": "cb73d8896239c71e" }, "truncated": 0, "non_truncated": 1511, "padded": 2980, "non_padded": 42, "effective_few_shots": 0, "num_truncated_few_shots": 0 } }
{ "hashes": { "hash_examples": "bf1a1ac040a161c7", "hash_full_prompts": "bf1a1ac040a161c7", "hash_input_tokens": "b8f2f5cce27088a8", "hash_cont_tokens": "34cc95dbf7d4f56b" }, "truncated": 0, "non_truncated": 72964, "padded": 233423, "non_padded": 2200, "num_truncated_few_shots": 0 }
{ "lighteval_sha": "?", "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null, "job_id": "", "start_time": 630.862788057, "end_time": 54184.754023617, "total_evaluation_time_secondes": "53553.891235560004", "model_name": "tanliboy/lambda-qwen2.5-32b-dpo-test", "model_sha": "675b60d6e859455a6139e6e284bbe1844b8ddf46", "model_dtype": "torch.bfloat16", "model_size": "61.03 GB", "config": null }
{ "community|acva:Algeria|0": { "acc_norm": 0.5897435897435898, "acc_norm_stderr": 0.035314937123266714 }, "community|acva:Ancient_Egypt|0": { "acc_norm": 0.06666666666666667, "acc_norm_stderr": 0.014076929472824472 }, "community|acva:Arab_Empire|0": { "acc_norm": 0.3132075471698113, "acc_norm_stderr": 0.02854479331905533 }, "community|acva:Arabic_Architecture|0": { "acc_norm": 0.46153846153846156, "acc_norm_stderr": 0.0357915435254457 }, "community|acva:Arabic_Art|0": { "acc_norm": 0.3641025641025641, "acc_norm_stderr": 0.03454653867786389 }, "community|acva:Arabic_Astronomy|0": { "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.03581804596782233 }, "community|acva:Arabic_Calligraphy|0": { "acc_norm": 0.6078431372549019, "acc_norm_stderr": 0.030634359906451983 }, "community|acva:Arabic_Ceremony|0": { "acc_norm": 0.5459459459459459, "acc_norm_stderr": 0.03670453191802572 }, "community|acva:Arabic_Clothing|0": { "acc_norm": 0.5025641025641026, "acc_norm_stderr": 0.03589743589743588 }, "community|acva:Arabic_Culture|0": { "acc_norm": 0.2358974358974359, "acc_norm_stderr": 0.030481516761721537 }, "community|acva:Arabic_Food|0": { "acc_norm": 0.5025641025641026, "acc_norm_stderr": 0.03589743589743589 }, "community|acva:Arabic_Funeral|0": { "acc_norm": 0.4105263157894737, "acc_norm_stderr": 0.050738635645512106 }, "community|acva:Arabic_Geography|0": { "acc_norm": 0.6344827586206897, "acc_norm_stderr": 0.040131241954243856 }, "community|acva:Arabic_History|0": { "acc_norm": 0.31794871794871793, "acc_norm_stderr": 0.03343383454355787 }, "community|acva:Arabic_Language_Origin|0": { "acc_norm": 0.5789473684210527, "acc_norm_stderr": 0.050924152299673286 }, "community|acva:Arabic_Literature|0": { "acc_norm": 0.5103448275862069, "acc_norm_stderr": 0.04165774775728763 }, "community|acva:Arabic_Math|0": { "acc_norm": 0.3128205128205128, "acc_norm_stderr": 0.03328755065724854 }, "community|acva:Arabic_Medicine|0": { "acc_norm": 0.593103448275862, "acc_norm_stderr": 0.04093793981266236 }, "community|acva:Arabic_Music|0": { "acc_norm": 0.2446043165467626, "acc_norm_stderr": 0.03659146222520567 }, "community|acva:Arabic_Ornament|0": { "acc_norm": 0.47692307692307695, "acc_norm_stderr": 0.0358596530894741 }, "community|acva:Arabic_Philosophy|0": { "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "community|acva:Arabic_Physics_and_Chemistry|0": { "acc_norm": 0.717948717948718, "acc_norm_stderr": 0.032307986017991154 }, "community|acva:Arabic_Wedding|0": { "acc_norm": 0.4307692307692308, "acc_norm_stderr": 0.03555213252058761 }, "community|acva:Bahrain|0": { "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.07106690545187012 }, "community|acva:Comoros|0": { "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.07309112127323451 }, "community|acva:Egypt_modern|0": { "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.04794350420740798 }, "community|acva:InfluenceFromAncientEgypt|0": { "acc_norm": 0.6051282051282051, "acc_norm_stderr": 0.03509545602262038 }, "community|acva:InfluenceFromByzantium|0": { "acc_norm": 0.7172413793103448, "acc_norm_stderr": 0.03752833958003337 }, "community|acva:InfluenceFromChina|0": { "acc_norm": 0.28717948717948716, "acc_norm_stderr": 0.03248373338539886 }, "community|acva:InfluenceFromGreece|0": { "acc_norm": 0.6615384615384615, "acc_norm_stderr": 0.03397280032734095 }, "community|acva:InfluenceFromIslam|0": { "acc_norm": 0.31724137931034485, "acc_norm_stderr": 0.03878352372138621 }, "community|acva:InfluenceFromPersia|0": { "acc_norm": 
0.6971428571428572, "acc_norm_stderr": 0.03483414676585986 }, "community|acva:InfluenceFromRome|0": { "acc_norm": 0.5846153846153846, "acc_norm_stderr": 0.03538013280575029 }, "community|acva:Iraq|0": { "acc_norm": 0.5411764705882353, "acc_norm_stderr": 0.0543691634273002 }, "community|acva:Islam_Education|0": { "acc_norm": 0.46153846153846156, "acc_norm_stderr": 0.03579154352544572 }, "community|acva:Islam_branches_and_schools|0": { "acc_norm": 0.4342857142857143, "acc_norm_stderr": 0.037576101528126626 }, "community|acva:Islamic_law_system|0": { "acc_norm": 0.4461538461538462, "acc_norm_stderr": 0.03568913546569232 }, "community|acva:Jordan|0": { "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.07216392363431012 }, "community|acva:Kuwait|0": { "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.07446027270295806 }, "community|acva:Lebanon|0": { "acc_norm": 0.2, "acc_norm_stderr": 0.06030226891555273 }, "community|acva:Libya|0": { "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.07491109582924914 }, "community|acva:Mauritania|0": { "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.07491109582924915 }, "community|acva:Mesopotamia_civilization|0": { "acc_norm": 0.5225806451612903, "acc_norm_stderr": 0.0402500394824441 }, "community|acva:Morocco|0": { "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.06478835438716998 }, "community|acva:Oman|0": { "acc_norm": 0.2, "acc_norm_stderr": 0.06030226891555273 }, "community|acva:Palestine|0": { "acc_norm": 0.2823529411764706, "acc_norm_stderr": 0.049114753600680516 }, "community|acva:Qatar|0": { "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.0752101433090355 }, "community|acva:Saudi_Arabia|0": { "acc_norm": 0.37435897435897436, "acc_norm_stderr": 0.034746084306262345 }, "community|acva:Somalia|0": { "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.07216392363431012 }, "community|acva:Sudan|0": { "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.07309112127323453 }, "community|acva:Syria|0": { "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.07106690545187012 }, "community|acva:Tunisia|0": { "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.06979205927323111 }, "community|acva:United_Arab_Emirates|0": { "acc_norm": 0.32941176470588235, "acc_norm_stderr": 0.051281164041654954 }, "community|acva:Yemen|0": { "acc_norm": 0.2, "acc_norm_stderr": 0.13333333333333333 }, "community|acva:communication|0": { "acc_norm": 0.5, "acc_norm_stderr": 0.0262431940540739 }, "community|acva:computer_and_phone|0": { "acc_norm": 0.46779661016949153, "acc_norm_stderr": 0.029100046852442864 }, "community|acva:daily_life|0": { "acc_norm": 0.20178041543026706, "acc_norm_stderr": 0.0218942996073136 }, "community|acva:entertainment|0": { "acc_norm": 0.24745762711864408, "acc_norm_stderr": 0.025167625104477034 }, "community|alghafa:mcq_exams_test_ar|0": { "acc_norm": 0.4614003590664273, "acc_norm_stderr": 0.02114144161245578 }, "community|alghafa:meta_ar_dialects|0": { "acc_norm": 0.519740500463392, "acc_norm_stderr": 0.006802613413062489 }, "community|alghafa:meta_ar_msa|0": { "acc_norm": 0.5508379888268157, "acc_norm_stderr": 0.016635838341631924 }, "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { "acc_norm": 0.92, "acc_norm_stderr": 0.03153719382878881 }, "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { "acc_norm": 0.72, "acc_norm_stderr": 0.0367834220026928 }, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { "acc_norm": 0.5666666666666667, "acc_norm_stderr": 
0.040595860168112737 }, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { "acc_norm": 0.8295184490306442, "acc_norm_stderr": 0.004206003663653753 }, "community|alghafa:multiple_choice_rating_sentiment_task|0": { "acc_norm": 0.5751459549624687, "acc_norm_stderr": 0.006384847729010643 }, "community|alghafa:multiple_choice_sentiment_task|0": { "acc_norm": 0.40406976744186046, "acc_norm_stderr": 0.011835536517397218 }, "community|arabic_exams|0": { "acc_norm": 0.5791433891992551, "acc_norm_stderr": 0.021324444838272973 }, "community|arabic_mmlu:abstract_algebra|0": { "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "community|arabic_mmlu:anatomy|0": { "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.043097329010363554 }, "community|arabic_mmlu:astronomy|0": { "acc_norm": 0.7828947368421053, "acc_norm_stderr": 0.03355045304882924 }, "community|arabic_mmlu:business_ethics|0": { "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909284 }, "community|arabic_mmlu:clinical_knowledge|0": { "acc_norm": 0.720754716981132, "acc_norm_stderr": 0.027611163402399715 }, "community|arabic_mmlu:college_biology|0": { "acc_norm": 0.7152777777777778, "acc_norm_stderr": 0.03773809990686936 }, "community|arabic_mmlu:college_chemistry|0": { "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "community|arabic_mmlu:college_computer_science|0": { "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "community|arabic_mmlu:college_mathematics|0": { "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "community|arabic_mmlu:college_medicine|0": { "acc_norm": 0.5722543352601156, "acc_norm_stderr": 0.037724468575180255 }, "community|arabic_mmlu:college_physics|0": { "acc_norm": 0.5196078431372549, "acc_norm_stderr": 0.04971358884367406 }, "community|arabic_mmlu:computer_security|0": { "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "community|arabic_mmlu:conceptual_physics|0": { "acc_norm": 0.7191489361702128, "acc_norm_stderr": 0.02937917046412482 }, "community|arabic_mmlu:econometrics|0": { "acc_norm": 0.543859649122807, "acc_norm_stderr": 0.04685473041907789 }, "community|arabic_mmlu:electrical_engineering|0": { "acc_norm": 0.6413793103448275, "acc_norm_stderr": 0.039966295748767186 }, "community|arabic_mmlu:elementary_mathematics|0": { "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.02306818884826114 }, "community|arabic_mmlu:formal_logic|0": { "acc_norm": 0.5396825396825397, "acc_norm_stderr": 0.04458029125470973 }, "community|arabic_mmlu:global_facts|0": { "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "community|arabic_mmlu:high_school_biology|0": { "acc_norm": 0.7451612903225806, "acc_norm_stderr": 0.024790118459332208 }, "community|arabic_mmlu:high_school_chemistry|0": { "acc_norm": 0.6945812807881774, "acc_norm_stderr": 0.032406615658684086 }, "community|arabic_mmlu:high_school_computer_science|0": { "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "community|arabic_mmlu:high_school_european_history|0": { "acc_norm": 0.296969696969697, "acc_norm_stderr": 0.03567969772268048 }, "community|arabic_mmlu:high_school_geography|0": { "acc_norm": 0.797979797979798, "acc_norm_stderr": 0.02860620428922987 }, "community|arabic_mmlu:high_school_government_and_politics|0": { "acc_norm": 0.7461139896373057, "acc_norm_stderr": 0.03141024780565318 }, "community|arabic_mmlu:high_school_macroeconomics|0": { "acc_norm": 0.7564102564102564, "acc_norm_stderr": 0.021763733684173895 }, "community|arabic_mmlu:high_school_mathematics|0": { "acc_norm": 
0.5481481481481482, "acc_norm_stderr": 0.030343862998512626 }, "community|arabic_mmlu:high_school_microeconomics|0": { "acc_norm": 0.7941176470588235, "acc_norm_stderr": 0.026265024608275882 }, "community|arabic_mmlu:high_school_physics|0": { "acc_norm": 0.5298013245033113, "acc_norm_stderr": 0.04075224992216979 }, "community|arabic_mmlu:high_school_psychology|0": { "acc_norm": 0.726605504587156, "acc_norm_stderr": 0.019109299846098292 }, "community|arabic_mmlu:high_school_statistics|0": { "acc_norm": 0.6342592592592593, "acc_norm_stderr": 0.03284738857647207 }, "community|arabic_mmlu:high_school_us_history|0": { "acc_norm": 0.3088235294117647, "acc_norm_stderr": 0.03242661719827218 }, "community|arabic_mmlu:high_school_world_history|0": { "acc_norm": 0.37130801687763715, "acc_norm_stderr": 0.0314506860074486 }, "community|arabic_mmlu:human_aging|0": { "acc_norm": 0.6591928251121076, "acc_norm_stderr": 0.0318114974705536 }, "community|arabic_mmlu:human_sexuality|0": { "acc_norm": 0.6106870229007634, "acc_norm_stderr": 0.04276486542814591 }, "community|arabic_mmlu:international_law|0": { "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "community|arabic_mmlu:jurisprudence|0": { "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.042365112580946315 }, "community|arabic_mmlu:logical_fallacies|0": { "acc_norm": 0.6748466257668712, "acc_norm_stderr": 0.0368035037128646 }, "community|arabic_mmlu:machine_learning|0": { "acc_norm": 0.6428571428571429, "acc_norm_stderr": 0.04547960999764376 }, "community|arabic_mmlu:management|0": { "acc_norm": 0.6990291262135923, "acc_norm_stderr": 0.04541609446503948 }, "community|arabic_mmlu:marketing|0": { "acc_norm": 0.8162393162393162, "acc_norm_stderr": 0.02537213967172293 }, "community|arabic_mmlu:medical_genetics|0": { "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526094 }, "community|arabic_mmlu:miscellaneous|0": { "acc_norm": 0.7254150702426565, "acc_norm_stderr": 0.015959829933084032 }, "community|arabic_mmlu:moral_disputes|0": { "acc_norm": 0.6994219653179191, "acc_norm_stderr": 0.024685316867257803 }, "community|arabic_mmlu:moral_scenarios|0": { "acc_norm": 0.5452513966480447, "acc_norm_stderr": 0.016653875777524012 }, "community|arabic_mmlu:nutrition|0": { "acc_norm": 0.738562091503268, "acc_norm_stderr": 0.025160998214292452 }, "community|arabic_mmlu:philosophy|0": { "acc_norm": 0.6784565916398714, "acc_norm_stderr": 0.02652772407952887 }, "community|arabic_mmlu:prehistory|0": { "acc_norm": 0.6882716049382716, "acc_norm_stderr": 0.025773111169630453 }, "community|arabic_mmlu:professional_accounting|0": { "acc_norm": 0.5070921985815603, "acc_norm_stderr": 0.02982449855912901 }, "community|arabic_mmlu:professional_law|0": { "acc_norm": 0.3917861799217731, "acc_norm_stderr": 0.01246756441814513 }, "community|arabic_mmlu:professional_medicine|0": { "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.029029422815681404 }, "community|arabic_mmlu:professional_psychology|0": { "acc_norm": 0.6437908496732027, "acc_norm_stderr": 0.019373332420724514 }, "community|arabic_mmlu:public_relations|0": { "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.04426294648200099 }, "community|arabic_mmlu:security_studies|0": { "acc_norm": 0.6693877551020408, "acc_norm_stderr": 0.0301164262965406 }, "community|arabic_mmlu:sociology|0": { "acc_norm": 0.7412935323383084, "acc_norm_stderr": 0.030965903123573033 }, "community|arabic_mmlu:us_foreign_policy|0": { "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036847 }, 
"community|arabic_mmlu:virology|0": { "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.03891364495835817 }, "community|arabic_mmlu:world_religions|0": { "acc_norm": 0.7426900584795322, "acc_norm_stderr": 0.03352799844161865 }, "community|arc_challenge_okapi_ar|0": { "acc_norm": 0.653448275862069, "acc_norm_stderr": 0.013978092888138751 }, "community|arc_easy_ar|0": { "acc_norm": 0.6095600676818951, "acc_norm_stderr": 0.010035834895204997 }, "community|boolq_ar|0": { "acc_norm": 0.8325153374233129, "acc_norm_stderr": 0.00654095953941842 }, "community|copa_ext_ar|0": { "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04996877926639073 }, "community|hellaswag_okapi_ar|0": { "acc_norm": 0.5222985497764693, "acc_norm_stderr": 0.005216185433571988 }, "community|openbook_qa_ext_ar|0": { "acc_norm": 0.5878787878787879, "acc_norm_stderr": 0.022145878087744476 }, "community|piqa_ar|0": { "acc_norm": 0.7741407528641571, "acc_norm_stderr": 0.009769361238384127 }, "community|race_ar|0": { "acc_norm": 0.5707039967539055, "acc_norm_stderr": 0.007050964531212677 }, "community|sciq_ar|0": { "acc_norm": 0.6673366834170854, "acc_norm_stderr": 0.014944512878445206 }, "community|toxigen_ar|0": { "acc_norm": 0.8203208556149733, "acc_norm_stderr": 0.012562245425338452 }, "lighteval|xstory_cloze:ar|0": { "acc": 0.7068166776968895, "acc_stderr": 0.011714791177625751 }, "community|acva:_average|0": { "acc_norm": 0.425542451334671, "acc_norm_stderr": 0.046278778203940676 }, "community|alghafa:_average|0": { "acc_norm": 0.616375520717586, "acc_norm_stderr": 0.01954697303075624 }, "community|arabic_mmlu:_average|0": { "acc_norm": 0.6317811061854015, "acc_norm_stderr": 0.03469868639599595 }, "all": { "acc_norm": 0.5446250242931533, "acc_norm_stderr": 0.03712188353110561, "acc": 0.7068166776968895, "acc_stderr": 0.011714791177625751 } }
{ "community|acva:Algeria|0": 0, "community|acva:Ancient_Egypt|0": 0, "community|acva:Arab_Empire|0": 0, "community|acva:Arabic_Architecture|0": 0, "community|acva:Arabic_Art|0": 0, "community|acva:Arabic_Astronomy|0": 0, "community|acva:Arabic_Calligraphy|0": 0, "community|acva:Arabic_Ceremony|0": 0, "community|acva:Arabic_Clothing|0": 0, "community|acva:Arabic_Culture|0": 0, "community|acva:Arabic_Food|0": 0, "community|acva:Arabic_Funeral|0": 0, "community|acva:Arabic_Geography|0": 0, "community|acva:Arabic_History|0": 0, "community|acva:Arabic_Language_Origin|0": 0, "community|acva:Arabic_Literature|0": 0, "community|acva:Arabic_Math|0": 0, "community|acva:Arabic_Medicine|0": 0, "community|acva:Arabic_Music|0": 0, "community|acva:Arabic_Ornament|0": 0, "community|acva:Arabic_Philosophy|0": 0, "community|acva:Arabic_Physics_and_Chemistry|0": 0, "community|acva:Arabic_Wedding|0": 0, "community|acva:Bahrain|0": 0, "community|acva:Comoros|0": 0, "community|acva:Egypt_modern|0": 0, "community|acva:InfluenceFromAncientEgypt|0": 0, "community|acva:InfluenceFromByzantium|0": 0, "community|acva:InfluenceFromChina|0": 0, "community|acva:InfluenceFromGreece|0": 0, "community|acva:InfluenceFromIslam|0": 0, "community|acva:InfluenceFromPersia|0": 0, "community|acva:InfluenceFromRome|0": 0, "community|acva:Iraq|0": 0, "community|acva:Islam_Education|0": 0, "community|acva:Islam_branches_and_schools|0": 0, "community|acva:Islamic_law_system|0": 0, "community|acva:Jordan|0": 0, "community|acva:Kuwait|0": 0, "community|acva:Lebanon|0": 0, "community|acva:Libya|0": 0, "community|acva:Mauritania|0": 0, "community|acva:Mesopotamia_civilization|0": 0, "community|acva:Morocco|0": 0, "community|acva:Oman|0": 0, "community|acva:Palestine|0": 0, "community|acva:Qatar|0": 0, "community|acva:Saudi_Arabia|0": 0, "community|acva:Somalia|0": 0, "community|acva:Sudan|0": 0, "community|acva:Syria|0": 0, "community|acva:Tunisia|0": 0, "community|acva:United_Arab_Emirates|0": 0, "community|acva:Yemen|0": 0, "community|acva:communication|0": 0, "community|acva:computer_and_phone|0": 0, "community|acva:daily_life|0": 0, "community|acva:entertainment|0": 0, "community|alghafa:mcq_exams_test_ar|0": 0, "community|alghafa:meta_ar_dialects|0": 0, "community|alghafa:meta_ar_msa|0": 0, "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, "community|alghafa:multiple_choice_rating_sentiment_task|0": 0, "community|alghafa:multiple_choice_sentiment_task|0": 0, "community|arabic_exams|0": 0, "community|arabic_mmlu:abstract_algebra|0": 0, "community|arabic_mmlu:anatomy|0": 0, "community|arabic_mmlu:astronomy|0": 0, "community|arabic_mmlu:business_ethics|0": 0, "community|arabic_mmlu:clinical_knowledge|0": 0, "community|arabic_mmlu:college_biology|0": 0, "community|arabic_mmlu:college_chemistry|0": 0, "community|arabic_mmlu:college_computer_science|0": 0, "community|arabic_mmlu:college_mathematics|0": 0, "community|arabic_mmlu:college_medicine|0": 0, "community|arabic_mmlu:college_physics|0": 0, "community|arabic_mmlu:computer_security|0": 0, "community|arabic_mmlu:conceptual_physics|0": 0, "community|arabic_mmlu:econometrics|0": 0, "community|arabic_mmlu:electrical_engineering|0": 0, "community|arabic_mmlu:elementary_mathematics|0": 0, "community|arabic_mmlu:formal_logic|0": 0, 
"community|arabic_mmlu:global_facts|0": 0, "community|arabic_mmlu:high_school_biology|0": 0, "community|arabic_mmlu:high_school_chemistry|0": 0, "community|arabic_mmlu:high_school_computer_science|0": 0, "community|arabic_mmlu:high_school_european_history|0": 0, "community|arabic_mmlu:high_school_geography|0": 0, "community|arabic_mmlu:high_school_government_and_politics|0": 0, "community|arabic_mmlu:high_school_macroeconomics|0": 0, "community|arabic_mmlu:high_school_mathematics|0": 0, "community|arabic_mmlu:high_school_microeconomics|0": 0, "community|arabic_mmlu:high_school_physics|0": 0, "community|arabic_mmlu:high_school_psychology|0": 0, "community|arabic_mmlu:high_school_statistics|0": 0, "community|arabic_mmlu:high_school_us_history|0": 0, "community|arabic_mmlu:high_school_world_history|0": 0, "community|arabic_mmlu:human_aging|0": 0, "community|arabic_mmlu:human_sexuality|0": 0, "community|arabic_mmlu:international_law|0": 0, "community|arabic_mmlu:jurisprudence|0": 0, "community|arabic_mmlu:logical_fallacies|0": 0, "community|arabic_mmlu:machine_learning|0": 0, "community|arabic_mmlu:management|0": 0, "community|arabic_mmlu:marketing|0": 0, "community|arabic_mmlu:medical_genetics|0": 0, "community|arabic_mmlu:miscellaneous|0": 0, "community|arabic_mmlu:moral_disputes|0": 0, "community|arabic_mmlu:moral_scenarios|0": 0, "community|arabic_mmlu:nutrition|0": 0, "community|arabic_mmlu:philosophy|0": 0, "community|arabic_mmlu:prehistory|0": 0, "community|arabic_mmlu:professional_accounting|0": 0, "community|arabic_mmlu:professional_law|0": 0, "community|arabic_mmlu:professional_medicine|0": 0, "community|arabic_mmlu:professional_psychology|0": 0, "community|arabic_mmlu:public_relations|0": 0, "community|arabic_mmlu:security_studies|0": 0, "community|arabic_mmlu:sociology|0": 0, "community|arabic_mmlu:us_foreign_policy|0": 0, "community|arabic_mmlu:virology|0": 0, "community|arabic_mmlu:world_religions|0": 0, "community|arc_challenge_okapi_ar|0": 0, "community|arc_easy_ar|0": 0, "community|boolq_ar|0": 0, "community|copa_ext_ar|0": 0, "community|hellaswag_okapi_ar|0": 0, "community|openbook_qa_ext_ar|0": 0, "community|piqa_ar|0": 0, "community|race_ar|0": 0, "community|sciq_ar|0": 0, "community|toxigen_ar|0": 0, "lighteval|xstory_cloze:ar|0": 0 }
{ "community|acva:Algeria": { "name": "acva:Algeria", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Algeria", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Ancient_Egypt": { "name": "acva:Ancient_Egypt", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Ancient_Egypt", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 315, "effective_num_docs": 315, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arab_Empire": { "name": "acva:Arab_Empire", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arab_Empire", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 265, "effective_num_docs": 265, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Architecture": { "name": "acva:Arabic_Architecture", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Architecture", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Art": { "name": "acva:Arabic_Art", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Art", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Astronomy": { "name": "acva:Arabic_Astronomy", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Astronomy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, 
"community|acva:Arabic_Calligraphy": { "name": "acva:Arabic_Calligraphy", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Calligraphy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 255, "effective_num_docs": 255, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Ceremony": { "name": "acva:Arabic_Ceremony", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Ceremony", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 185, "effective_num_docs": 185, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Clothing": { "name": "acva:Arabic_Clothing", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Clothing", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Culture": { "name": "acva:Arabic_Culture", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Culture", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Food": { "name": "acva:Arabic_Food", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Food", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Funeral": { "name": "acva:Arabic_Funeral", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Funeral", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 95, "effective_num_docs": 95, "trust_dataset": null, "must_remove_duplicate_docs": null, 
"version": 0 }, "community|acva:Arabic_Geography": { "name": "acva:Arabic_Geography", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Geography", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_History": { "name": "acva:Arabic_History", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_History", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Language_Origin": { "name": "acva:Arabic_Language_Origin", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Language_Origin", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 95, "effective_num_docs": 95, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Literature": { "name": "acva:Arabic_Literature", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Literature", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Math": { "name": "acva:Arabic_Math", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Math", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Medicine": { "name": "acva:Arabic_Medicine", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Medicine", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, 
"must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Music": { "name": "acva:Arabic_Music", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Music", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 139, "effective_num_docs": 139, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Ornament": { "name": "acva:Arabic_Ornament", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Ornament", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Philosophy": { "name": "acva:Arabic_Philosophy", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Philosophy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Physics_and_Chemistry": { "name": "acva:Arabic_Physics_and_Chemistry", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Physics_and_Chemistry", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Wedding": { "name": "acva:Arabic_Wedding", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Wedding", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Bahrain": { "name": "acva:Bahrain", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Bahrain", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, 
"effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Comoros": { "name": "acva:Comoros", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Comoros", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Egypt_modern": { "name": "acva:Egypt_modern", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Egypt_modern", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 95, "effective_num_docs": 95, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromAncientEgypt": { "name": "acva:InfluenceFromAncientEgypt", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromAncientEgypt", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromByzantium": { "name": "acva:InfluenceFromByzantium", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromByzantium", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromChina": { "name": "acva:InfluenceFromChina", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromChina", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromGreece": { "name": "acva:InfluenceFromGreece", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromGreece", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": 
false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromIslam": { "name": "acva:InfluenceFromIslam", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromIslam", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromPersia": { "name": "acva:InfluenceFromPersia", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromPersia", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 175, "effective_num_docs": 175, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromRome": { "name": "acva:InfluenceFromRome", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromRome", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Iraq": { "name": "acva:Iraq", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Iraq", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 85, "effective_num_docs": 85, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Islam_Education": { "name": "acva:Islam_Education", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Islam_Education", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Islam_branches_and_schools": { "name": "acva:Islam_branches_and_schools", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Islam_branches_and_schools", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, 
"output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 175, "effective_num_docs": 175, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Islamic_law_system": { "name": "acva:Islamic_law_system", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Islamic_law_system", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Jordan": { "name": "acva:Jordan", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Jordan", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Kuwait": { "name": "acva:Kuwait", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Kuwait", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Lebanon": { "name": "acva:Lebanon", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Lebanon", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Libya": { "name": "acva:Libya", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Libya", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Mauritania": { "name": "acva:Mauritania", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Mauritania", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], 
"original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Mesopotamia_civilization": { "name": "acva:Mesopotamia_civilization", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Mesopotamia_civilization", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 155, "effective_num_docs": 155, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Morocco": { "name": "acva:Morocco", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Morocco", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Oman": { "name": "acva:Oman", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Oman", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Palestine": { "name": "acva:Palestine", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Palestine", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 85, "effective_num_docs": 85, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Qatar": { "name": "acva:Qatar", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Qatar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Saudi_Arabia": { "name": "acva:Saudi_Arabia", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Saudi_Arabia", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, 
"must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Somalia": { "name": "acva:Somalia", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Somalia", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Sudan": { "name": "acva:Sudan", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Sudan", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Syria": { "name": "acva:Syria", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Syria", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Tunisia": { "name": "acva:Tunisia", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Tunisia", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:United_Arab_Emirates": { "name": "acva:United_Arab_Emirates", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "United_Arab_Emirates", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 85, "effective_num_docs": 85, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Yemen": { "name": "acva:Yemen", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Yemen", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 10, "effective_num_docs": 10, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:communication": { "name": 
"acva:communication", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "communication", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 364, "effective_num_docs": 364, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:computer_and_phone": { "name": "acva:computer_and_phone", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "computer_and_phone", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 295, "effective_num_docs": 295, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:daily_life": { "name": "acva:daily_life", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "daily_life", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 337, "effective_num_docs": 337, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:entertainment": { "name": "acva:entertainment", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "entertainment", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 295, "effective_num_docs": 295, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:mcq_exams_test_ar": { "name": "alghafa:mcq_exams_test_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "mcq_exams_test_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 557, "effective_num_docs": 557, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:meta_ar_dialects": { "name": "alghafa:meta_ar_dialects", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "meta_ar_dialects", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 5395, "effective_num_docs": 5395, 
"trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:meta_ar_msa": { "name": "alghafa:meta_ar_msa", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "meta_ar_msa", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 895, "effective_num_docs": 895, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_facts_truefalse_balanced_task": { "name": "alghafa:multiple_choice_facts_truefalse_balanced_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_facts_truefalse_balanced_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 75, "effective_num_docs": 75, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_grounded_statement_soqal_task": { "name": "alghafa:multiple_choice_grounded_statement_soqal_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_grounded_statement_soqal_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 150, "effective_num_docs": 150, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { "name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 150, "effective_num_docs": 150, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { "name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 
7995, "effective_num_docs": 7995, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_rating_sentiment_task": { "name": "alghafa:multiple_choice_rating_sentiment_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_rating_sentiment_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 5995, "effective_num_docs": 5995, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_sentiment_task": { "name": "alghafa:multiple_choice_sentiment_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_sentiment_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 1720, "effective_num_docs": 1720, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_exams": { "name": "arabic_exams", "prompt_function": "arabic_exams", "hf_repo": "OALL/Arabic_EXAMS", "hf_subset": "default", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 537, "effective_num_docs": 537, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:abstract_algebra": { "name": "arabic_mmlu:abstract_algebra", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "abstract_algebra", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:anatomy": { "name": "arabic_mmlu:anatomy", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "anatomy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 135, "effective_num_docs": 135, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:astronomy": { "name": "arabic_mmlu:astronomy", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "astronomy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], 
"evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 152, "effective_num_docs": 152, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:business_ethics": { "name": "arabic_mmlu:business_ethics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "business_ethics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:clinical_knowledge": { "name": "arabic_mmlu:clinical_knowledge", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "clinical_knowledge", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 265, "effective_num_docs": 265, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_biology": { "name": "arabic_mmlu:college_biology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_biology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 144, "effective_num_docs": 144, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_chemistry": { "name": "arabic_mmlu:college_chemistry", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_chemistry", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_computer_science": { "name": "arabic_mmlu:college_computer_science", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_computer_science", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_mathematics": { "name": "arabic_mmlu:college_mathematics", "prompt_function": 
"mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_mathematics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_medicine": { "name": "arabic_mmlu:college_medicine", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_medicine", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 173, "effective_num_docs": 173, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_physics": { "name": "arabic_mmlu:college_physics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_physics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 102, "effective_num_docs": 102, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:computer_security": { "name": "arabic_mmlu:computer_security", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "computer_security", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:conceptual_physics": { "name": "arabic_mmlu:conceptual_physics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "conceptual_physics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 235, "effective_num_docs": 235, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:econometrics": { "name": "arabic_mmlu:econometrics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "econometrics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 114, "effective_num_docs": 114, "trust_dataset": null, "must_remove_duplicate_docs": 
null, "version": 0 }, "community|arabic_mmlu:electrical_engineering": { "name": "arabic_mmlu:electrical_engineering", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "electrical_engineering", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:elementary_mathematics": { "name": "arabic_mmlu:elementary_mathematics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "elementary_mathematics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 378, "effective_num_docs": 378, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:formal_logic": { "name": "arabic_mmlu:formal_logic", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "formal_logic", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 126, "effective_num_docs": 126, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:global_facts": { "name": "arabic_mmlu:global_facts", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "global_facts", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_biology": { "name": "arabic_mmlu:high_school_biology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_biology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 310, "effective_num_docs": 310, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_chemistry": { "name": "arabic_mmlu:high_school_chemistry", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_chemistry", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, 
"num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 203, "effective_num_docs": 203, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_computer_science": { "name": "arabic_mmlu:high_school_computer_science", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_computer_science", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_european_history": { "name": "arabic_mmlu:high_school_european_history", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_european_history", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 165, "effective_num_docs": 165, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_geography": { "name": "arabic_mmlu:high_school_geography", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_geography", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 198, "effective_num_docs": 198, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_government_and_politics": { "name": "arabic_mmlu:high_school_government_and_politics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_government_and_politics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 193, "effective_num_docs": 193, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_macroeconomics": { "name": "arabic_mmlu:high_school_macroeconomics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_macroeconomics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 390, "effective_num_docs": 390, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_mathematics": { "name": "arabic_mmlu:high_school_mathematics", "prompt_function": 
"mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_mathematics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 270, "effective_num_docs": 270, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_microeconomics": { "name": "arabic_mmlu:high_school_microeconomics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_microeconomics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 238, "effective_num_docs": 238, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_physics": { "name": "arabic_mmlu:high_school_physics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_physics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 151, "effective_num_docs": 151, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_psychology": { "name": "arabic_mmlu:high_school_psychology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_psychology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 545, "effective_num_docs": 545, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_statistics": { "name": "arabic_mmlu:high_school_statistics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_statistics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 216, "effective_num_docs": 216, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_us_history": { "name": "arabic_mmlu:high_school_us_history", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_us_history", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], 
"original_num_docs": 204, "effective_num_docs": 204, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_world_history": { "name": "arabic_mmlu:high_school_world_history", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_world_history", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 237, "effective_num_docs": 237, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:human_aging": { "name": "arabic_mmlu:human_aging", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "human_aging", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 223, "effective_num_docs": 223, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:human_sexuality": { "name": "arabic_mmlu:human_sexuality", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "human_sexuality", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 131, "effective_num_docs": 131, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:international_law": { "name": "arabic_mmlu:international_law", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "international_law", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 121, "effective_num_docs": 121, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:jurisprudence": { "name": "arabic_mmlu:jurisprudence", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "jurisprudence", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 108, "effective_num_docs": 108, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:logical_fallacies": { "name": "arabic_mmlu:logical_fallacies", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "logical_fallacies", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", 
"generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 163, "effective_num_docs": 163, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:machine_learning": { "name": "arabic_mmlu:machine_learning", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "machine_learning", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 112, "effective_num_docs": 112, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:management": { "name": "arabic_mmlu:management", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "management", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 103, "effective_num_docs": 103, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:marketing": { "name": "arabic_mmlu:marketing", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "marketing", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 234, "effective_num_docs": 234, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:medical_genetics": { "name": "arabic_mmlu:medical_genetics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "medical_genetics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:miscellaneous": { "name": "arabic_mmlu:miscellaneous", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "miscellaneous", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 783, "effective_num_docs": 783, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:moral_disputes": { "name": "arabic_mmlu:moral_disputes", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "moral_disputes", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ 
"test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 346, "effective_num_docs": 346, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:moral_scenarios": { "name": "arabic_mmlu:moral_scenarios", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "moral_scenarios", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 895, "effective_num_docs": 895, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:nutrition": { "name": "arabic_mmlu:nutrition", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "nutrition", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 306, "effective_num_docs": 306, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:philosophy": { "name": "arabic_mmlu:philosophy", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "philosophy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 311, "effective_num_docs": 311, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:prehistory": { "name": "arabic_mmlu:prehistory", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "prehistory", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 324, "effective_num_docs": 324, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:professional_accounting": { "name": "arabic_mmlu:professional_accounting", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "professional_accounting", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 282, "effective_num_docs": 282, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:professional_law": { "name": "arabic_mmlu:professional_law", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "professional_law", "metric": [ 
"loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 1534, "effective_num_docs": 1534, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:professional_medicine": { "name": "arabic_mmlu:professional_medicine", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "professional_medicine", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 272, "effective_num_docs": 272, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:professional_psychology": { "name": "arabic_mmlu:professional_psychology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "professional_psychology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 612, "effective_num_docs": 612, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:public_relations": { "name": "arabic_mmlu:public_relations", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "public_relations", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 110, "effective_num_docs": 110, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:security_studies": { "name": "arabic_mmlu:security_studies", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "security_studies", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 245, "effective_num_docs": 245, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:sociology": { "name": "arabic_mmlu:sociology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "sociology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 201, "effective_num_docs": 201, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:us_foreign_policy": { "name": 
"arabic_mmlu:us_foreign_policy", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "us_foreign_policy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:virology": { "name": "arabic_mmlu:virology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "virology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 166, "effective_num_docs": 166, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:world_religions": { "name": "arabic_mmlu:world_religions", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "world_religions", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 171, "effective_num_docs": 171, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arc_challenge_okapi_ar": { "name": "arc_challenge_okapi_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "arc_challenge_okapi_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 1160, "effective_num_docs": 1160, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arc_easy_ar": { "name": "arc_easy_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "arc_easy_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 2364, "effective_num_docs": 2364, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|boolq_ar": { "name": "boolq_ar", "prompt_function": "boolq_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "boolq_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 
3260, "effective_num_docs": 3260, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|copa_ext_ar": { "name": "copa_ext_ar", "prompt_function": "copa_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "copa_ext_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 90, "effective_num_docs": 90, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|hellaswag_okapi_ar": { "name": "hellaswag_okapi_ar", "prompt_function": "hellaswag_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "hellaswag_okapi_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 9171, "effective_num_docs": 9171, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|openbook_qa_ext_ar": { "name": "openbook_qa_ext_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "openbook_qa_ext_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 495, "effective_num_docs": 495, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|piqa_ar": { "name": "piqa_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "piqa_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 1833, "effective_num_docs": 1833, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|race_ar": { "name": "race_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "race_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 4929, "effective_num_docs": 4929, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|sciq_ar": { "name": "sciq_ar", "prompt_function": "sciq_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "sciq_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": 
"validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 995, "effective_num_docs": 995, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|toxigen_ar": { "name": "toxigen_ar", "prompt_function": "toxigen_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "toxigen_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 935, "effective_num_docs": 935, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "lighteval|xstory_cloze:ar": { "name": "xstory_cloze:ar", "prompt_function": "storycloze", "hf_repo": "juletxara/xstory_cloze", "hf_subset": "ar", "metric": [ "loglikelihood_acc" ], "hf_avail_splits": [ "training", "eval" ], "evaluation_splits": [ "eval" ], "few_shots_split": null, "few_shots_select": null, "generation_size": -1, "stop_sequence": [ "\n" ], "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "lighteval" ], "original_num_docs": 1511, "effective_num_docs": 1511, "trust_dataset": true, "must_remove_duplicate_docs": null, "version": 0 } }
{ "community|acva:Algeria|0": { "hashes": { "hash_examples": "da5a3003cd46f6f9", "hash_full_prompts": "da5a3003cd46f6f9", "hash_input_tokens": "e5fc3dac42dfee72", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Ancient_Egypt|0": { "hashes": { "hash_examples": "52d6f767fede195b", "hash_full_prompts": "52d6f767fede195b", "hash_input_tokens": "f081df2e883d3501", "hash_cont_tokens": "02a204d955f29ed4" }, "truncated": 0, "non_truncated": 315, "padded": 630, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arab_Empire|0": { "hashes": { "hash_examples": "8dacff6a79804a75", "hash_full_prompts": "8dacff6a79804a75", "hash_input_tokens": "e49cd4090dad143b", "hash_cont_tokens": "0be121aeaa740bc8" }, "truncated": 0, "non_truncated": 265, "padded": 530, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Architecture|0": { "hashes": { "hash_examples": "df286cd862d9f6bb", "hash_full_prompts": "df286cd862d9f6bb", "hash_input_tokens": "3b7211c0a255b1c0", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Art|0": { "hashes": { "hash_examples": "112883d764118a49", "hash_full_prompts": "112883d764118a49", "hash_input_tokens": "917ed5049127fed5", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Astronomy|0": { "hashes": { "hash_examples": "20dcdf2454bf8671", "hash_full_prompts": "20dcdf2454bf8671", "hash_input_tokens": "97f6f79b2dac0efc", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Calligraphy|0": { "hashes": { "hash_examples": "3a9f9d1ebe868a15", "hash_full_prompts": "3a9f9d1ebe868a15", "hash_input_tokens": "2fad42b484dc59aa", "hash_cont_tokens": "3a362560f15a8d81" }, "truncated": 0, "non_truncated": 255, "padded": 510, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Ceremony|0": { "hashes": { "hash_examples": "c927630f8d2f44da", "hash_full_prompts": "c927630f8d2f44da", "hash_input_tokens": "9798eb01d07c3556", "hash_cont_tokens": "219de3ed588d7bf7" }, "truncated": 0, "non_truncated": 185, "padded": 370, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Clothing|0": { "hashes": { "hash_examples": "6ad0740c2ac6ac92", "hash_full_prompts": "6ad0740c2ac6ac92", "hash_input_tokens": "18fedb803dfd2f04", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Culture|0": { "hashes": { "hash_examples": "2177bd857ad872ae", "hash_full_prompts": "2177bd857ad872ae", "hash_input_tokens": "a6a59b42464c5837", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Food|0": { "hashes": { "hash_examples": "a6ada65b71d7c9c5", "hash_full_prompts": "a6ada65b71d7c9c5", "hash_input_tokens": "614343393975f735", 
"hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Funeral|0": { "hashes": { "hash_examples": "fcee39dc29eaae91", "hash_full_prompts": "fcee39dc29eaae91", "hash_input_tokens": "104ed3483963acca", "hash_cont_tokens": "c36c7371f1293511" }, "truncated": 0, "non_truncated": 95, "padded": 190, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Geography|0": { "hashes": { "hash_examples": "d36eda7c89231c02", "hash_full_prompts": "d36eda7c89231c02", "hash_input_tokens": "777d2a562f29e686", "hash_cont_tokens": "625e58e7a01dba13" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_History|0": { "hashes": { "hash_examples": "6354ac0d6db6a5fc", "hash_full_prompts": "6354ac0d6db6a5fc", "hash_input_tokens": "9c2057e05dabdf89", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Language_Origin|0": { "hashes": { "hash_examples": "ddc967c8aca34402", "hash_full_prompts": "ddc967c8aca34402", "hash_input_tokens": "a96a67eb4d720f13", "hash_cont_tokens": "c36c7371f1293511" }, "truncated": 0, "non_truncated": 95, "padded": 190, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Literature|0": { "hashes": { "hash_examples": "4305379fd46be5d8", "hash_full_prompts": "4305379fd46be5d8", "hash_input_tokens": "f26e61f50f0bd444", "hash_cont_tokens": "625e58e7a01dba13" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Math|0": { "hashes": { "hash_examples": "dec621144f4d28be", "hash_full_prompts": "dec621144f4d28be", "hash_input_tokens": "552e82b34d3ef11d", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Medicine|0": { "hashes": { "hash_examples": "2b344cdae9495ff2", "hash_full_prompts": "2b344cdae9495ff2", "hash_input_tokens": "93ce0692f579e950", "hash_cont_tokens": "625e58e7a01dba13" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Music|0": { "hashes": { "hash_examples": "0c54624d881944ce", "hash_full_prompts": "0c54624d881944ce", "hash_input_tokens": "ec71ed53e3bc6fab", "hash_cont_tokens": "4ac287553cdf8021" }, "truncated": 0, "non_truncated": 139, "padded": 278, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Ornament|0": { "hashes": { "hash_examples": "251a4a84289d8bc1", "hash_full_prompts": "251a4a84289d8bc1", "hash_input_tokens": "c77ed1df992f23ce", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Philosophy|0": { "hashes": { "hash_examples": "3f86fb9c94c13d22", "hash_full_prompts": "3f86fb9c94c13d22", "hash_input_tokens": "8f7be7cf36d086ec", "hash_cont_tokens": "625e58e7a01dba13" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0, 
"num_truncated_few_shots": 0 }, "community|acva:Arabic_Physics_and_Chemistry|0": { "hashes": { "hash_examples": "8fec65af3695b62a", "hash_full_prompts": "8fec65af3695b62a", "hash_input_tokens": "4598dfecde892b4a", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Wedding|0": { "hashes": { "hash_examples": "9cc3477184d7a4b8", "hash_full_prompts": "9cc3477184d7a4b8", "hash_input_tokens": "6a2e782fd3e14f2d", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Bahrain|0": { "hashes": { "hash_examples": "c92e803a0fa8b9e2", "hash_full_prompts": "c92e803a0fa8b9e2", "hash_input_tokens": "8cefee797012c64a", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Comoros|0": { "hashes": { "hash_examples": "06e5d4bba8e54cae", "hash_full_prompts": "06e5d4bba8e54cae", "hash_input_tokens": "88b4ecbe7a031dab", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Egypt_modern|0": { "hashes": { "hash_examples": "c6ec369164f93446", "hash_full_prompts": "c6ec369164f93446", "hash_input_tokens": "019a315de60f65ca", "hash_cont_tokens": "c36c7371f1293511" }, "truncated": 0, "non_truncated": 95, "padded": 190, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromAncientEgypt|0": { "hashes": { "hash_examples": "b9d56d74818b9bd4", "hash_full_prompts": "b9d56d74818b9bd4", "hash_input_tokens": "ff0c4477330411dc", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromByzantium|0": { "hashes": { "hash_examples": "5316c9624e7e59b8", "hash_full_prompts": "5316c9624e7e59b8", "hash_input_tokens": "1b910a4656773c97", "hash_cont_tokens": "625e58e7a01dba13" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromChina|0": { "hashes": { "hash_examples": "87894bce95a56411", "hash_full_prompts": "87894bce95a56411", "hash_input_tokens": "9b007c9c10fc8c4f", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromGreece|0": { "hashes": { "hash_examples": "0baa78a27e469312", "hash_full_prompts": "0baa78a27e469312", "hash_input_tokens": "cf3925b9e583eef1", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromIslam|0": { "hashes": { "hash_examples": "0c2532cde6541ff2", "hash_full_prompts": "0c2532cde6541ff2", "hash_input_tokens": "340ae221253932a4", "hash_cont_tokens": "625e58e7a01dba13" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromPersia|0": { "hashes": { "hash_examples": "efcd8112dc53c6e5", "hash_full_prompts": 
"efcd8112dc53c6e5", "hash_input_tokens": "90cbe499e5227cd8", "hash_cont_tokens": "0060d8f35205c778" }, "truncated": 0, "non_truncated": 175, "padded": 350, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromRome|0": { "hashes": { "hash_examples": "9db61480e2e85fd3", "hash_full_prompts": "9db61480e2e85fd3", "hash_input_tokens": "8e18c6eeaa6a5a68", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Iraq|0": { "hashes": { "hash_examples": "96dac3dfa8d2f41f", "hash_full_prompts": "96dac3dfa8d2f41f", "hash_input_tokens": "c8dacea3e8584b51", "hash_cont_tokens": "174ee430e070c2fa" }, "truncated": 0, "non_truncated": 85, "padded": 170, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Islam_Education|0": { "hashes": { "hash_examples": "0d80355f6a4cb51b", "hash_full_prompts": "0d80355f6a4cb51b", "hash_input_tokens": "39195312d4cbcdf9", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Islam_branches_and_schools|0": { "hashes": { "hash_examples": "5cedce1be2c3ad50", "hash_full_prompts": "5cedce1be2c3ad50", "hash_input_tokens": "71c8ffecf01f9a65", "hash_cont_tokens": "0060d8f35205c778" }, "truncated": 0, "non_truncated": 175, "padded": 350, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Islamic_law_system|0": { "hashes": { "hash_examples": "c0e6db8bc84e105e", "hash_full_prompts": "c0e6db8bc84e105e", "hash_input_tokens": "a7be3511d40b8730", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Jordan|0": { "hashes": { "hash_examples": "33deb5b4e5ddd6a1", "hash_full_prompts": "33deb5b4e5ddd6a1", "hash_input_tokens": "baba1b519e66d805", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Kuwait|0": { "hashes": { "hash_examples": "eb41773346d7c46c", "hash_full_prompts": "eb41773346d7c46c", "hash_input_tokens": "47abf2c5a502136f", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Lebanon|0": { "hashes": { "hash_examples": "25932dbf4c13d34f", "hash_full_prompts": "25932dbf4c13d34f", "hash_input_tokens": "806467ce0e7e49c6", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Libya|0": { "hashes": { "hash_examples": "f2c4db63cd402926", "hash_full_prompts": "f2c4db63cd402926", "hash_input_tokens": "23c9ca0b184d1265", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Mauritania|0": { "hashes": { "hash_examples": "8723ab5fdf286b54", "hash_full_prompts": "8723ab5fdf286b54", "hash_input_tokens": "a220b5302a4477a3", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, 
"num_truncated_few_shots": 0 }, "community|acva:Mesopotamia_civilization|0": { "hashes": { "hash_examples": "c33f5502a6130ca9", "hash_full_prompts": "c33f5502a6130ca9", "hash_input_tokens": "a9c1105c3f7780a1", "hash_cont_tokens": "ac62599297c498fd" }, "truncated": 0, "non_truncated": 155, "padded": 310, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Morocco|0": { "hashes": { "hash_examples": "588a5ed27904b1ae", "hash_full_prompts": "588a5ed27904b1ae", "hash_input_tokens": "4f45ccd38df6350f", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Oman|0": { "hashes": { "hash_examples": "d447c52b94248b69", "hash_full_prompts": "d447c52b94248b69", "hash_input_tokens": "69ba916374801e59", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Palestine|0": { "hashes": { "hash_examples": "19197e076ad14ff5", "hash_full_prompts": "19197e076ad14ff5", "hash_input_tokens": "77b136de826313a6", "hash_cont_tokens": "174ee430e070c2fa" }, "truncated": 0, "non_truncated": 85, "padded": 170, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Qatar|0": { "hashes": { "hash_examples": "cf0736fa185b28f6", "hash_full_prompts": "cf0736fa185b28f6", "hash_input_tokens": "2b658a817eee80ad", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Saudi_Arabia|0": { "hashes": { "hash_examples": "69beda6e1b85a08d", "hash_full_prompts": "69beda6e1b85a08d", "hash_input_tokens": "4a1a426d70693e9b", "hash_cont_tokens": "ebddcaf492db5bb8" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Somalia|0": { "hashes": { "hash_examples": "b387940c65784fbf", "hash_full_prompts": "b387940c65784fbf", "hash_input_tokens": "d083c83b69b71bff", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Sudan|0": { "hashes": { "hash_examples": "e02c32b9d2dd0c3f", "hash_full_prompts": "e02c32b9d2dd0c3f", "hash_input_tokens": "0f255f2a0fb8c129", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Syria|0": { "hashes": { "hash_examples": "60a6f8fe73bda4bb", "hash_full_prompts": "60a6f8fe73bda4bb", "hash_input_tokens": "939d04f35ee00e35", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Tunisia|0": { "hashes": { "hash_examples": "34bb15d3830c5649", "hash_full_prompts": "34bb15d3830c5649", "hash_input_tokens": "7e37207612b9765b", "hash_cont_tokens": "9ad7f58ff8a11e98" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:United_Arab_Emirates|0": { "hashes": { "hash_examples": "98a0ba78172718ce", "hash_full_prompts": "98a0ba78172718ce", "hash_input_tokens": "335e6dbf7b60511d", "hash_cont_tokens": "174ee430e070c2fa" 
}, "truncated": 0, "non_truncated": 85, "padded": 170, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:Yemen|0": { "hashes": { "hash_examples": "18e9bcccbb4ced7a", "hash_full_prompts": "18e9bcccbb4ced7a", "hash_input_tokens": "42bd1ebf201ab4bd", "hash_cont_tokens": "96702f2356f6107c" }, "truncated": 0, "non_truncated": 10, "padded": 20, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:communication|0": { "hashes": { "hash_examples": "9ff28ab5eab5c97b", "hash_full_prompts": "9ff28ab5eab5c97b", "hash_input_tokens": "cde9900484637577", "hash_cont_tokens": "b628a89bcecf356d" }, "truncated": 0, "non_truncated": 364, "padded": 728, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:computer_and_phone|0": { "hashes": { "hash_examples": "37bac2f086aaf6c2", "hash_full_prompts": "37bac2f086aaf6c2", "hash_input_tokens": "3237cdf9e89d962b", "hash_cont_tokens": "d8ae57c62ca85f4b" }, "truncated": 0, "non_truncated": 295, "padded": 590, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:daily_life|0": { "hashes": { "hash_examples": "bf07363c1c252e2f", "hash_full_prompts": "bf07363c1c252e2f", "hash_input_tokens": "e09edb9a10702466", "hash_cont_tokens": "d598caf874354f48" }, "truncated": 0, "non_truncated": 337, "padded": 674, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|acva:entertainment|0": { "hashes": { "hash_examples": "37077bc00f0ac56a", "hash_full_prompts": "37077bc00f0ac56a", "hash_input_tokens": "259507eedd195806", "hash_cont_tokens": "d8ae57c62ca85f4b" }, "truncated": 0, "non_truncated": 295, "padded": 590, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:mcq_exams_test_ar|0": { "hashes": { "hash_examples": "c07a5e78c5c0b8fe", "hash_full_prompts": "c07a5e78c5c0b8fe", "hash_input_tokens": "d0e658e1d3f525ca", "hash_cont_tokens": "478fa268e1fd1e4d" }, "truncated": 0, "non_truncated": 557, "padded": 2228, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:meta_ar_dialects|0": { "hashes": { "hash_examples": "c0b6081f83e14064", "hash_full_prompts": "c0b6081f83e14064", "hash_input_tokens": "eba73771657157ae", "hash_cont_tokens": "3dea80a1ee9dc316" }, "truncated": 0, "non_truncated": 5395, "padded": 21451, "non_padded": 129, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:meta_ar_msa|0": { "hashes": { "hash_examples": "64eb78a7c5b7484b", "hash_full_prompts": "64eb78a7c5b7484b", "hash_input_tokens": "794aecc1fbbe0793", "hash_cont_tokens": "b7b27059aa46f9f5" }, "truncated": 0, "non_truncated": 895, "padded": 3554, "non_padded": 26, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { "hashes": { "hash_examples": "54fc3502c1c02c06", "hash_full_prompts": "54fc3502c1c02c06", "hash_input_tokens": "3f3ed402e71042a8", "hash_cont_tokens": "30ae320a6284bd96" }, "truncated": 0, "non_truncated": 75, "padded": 150, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { "hashes": { "hash_examples": "46572d83696552ae", "hash_full_prompts": "46572d83696552ae", "hash_input_tokens": "27b35507743dcffa", "hash_cont_tokens": "2e89ca40d66b31a0" }, "truncated": 0, "non_truncated": 150, "padded": 743, "non_padded": 7, "effective_few_shots": 0, 
"num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { "hashes": { "hash_examples": "f430d97ff715bc1c", "hash_full_prompts": "f430d97ff715bc1c", "hash_input_tokens": "e9b6514c4ccbb61a", "hash_cont_tokens": "6e7f62230276d03d" }, "truncated": 0, "non_truncated": 150, "padded": 746, "non_padded": 4, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { "hashes": { "hash_examples": "6b70a7416584f98c", "hash_full_prompts": "6b70a7416584f98c", "hash_input_tokens": "3bae7e59f2386e88", "hash_cont_tokens": "cd589f2d0662aca9" }, "truncated": 0, "non_truncated": 7995, "padded": 15990, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_rating_sentiment_task|0": { "hashes": { "hash_examples": "bc2005cc9d2f436e", "hash_full_prompts": "bc2005cc9d2f436e", "hash_input_tokens": "1615e207001a79c4", "hash_cont_tokens": "43c11b648549da66" }, "truncated": 0, "non_truncated": 5995, "padded": 17843, "non_padded": 142, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_sentiment_task|0": { "hashes": { "hash_examples": "6fb0e254ea5945d8", "hash_full_prompts": "6fb0e254ea5945d8", "hash_input_tokens": "6dc4e300f2ff1bfe", "hash_cont_tokens": "040bf50c610943db" }, "truncated": 0, "non_truncated": 1720, "padded": 5061, "non_padded": 99, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_exams|0": { "hashes": { "hash_examples": "6d721df351722656", "hash_full_prompts": "6d721df351722656", "hash_input_tokens": "aca56dc046703947", "hash_cont_tokens": "f6dc9d4abb83a50a" }, "truncated": 0, "non_truncated": 537, "padded": 2116, "non_padded": 32, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:abstract_algebra|0": { "hashes": { "hash_examples": "f2ddca8f45c0a511", "hash_full_prompts": "f2ddca8f45c0a511", "hash_input_tokens": "d7042e2e14520155", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:anatomy|0": { "hashes": { "hash_examples": "dfdbc1b83107668d", "hash_full_prompts": "dfdbc1b83107668d", "hash_input_tokens": "e20f6796c6d5fb1f", "hash_cont_tokens": "b5c1c612518185a5" }, "truncated": 0, "non_truncated": 135, "padded": 532, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:astronomy|0": { "hashes": { "hash_examples": "9736a606002a848e", "hash_full_prompts": "9736a606002a848e", "hash_input_tokens": "8da405e4905c75a2", "hash_cont_tokens": "9a91066ba51b2074" }, "truncated": 0, "non_truncated": 152, "padded": 608, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:business_ethics|0": { "hashes": { "hash_examples": "735e452fbb6dc63d", "hash_full_prompts": "735e452fbb6dc63d", "hash_input_tokens": "2db4839c0303775f", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 396, "non_padded": 4, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:clinical_knowledge|0": { "hashes": { "hash_examples": "6ab0ca4da98aedcf", "hash_full_prompts": "6ab0ca4da98aedcf", "hash_input_tokens": "94cfbbf9f11b0b2a", "hash_cont_tokens": "4a838d5fa832139f" }, "truncated": 0, "non_truncated": 265, "padded": 1044, "non_padded": 16, "effective_few_shots": 0, 
"num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_biology|0": { "hashes": { "hash_examples": "17e4e390848018a4", "hash_full_prompts": "17e4e390848018a4", "hash_input_tokens": "9b43adc07f3e9d27", "hash_cont_tokens": "d95798e261e3ecf2" }, "truncated": 0, "non_truncated": 144, "padded": 568, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_chemistry|0": { "hashes": { "hash_examples": "4abb169f6dfd234b", "hash_full_prompts": "4abb169f6dfd234b", "hash_input_tokens": "85bfe55fbc8e6ac0", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 392, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_computer_science|0": { "hashes": { "hash_examples": "a369e2e941358a1e", "hash_full_prompts": "a369e2e941358a1e", "hash_input_tokens": "9f570bd02bec33c5", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_mathematics|0": { "hashes": { "hash_examples": "d7be03b8b6020bff", "hash_full_prompts": "d7be03b8b6020bff", "hash_input_tokens": "7e77bf0db39d418e", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 392, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_medicine|0": { "hashes": { "hash_examples": "0518a00f097346bf", "hash_full_prompts": "0518a00f097346bf", "hash_input_tokens": "e3f6484c467dfa1f", "hash_cont_tokens": "0a01d731701f68e5" }, "truncated": 0, "non_truncated": 173, "padded": 680, "non_padded": 12, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_physics|0": { "hashes": { "hash_examples": "5d842cd49bc70e12", "hash_full_prompts": "5d842cd49bc70e12", "hash_input_tokens": "db230e09c1538af9", "hash_cont_tokens": "77e73d9510077678" }, "truncated": 0, "non_truncated": 102, "padded": 404, "non_padded": 4, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:computer_security|0": { "hashes": { "hash_examples": "8e85d9f85be9b32f", "hash_full_prompts": "8e85d9f85be9b32f", "hash_input_tokens": "d72e2a6c23180e6f", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 392, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:conceptual_physics|0": { "hashes": { "hash_examples": "7964b55a0a49502b", "hash_full_prompts": "7964b55a0a49502b", "hash_input_tokens": "df74b1a60e8a2e7c", "hash_cont_tokens": "01df071ebfaec74d" }, "truncated": 0, "non_truncated": 235, "padded": 896, "non_padded": 44, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:econometrics|0": { "hashes": { "hash_examples": "1e192eae38347257", "hash_full_prompts": "1e192eae38347257", "hash_input_tokens": "1062cf3f569a55d7", "hash_cont_tokens": "e3fc2917921a9eaf" }, "truncated": 0, "non_truncated": 114, "padded": 432, "non_padded": 24, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:electrical_engineering|0": { "hashes": { "hash_examples": "cf97671d5c441da1", "hash_full_prompts": "cf97671d5c441da1", "hash_input_tokens": "6a55abcd2bff7101", "hash_cont_tokens": "c8a5aa308b735a83" }, "truncated": 0, "non_truncated": 145, "padded": 560, "non_padded": 20, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, 
"community|arabic_mmlu:elementary_mathematics|0": { "hashes": { "hash_examples": "6f49107ed43c40c5", "hash_full_prompts": "6f49107ed43c40c5", "hash_input_tokens": "9ddae447ded45c02", "hash_cont_tokens": "df4fdf22c42e07a2" }, "truncated": 0, "non_truncated": 378, "padded": 1488, "non_padded": 24, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:formal_logic|0": { "hashes": { "hash_examples": "7922c376008ba77b", "hash_full_prompts": "7922c376008ba77b", "hash_input_tokens": "22e82af5bbc2bf17", "hash_cont_tokens": "3a65271847fd1f2e" }, "truncated": 0, "non_truncated": 126, "padded": 496, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:global_facts|0": { "hashes": { "hash_examples": "11f9813185047d5b", "hash_full_prompts": "11f9813185047d5b", "hash_input_tokens": "66d675c248e02089", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 380, "non_padded": 20, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_biology|0": { "hashes": { "hash_examples": "2a804b1d90cbe66e", "hash_full_prompts": "2a804b1d90cbe66e", "hash_input_tokens": "62cced97dc5e2601", "hash_cont_tokens": "a6b83eb9e3d31488" }, "truncated": 0, "non_truncated": 310, "padded": 1212, "non_padded": 28, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_chemistry|0": { "hashes": { "hash_examples": "0032168adabc53b4", "hash_full_prompts": "0032168adabc53b4", "hash_input_tokens": "7761de55d380cce9", "hash_cont_tokens": "8a704d95abde0656" }, "truncated": 0, "non_truncated": 203, "padded": 796, "non_padded": 16, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_computer_science|0": { "hashes": { "hash_examples": "f2fb8740f9df980f", "hash_full_prompts": "f2fb8740f9df980f", "hash_input_tokens": "af660b922e8019f8", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 392, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_european_history|0": { "hashes": { "hash_examples": "73509021e7e66435", "hash_full_prompts": "73509021e7e66435", "hash_input_tokens": "7f49c1b6a266b0b9", "hash_cont_tokens": "bc6082a11551e6de" }, "truncated": 0, "non_truncated": 165, "padded": 576, "non_padded": 84, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_geography|0": { "hashes": { "hash_examples": "9e08d1894940ff42", "hash_full_prompts": "9e08d1894940ff42", "hash_input_tokens": "0932046495a67f10", "hash_cont_tokens": "17f69684d1b7da75" }, "truncated": 0, "non_truncated": 198, "padded": 772, "non_padded": 20, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_government_and_politics|0": { "hashes": { "hash_examples": "64b7e97817ca6c76", "hash_full_prompts": "64b7e97817ca6c76", "hash_input_tokens": "ead3c42b9bf75f5d", "hash_cont_tokens": "bfdbe0094ac4e89d" }, "truncated": 0, "non_truncated": 193, "padded": 764, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_macroeconomics|0": { "hashes": { "hash_examples": "9f582da8534bd2ef", "hash_full_prompts": "9f582da8534bd2ef", "hash_input_tokens": "7c1543a80b4ede1b", "hash_cont_tokens": "a61cb28cbed86a76" }, "truncated": 0, "non_truncated": 390, "padded": 1532, "non_padded": 28, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, 
"community|arabic_mmlu:high_school_mathematics|0": { "hashes": { "hash_examples": "fd54f1c10d423c51", "hash_full_prompts": "fd54f1c10d423c51", "hash_input_tokens": "7ed446df1d4bd0fa", "hash_cont_tokens": "ede1813083303def" }, "truncated": 0, "non_truncated": 270, "padded": 1068, "non_padded": 12, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_microeconomics|0": { "hashes": { "hash_examples": "7037896925aaf42f", "hash_full_prompts": "7037896925aaf42f", "hash_input_tokens": "d46dc174eda297e7", "hash_cont_tokens": "2ce9a16a87758ab3" }, "truncated": 0, "non_truncated": 238, "padded": 920, "non_padded": 32, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_physics|0": { "hashes": { "hash_examples": "60c3776215167dae", "hash_full_prompts": "60c3776215167dae", "hash_input_tokens": "719b0072f6357a5e", "hash_cont_tokens": "34e90e2adee42b92" }, "truncated": 0, "non_truncated": 151, "padded": 580, "non_padded": 24, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_psychology|0": { "hashes": { "hash_examples": "61176bfd5da1298f", "hash_full_prompts": "61176bfd5da1298f", "hash_input_tokens": "fc71eb5e84cf4031", "hash_cont_tokens": "2d376df2c1814495" }, "truncated": 0, "non_truncated": 545, "padded": 2140, "non_padded": 40, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_statistics|0": { "hashes": { "hash_examples": "40dfeebd1ea10f76", "hash_full_prompts": "40dfeebd1ea10f76", "hash_input_tokens": "446f3bcbe952c340", "hash_cont_tokens": "4bd9660a3b058b49" }, "truncated": 0, "non_truncated": 216, "padded": 856, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_us_history|0": { "hashes": { "hash_examples": "03daa510ba917f4d", "hash_full_prompts": "03daa510ba917f4d", "hash_input_tokens": "1acac266e203e9a5", "hash_cont_tokens": "5c36ec463d3a0755" }, "truncated": 0, "non_truncated": 204, "padded": 788, "non_padded": 28, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_world_history|0": { "hashes": { "hash_examples": "be075ffd579f43c2", "hash_full_prompts": "be075ffd579f43c2", "hash_input_tokens": "31b35ccbe47f4cff", "hash_cont_tokens": "f21688a7138caf58" }, "truncated": 0, "non_truncated": 237, "padded": 872, "non_padded": 76, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:human_aging|0": { "hashes": { "hash_examples": "caa5b69f640bd1ef", "hash_full_prompts": "caa5b69f640bd1ef", "hash_input_tokens": "4752dd4d61914b6c", "hash_cont_tokens": "22b919059cbabb52" }, "truncated": 0, "non_truncated": 223, "padded": 868, "non_padded": 24, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:human_sexuality|0": { "hashes": { "hash_examples": "5ed2e38fb25a3767", "hash_full_prompts": "5ed2e38fb25a3767", "hash_input_tokens": "b193408d5d7bea73", "hash_cont_tokens": "7033845bac78be24" }, "truncated": 0, "non_truncated": 131, "padded": 524, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:international_law|0": { "hashes": { "hash_examples": "4e3e9e28d1b96484", "hash_full_prompts": "4e3e9e28d1b96484", "hash_input_tokens": "20f8d9db6a5917e5", "hash_cont_tokens": "29048e59854cec5c" }, "truncated": 0, "non_truncated": 121, "padded": 476, "non_padded": 8, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, 
"community|arabic_mmlu:jurisprudence|0": { "hashes": { "hash_examples": "e264b755366310b3", "hash_full_prompts": "e264b755366310b3", "hash_input_tokens": "6d81e19d5f151cb2", "hash_cont_tokens": "fae0419b42375fd2" }, "truncated": 0, "non_truncated": 108, "padded": 420, "non_padded": 12, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:logical_fallacies|0": { "hashes": { "hash_examples": "a4ab6965a3e38071", "hash_full_prompts": "a4ab6965a3e38071", "hash_input_tokens": "7974c5b93a8d0c5f", "hash_cont_tokens": "93c4e716b01bd87c" }, "truncated": 0, "non_truncated": 163, "padded": 636, "non_padded": 16, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:machine_learning|0": { "hashes": { "hash_examples": "b92320efa6636b40", "hash_full_prompts": "b92320efa6636b40", "hash_input_tokens": "fb54bcda51c26b7e", "hash_cont_tokens": "2e4467ffdab3254d" }, "truncated": 0, "non_truncated": 112, "padded": 432, "non_padded": 16, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:management|0": { "hashes": { "hash_examples": "c9ee4872a850fe20", "hash_full_prompts": "c9ee4872a850fe20", "hash_input_tokens": "bc69860b6c1d275a", "hash_cont_tokens": "b0936addbab0c265" }, "truncated": 0, "non_truncated": 103, "padded": 408, "non_padded": 4, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:marketing|0": { "hashes": { "hash_examples": "0c151b70f6a047e3", "hash_full_prompts": "0c151b70f6a047e3", "hash_input_tokens": "e1179d18505459de", "hash_cont_tokens": "54e2619fba846f17" }, "truncated": 0, "non_truncated": 234, "padded": 920, "non_padded": 16, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:medical_genetics|0": { "hashes": { "hash_examples": "513f6cb8fca3a24e", "hash_full_prompts": "513f6cb8fca3a24e", "hash_input_tokens": "089c3411d9789b13", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 388, "non_padded": 12, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:miscellaneous|0": { "hashes": { "hash_examples": "259a190d635331db", "hash_full_prompts": "259a190d635331db", "hash_input_tokens": "b9689e5fda8d03dd", "hash_cont_tokens": "b68c3a07a4a75876" }, "truncated": 0, "non_truncated": 783, "padded": 3088, "non_padded": 44, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:moral_disputes|0": { "hashes": { "hash_examples": "b85052c48a0b7bc3", "hash_full_prompts": "b85052c48a0b7bc3", "hash_input_tokens": "b21e7d7e1fcdc975", "hash_cont_tokens": "f54406d1e4cf99f8" }, "truncated": 0, "non_truncated": 346, "padded": 1348, "non_padded": 36, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:moral_scenarios|0": { "hashes": { "hash_examples": "28d0b069ef00dd00", "hash_full_prompts": "28d0b069ef00dd00", "hash_input_tokens": "63c5f825682abb5a", "hash_cont_tokens": "1a38cd5b4241444e" }, "truncated": 0, "non_truncated": 895, "padded": 3580, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:nutrition|0": { "hashes": { "hash_examples": "00c9bc5f1d305b2f", "hash_full_prompts": "00c9bc5f1d305b2f", "hash_input_tokens": "1136f15ad76ee823", "hash_cont_tokens": "9d443ff23bc12c12" }, "truncated": 0, "non_truncated": 306, "padded": 1192, "non_padded": 32, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:philosophy|0": { "hashes": { "hash_examples": "a458c08454a3fd5f", 
"hash_full_prompts": "a458c08454a3fd5f", "hash_input_tokens": "a356460c30a21f56", "hash_cont_tokens": "09e5454b7258b0b7" }, "truncated": 0, "non_truncated": 311, "padded": 1204, "non_padded": 40, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:prehistory|0": { "hashes": { "hash_examples": "d6a0ecbdbb670e9c", "hash_full_prompts": "d6a0ecbdbb670e9c", "hash_input_tokens": "863d9b4c1c269fa7", "hash_cont_tokens": "f20b5fcd2df4488d" }, "truncated": 0, "non_truncated": 324, "padded": 1272, "non_padded": 24, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:professional_accounting|0": { "hashes": { "hash_examples": "b4a95fe480b6540e", "hash_full_prompts": "b4a95fe480b6540e", "hash_input_tokens": "3a6934991a2799d9", "hash_cont_tokens": "a3a4b0df5e20638f" }, "truncated": 0, "non_truncated": 282, "padded": 1112, "non_padded": 16, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:professional_law|0": { "hashes": { "hash_examples": "c2be9651cdbdde3b", "hash_full_prompts": "c2be9651cdbdde3b", "hash_input_tokens": "2acdd42816b0fcc7", "hash_cont_tokens": "eb156ebe8faf1aaf" }, "truncated": 0, "non_truncated": 1534, "padded": 6080, "non_padded": 56, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:professional_medicine|0": { "hashes": { "hash_examples": "26ce92416288f273", "hash_full_prompts": "26ce92416288f273", "hash_input_tokens": "5f042c0f6ae758a6", "hash_cont_tokens": "0218217c2fa604a0" }, "truncated": 0, "non_truncated": 272, "padded": 1076, "non_padded": 12, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:professional_psychology|0": { "hashes": { "hash_examples": "71ea5f182ea9a641", "hash_full_prompts": "71ea5f182ea9a641", "hash_input_tokens": "023cedfd52e8e443", "hash_cont_tokens": "4fea6b16917c8330" }, "truncated": 0, "non_truncated": 612, "padded": 2396, "non_padded": 52, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:public_relations|0": { "hashes": { "hash_examples": "125adc21f91f8d77", "hash_full_prompts": "125adc21f91f8d77", "hash_input_tokens": "0b6a7e0523bf9f8e", "hash_cont_tokens": "fae64d3b41255dc8" }, "truncated": 0, "non_truncated": 110, "padded": 420, "non_padded": 20, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:security_studies|0": { "hashes": { "hash_examples": "3c18b216c099fb26", "hash_full_prompts": "3c18b216c099fb26", "hash_input_tokens": "da5a93baefb5fae8", "hash_cont_tokens": "f81c4b3cc61f9738" }, "truncated": 0, "non_truncated": 245, "padded": 980, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:sociology|0": { "hashes": { "hash_examples": "3f2a9634cef7417d", "hash_full_prompts": "3f2a9634cef7417d", "hash_input_tokens": "bd6665edf9dc0bba", "hash_cont_tokens": "a68d71e598a7eb7f" }, "truncated": 0, "non_truncated": 201, "padded": 768, "non_padded": 36, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:us_foreign_policy|0": { "hashes": { "hash_examples": "22249da54056475e", "hash_full_prompts": "22249da54056475e", "hash_input_tokens": "1617b010ae65dfcc", "hash_cont_tokens": "67c9ff842b18298a" }, "truncated": 0, "non_truncated": 100, "padded": 380, "non_padded": 20, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:virology|0": { "hashes": { "hash_examples": "9d194b9471dc624e", "hash_full_prompts": "9d194b9471dc624e", "hash_input_tokens": 
"12eec5438185bcef", "hash_cont_tokens": "6c8625e5b2b8ffeb" }, "truncated": 0, "non_truncated": 166, "padded": 664, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:world_religions|0": { "hashes": { "hash_examples": "229e5fe50082b064", "hash_full_prompts": "229e5fe50082b064", "hash_input_tokens": "b2136a5b3753ab33", "hash_cont_tokens": "e0faaa109c671b8f" }, "truncated": 0, "non_truncated": 171, "padded": 668, "non_padded": 16, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arc_challenge_okapi_ar|0": { "hashes": { "hash_examples": "ab893807673bc355", "hash_full_prompts": "ab893807673bc355", "hash_input_tokens": "3a3b08eb90768a1a", "hash_cont_tokens": "5ece5b0fdbfa8076" }, "truncated": 0, "non_truncated": 1160, "padded": 4558, "non_padded": 82, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|arc_easy_ar|0": { "hashes": { "hash_examples": "acb688624acc3d04", "hash_full_prompts": "acb688624acc3d04", "hash_input_tokens": "441feeb1a5a0e817", "hash_cont_tokens": "f9e7ef6e6d49b466" }, "truncated": 0, "non_truncated": 2364, "padded": 9254, "non_padded": 202, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|boolq_ar|0": { "hashes": { "hash_examples": "48355a67867e0c32", "hash_full_prompts": "48355a67867e0c32", "hash_input_tokens": "59bb6cf5584b83f7", "hash_cont_tokens": "6cc6329be7dcb0ef" }, "truncated": 0, "non_truncated": 3260, "padded": 6474, "non_padded": 46, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|copa_ext_ar|0": { "hashes": { "hash_examples": "9bb83301bb72eecf", "hash_full_prompts": "9bb83301bb72eecf", "hash_input_tokens": "e853cb5d06649c47", "hash_cont_tokens": "48c2323d601b2b0c" }, "truncated": 0, "non_truncated": 90, "padded": 180, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|hellaswag_okapi_ar|0": { "hashes": { "hash_examples": "6e8cf57a322dfadd", "hash_full_prompts": "6e8cf57a322dfadd", "hash_input_tokens": "655f89bea7a08138", "hash_cont_tokens": "963f62478d9c5df7" }, "truncated": 0, "non_truncated": 9171, "padded": 36560, "non_padded": 124, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|openbook_qa_ext_ar|0": { "hashes": { "hash_examples": "923d41eb0aca93eb", "hash_full_prompts": "923d41eb0aca93eb", "hash_input_tokens": "aafd1a69a1bfb291", "hash_cont_tokens": "e9d7e284d35d6b14" }, "truncated": 0, "non_truncated": 495, "padded": 1949, "non_padded": 31, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|piqa_ar|0": { "hashes": { "hash_examples": "94bc205a520d3ea0", "hash_full_prompts": "94bc205a520d3ea0", "hash_input_tokens": "d63d9c52db17dbb6", "hash_cont_tokens": "6b782d2479e2c028" }, "truncated": 0, "non_truncated": 1833, "padded": 3621, "non_padded": 45, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|race_ar|0": { "hashes": { "hash_examples": "de65130bae647516", "hash_full_prompts": "de65130bae647516", "hash_input_tokens": "37a934c59d7cc420", "hash_cont_tokens": "03d651c5338bf364" }, "truncated": 0, "non_truncated": 4929, "padded": 19698, "non_padded": 18, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "community|sciq_ar|0": { "hashes": { "hash_examples": "39b55b39df48c12b", "hash_full_prompts": "39b55b39df48c12b", "hash_input_tokens": "cb1baca2127dd320", "hash_cont_tokens": "4178cc137274af02" }, "truncated": 0, "non_truncated": 995, "padded": 3961, "non_padded": 19, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, 
"community|toxigen_ar|0": { "hashes": { "hash_examples": "1e139513004a9a2e", "hash_full_prompts": "1e139513004a9a2e", "hash_input_tokens": "58a7a107d8989597", "hash_cont_tokens": "23c85267d1c209f3" }, "truncated": 0, "non_truncated": 935, "padded": 1858, "non_padded": 12, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "lighteval|xstory_cloze:ar|0": { "hashes": { "hash_examples": "865426a22c787481", "hash_full_prompts": "865426a22c787481", "hash_input_tokens": "906ee262041a266c", "hash_cont_tokens": "cb73d8896239c71e" }, "truncated": 0, "non_truncated": 1511, "padded": 2980, "non_padded": 42, "effective_few_shots": 0, "num_truncated_few_shots": 0 } }
{ "hashes": { "hash_examples": "f7091f46c11f5052", "hash_full_prompts": "f7091f46c11f5052", "hash_input_tokens": "2cc0cf610c721146", "hash_cont_tokens": "f2810d148db4104a" }, "truncated": 0, "non_truncated": 72964, "padded": 233423, "non_padded": 2200, "num_truncated_few_shots": 0 }