results/OpenBuddy/openbuddy-qwen2.5llamaify-14b-v23.1-200k/results_2024-10-10T20-33-07.999075.json
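The raw lighteval results for this run follow below. As a minimal sketch (assuming a local copy of the file; the filename is taken from the path above and the key layout from the JSON itself), the per-task acc_norm scores can be read back with the standard json module, skipping the "_average" and "all" aggregate entries:

    import json

    # Assumed local filename, taken from the results path above.
    path = "results_2024-10-10T20-33-07.999075.json"

    with open(path, encoding="utf-8") as f:
        data = json.load(f)

    # Per-task normalized accuracy; "_average" and "all" entries are aggregates, so skip them.
    per_task = {
        task: scores["acc_norm"]
        for task, scores in data["results"].items()
        if "acc_norm" in scores and "_average" not in task and task != "all"
    }

    print(f"{len(per_task)} tasks")
    print(f"unweighted mean acc_norm: {sum(per_task.values()) / len(per_task):.4f}")

The "all" block near the end of the results section records the averages produced by the evaluation run itself, so the unweighted mean above is only a cross-check.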
{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 698.510230295,
"end_time": 30276.410270489,
"total_evaluation_time_secondes": "29577.900040194",
"model_name": "OpenBuddy/openbuddy-qwen2.5llamaify-14b-v23.1-200k",
"model_sha": "df8ddf776804523b602eaab24e5fccf28a4b841e",
"model_dtype": "torch.bfloat16",
"model_size": "27.51 GB",
"config": null
},
"results": { | |
"community|acva:Algeria|0": { | |
"acc_norm": 0.5538461538461539, | |
"acc_norm_stderr": 0.035689135465692336 | |
}, | |
"community|acva:Ancient_Egypt|0": { | |
"acc_norm": 0.050793650793650794, | |
"acc_norm_stderr": 0.01239139518482262 | |
}, | |
"community|acva:Arab_Empire|0": { | |
"acc_norm": 0.30943396226415093, | |
"acc_norm_stderr": 0.028450154794118627 | |
}, | |
"community|acva:Arabic_Architecture|0": { | |
"acc_norm": 0.4717948717948718, | |
"acc_norm_stderr": 0.035840746749208334 | |
}, | |
"community|acva:Arabic_Art|0": { | |
"acc_norm": 0.3641025641025641, | |
"acc_norm_stderr": 0.03454653867786389 | |
}, | |
"community|acva:Arabic_Astronomy|0": { | |
"acc_norm": 0.4666666666666667, | |
"acc_norm_stderr": 0.03581804596782233 | |
}, | |
"community|acva:Arabic_Calligraphy|0": { | |
"acc_norm": 0.47843137254901963, | |
"acc_norm_stderr": 0.0313435870640056 | |
}, | |
"community|acva:Arabic_Ceremony|0": { | |
"acc_norm": 0.5243243243243243, | |
"acc_norm_stderr": 0.0368168445060319 | |
}, | |
"community|acva:Arabic_Clothing|0": { | |
"acc_norm": 0.517948717948718, | |
"acc_norm_stderr": 0.03587477098773825 | |
}, | |
"community|acva:Arabic_Culture|0": { | |
"acc_norm": 0.23076923076923078, | |
"acc_norm_stderr": 0.0302493752938313 | |
}, | |
"community|acva:Arabic_Food|0": { | |
"acc_norm": 0.46153846153846156, | |
"acc_norm_stderr": 0.03579154352544572 | |
}, | |
"community|acva:Arabic_Funeral|0": { | |
"acc_norm": 0.4, | |
"acc_norm_stderr": 0.050529115263991134 | |
}, | |
"community|acva:Arabic_Geography|0": { | |
"acc_norm": 0.6206896551724138, | |
"acc_norm_stderr": 0.04043461861916747 | |
}, | |
"community|acva:Arabic_History|0": { | |
"acc_norm": 0.3076923076923077, | |
"acc_norm_stderr": 0.03313653039774173 | |
}, | |
"community|acva:Arabic_Language_Origin|0": { | |
"acc_norm": 0.5789473684210527, | |
"acc_norm_stderr": 0.050924152299673286 | |
}, | |
"community|acva:Arabic_Literature|0": { | |
"acc_norm": 0.4689655172413793, | |
"acc_norm_stderr": 0.04158632762097828 | |
}, | |
"community|acva:Arabic_Math|0": { | |
"acc_norm": 0.30256410256410254, | |
"acc_norm_stderr": 0.03298070870085618 | |
}, | |
"community|acva:Arabic_Medicine|0": { | |
"acc_norm": 0.47586206896551725, | |
"acc_norm_stderr": 0.041618085035015295 | |
}, | |
"community|acva:Arabic_Music|0": { | |
"acc_norm": 0.23741007194244604, | |
"acc_norm_stderr": 0.036220593237998276 | |
}, | |
"community|acva:Arabic_Ornament|0": { | |
"acc_norm": 0.4717948717948718, | |
"acc_norm_stderr": 0.035840746749208334 | |
}, | |
"community|acva:Arabic_Philosophy|0": { | |
"acc_norm": 0.5793103448275863, | |
"acc_norm_stderr": 0.0411391498118926 | |
}, | |
"community|acva:Arabic_Physics_and_Chemistry|0": { | |
"acc_norm": 0.6410256410256411, | |
"acc_norm_stderr": 0.03444042881521375 | |
}, | |
"community|acva:Arabic_Wedding|0": { | |
"acc_norm": 0.4153846153846154, | |
"acc_norm_stderr": 0.03538013280575029 | |
}, | |
"community|acva:Bahrain|0": { | |
"acc_norm": 0.3333333333333333, | |
"acc_norm_stderr": 0.07106690545187012 | |
}, | |
"community|acva:Comoros|0": { | |
"acc_norm": 0.4, | |
"acc_norm_stderr": 0.07385489458759965 | |
}, | |
"community|acva:Egypt_modern|0": { | |
"acc_norm": 0.3263157894736842, | |
"acc_norm_stderr": 0.04835966701461423 | |
}, | |
"community|acva:InfluenceFromAncientEgypt|0": { | |
"acc_norm": 0.6051282051282051, | |
"acc_norm_stderr": 0.03509545602262038 | |
}, | |
"community|acva:InfluenceFromByzantium|0": { | |
"acc_norm": 0.7172413793103448, | |
"acc_norm_stderr": 0.03752833958003337 | |
}, | |
"community|acva:InfluenceFromChina|0": { | |
"acc_norm": 0.26666666666666666, | |
"acc_norm_stderr": 0.0317493043641267 | |
}, | |
"community|acva:InfluenceFromGreece|0": { | |
"acc_norm": 0.6307692307692307, | |
"acc_norm_stderr": 0.034648411418637566 | |
}, | |
"community|acva:InfluenceFromIslam|0": { | |
"acc_norm": 0.3103448275862069, | |
"acc_norm_stderr": 0.03855289616378947 | |
}, | |
"community|acva:InfluenceFromPersia|0": { | |
"acc_norm": 0.6971428571428572, | |
"acc_norm_stderr": 0.03483414676585986 | |
}, | |
"community|acva:InfluenceFromRome|0": { | |
"acc_norm": 0.5743589743589743, | |
"acc_norm_stderr": 0.03549871080367708 | |
}, | |
"community|acva:Iraq|0": { | |
"acc_norm": 0.5176470588235295, | |
"acc_norm_stderr": 0.05452048340661895 | |
}, | |
"community|acva:Islam_Education|0": { | |
"acc_norm": 0.4666666666666667, | |
"acc_norm_stderr": 0.03581804596782232 | |
}, | |
"community|acva:Islam_branches_and_schools|0": { | |
"acc_norm": 0.44, | |
"acc_norm_stderr": 0.037630997249913416 | |
}, | |
"community|acva:Islamic_law_system|0": { | |
"acc_norm": 0.4256410256410256, | |
"acc_norm_stderr": 0.035498710803677086 | |
}, | |
"community|acva:Jordan|0": { | |
"acc_norm": 0.35555555555555557, | |
"acc_norm_stderr": 0.07216392363431012 | |
}, | |
"community|acva:Kuwait|0": { | |
"acc_norm": 0.28888888888888886, | |
"acc_norm_stderr": 0.06832943242540508 | |
}, | |
"community|acva:Lebanon|0": { | |
"acc_norm": 0.28888888888888886, | |
"acc_norm_stderr": 0.06832943242540508 | |
}, | |
"community|acva:Libya|0": { | |
"acc_norm": 0.4666666666666667, | |
"acc_norm_stderr": 0.0752101433090355 | |
}, | |
"community|acva:Mauritania|0": { | |
"acc_norm": 0.4444444444444444, | |
"acc_norm_stderr": 0.07491109582924915 | |
}, | |
"community|acva:Mesopotamia_civilization|0": { | |
"acc_norm": 0.5225806451612903, | |
"acc_norm_stderr": 0.0402500394824441 | |
}, | |
"community|acva:Morocco|0": { | |
"acc_norm": 0.2222222222222222, | |
"acc_norm_stderr": 0.06267511942419628 | |
}, | |
"community|acva:Oman|0": { | |
"acc_norm": 0.17777777777777778, | |
"acc_norm_stderr": 0.05763774795025094 | |
}, | |
"community|acva:Palestine|0": { | |
"acc_norm": 0.27058823529411763, | |
"acc_norm_stderr": 0.04847314453023652 | |
}, | |
"community|acva:Qatar|0": { | |
"acc_norm": 0.4444444444444444, | |
"acc_norm_stderr": 0.07491109582924914 | |
}, | |
"community|acva:Saudi_Arabia|0": { | |
"acc_norm": 0.36923076923076925, | |
"acc_norm_stderr": 0.03464841141863757 | |
}, | |
"community|acva:Somalia|0": { | |
"acc_norm": 0.35555555555555557, | |
"acc_norm_stderr": 0.07216392363431012 | |
}, | |
"community|acva:Sudan|0": { | |
"acc_norm": 0.35555555555555557, | |
"acc_norm_stderr": 0.07216392363431012 | |
}, | |
"community|acva:Syria|0": { | |
"acc_norm": 0.3333333333333333, | |
"acc_norm_stderr": 0.07106690545187012 | |
}, | |
"community|acva:Tunisia|0": { | |
"acc_norm": 0.3333333333333333, | |
"acc_norm_stderr": 0.07106690545187012 | |
}, | |
"community|acva:United_Arab_Emirates|0": { | |
"acc_norm": 0.23529411764705882, | |
"acc_norm_stderr": 0.04628210543937907 | |
}, | |
"community|acva:Yemen|0": { | |
"acc_norm": 0.2, | |
"acc_norm_stderr": 0.13333333333333333 | |
}, | |
"community|acva:communication|0": { | |
"acc_norm": 0.42857142857142855, | |
"acc_norm_stderr": 0.025974025974025955 | |
}, | |
"community|acva:computer_and_phone|0": { | |
"acc_norm": 0.45084745762711864, | |
"acc_norm_stderr": 0.02901934773187137 | |
}, | |
"community|acva:daily_life|0": { | |
"acc_norm": 0.18694362017804153, | |
"acc_norm_stderr": 0.021268948348414647 | |
}, | |
"community|acva:entertainment|0": { | |
"acc_norm": 0.23389830508474577, | |
"acc_norm_stderr": 0.024687839412166384 | |
}, | |
"community|alghafa:mcq_exams_test_ar|0": { | |
"acc_norm": 0.296229802513465, | |
"acc_norm_stderr": 0.01936388142873615 | |
}, | |
"community|alghafa:meta_ar_dialects|0": { | |
"acc_norm": 0.30898980537534754, | |
"acc_norm_stderr": 0.006291567084535373 | |
}, | |
"community|alghafa:meta_ar_msa|0": { | |
"acc_norm": 0.36312849162011174, | |
"acc_norm_stderr": 0.016083749986853694 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { | |
"acc_norm": 0.52, | |
"acc_norm_stderr": 0.05807730170189531 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { | |
"acc_norm": 0.5933333333333334, | |
"acc_norm_stderr": 0.040241626657390624 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { | |
"acc_norm": 0.41333333333333333, | |
"acc_norm_stderr": 0.040341569222180455 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { | |
"acc_norm": 0.7878674171357098, | |
"acc_norm_stderr": 0.004572441818885609 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task|0": { | |
"acc_norm": 0.5426188490408674, | |
"acc_norm_stderr": 0.006434698461883929 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task|0": { | |
"acc_norm": 0.3802325581395349, | |
"acc_norm_stderr": 0.011708495443225615 | |
}, | |
"community|arabic_exams|0": { | |
"acc_norm": 0.32774674115456237, | |
"acc_norm_stderr": 0.020274644797720624 | |
}, | |
"community|arabic_mmlu:abstract_algebra|0": { | |
"acc_norm": 0.22, | |
"acc_norm_stderr": 0.04163331998932268 | |
}, | |
"community|arabic_mmlu:anatomy|0": { | |
"acc_norm": 0.3111111111111111, | |
"acc_norm_stderr": 0.03999262876617722 | |
}, | |
"community|arabic_mmlu:astronomy|0": { | |
"acc_norm": 0.4407894736842105, | |
"acc_norm_stderr": 0.04040311062490436 | |
}, | |
"community|arabic_mmlu:business_ethics|0": { | |
"acc_norm": 0.5, | |
"acc_norm_stderr": 0.050251890762960605 | |
}, | |
"community|arabic_mmlu:clinical_knowledge|0": { | |
"acc_norm": 0.4641509433962264, | |
"acc_norm_stderr": 0.030693675018458006 | |
}, | |
"community|arabic_mmlu:college_biology|0": { | |
"acc_norm": 0.3472222222222222, | |
"acc_norm_stderr": 0.039812405437178615 | |
}, | |
"community|arabic_mmlu:college_chemistry|0": { | |
"acc_norm": 0.33, | |
"acc_norm_stderr": 0.047258156262526045 | |
}, | |
"community|arabic_mmlu:college_computer_science|0": { | |
"acc_norm": 0.4, | |
"acc_norm_stderr": 0.04923659639173309 | |
}, | |
"community|arabic_mmlu:college_mathematics|0": { | |
"acc_norm": 0.35, | |
"acc_norm_stderr": 0.0479372485441102 | |
}, | |
"community|arabic_mmlu:college_medicine|0": { | |
"acc_norm": 0.37572254335260113, | |
"acc_norm_stderr": 0.03692820767264867 | |
}, | |
"community|arabic_mmlu:college_physics|0": { | |
"acc_norm": 0.2647058823529412, | |
"acc_norm_stderr": 0.04389869956808779 | |
}, | |
"community|arabic_mmlu:computer_security|0": { | |
"acc_norm": 0.53, | |
"acc_norm_stderr": 0.05016135580465919 | |
}, | |
"community|arabic_mmlu:conceptual_physics|0": { | |
"acc_norm": 0.4340425531914894, | |
"acc_norm_stderr": 0.03240038086792747 | |
}, | |
"community|arabic_mmlu:econometrics|0": { | |
"acc_norm": 0.2894736842105263, | |
"acc_norm_stderr": 0.04266339443159394 | |
}, | |
"community|arabic_mmlu:electrical_engineering|0": { | |
"acc_norm": 0.4689655172413793, | |
"acc_norm_stderr": 0.04158632762097828 | |
}, | |
"community|arabic_mmlu:elementary_mathematics|0": { | |
"acc_norm": 0.5793650793650794, | |
"acc_norm_stderr": 0.025424835086924003 | |
}, | |
"community|arabic_mmlu:formal_logic|0": { | |
"acc_norm": 0.3968253968253968, | |
"acc_norm_stderr": 0.04375888492727061 | |
}, | |
"community|arabic_mmlu:global_facts|0": { | |
"acc_norm": 0.34, | |
"acc_norm_stderr": 0.047609522856952344 | |
}, | |
"community|arabic_mmlu:high_school_biology|0": { | |
"acc_norm": 0.38387096774193546, | |
"acc_norm_stderr": 0.02766618207553964 | |
}, | |
"community|arabic_mmlu:high_school_chemistry|0": { | |
"acc_norm": 0.3448275862068966, | |
"acc_norm_stderr": 0.03344283744280457 | |
}, | |
"community|arabic_mmlu:high_school_computer_science|0": { | |
"acc_norm": 0.59, | |
"acc_norm_stderr": 0.04943110704237102 | |
}, | |
"community|arabic_mmlu:high_school_european_history|0": { | |
"acc_norm": 0.23030303030303031, | |
"acc_norm_stderr": 0.03287666758603489 | |
}, | |
"community|arabic_mmlu:high_school_geography|0": { | |
"acc_norm": 0.48484848484848486, | |
"acc_norm_stderr": 0.03560716516531061 | |
}, | |
"community|arabic_mmlu:high_school_government_and_politics|0": { | |
"acc_norm": 0.39378238341968913, | |
"acc_norm_stderr": 0.03526077095548237 | |
}, | |
"community|arabic_mmlu:high_school_macroeconomics|0": { | |
"acc_norm": 0.4025641025641026, | |
"acc_norm_stderr": 0.02486499515976775 | |
}, | |
"community|arabic_mmlu:high_school_mathematics|0": { | |
"acc_norm": 0.3888888888888889, | |
"acc_norm_stderr": 0.029723278961476668 | |
}, | |
"community|arabic_mmlu:high_school_microeconomics|0": { | |
"acc_norm": 0.40336134453781514, | |
"acc_norm_stderr": 0.031866081214088314 | |
}, | |
"community|arabic_mmlu:high_school_physics|0": { | |
"acc_norm": 0.304635761589404, | |
"acc_norm_stderr": 0.03757949922943343 | |
}, | |
"community|arabic_mmlu:high_school_psychology|0": { | |
"acc_norm": 0.3944954128440367, | |
"acc_norm_stderr": 0.020954642108587485 | |
}, | |
"community|arabic_mmlu:high_school_statistics|0": { | |
"acc_norm": 0.375, | |
"acc_norm_stderr": 0.033016908987210894 | |
}, | |
"community|arabic_mmlu:high_school_us_history|0": { | |
"acc_norm": 0.25980392156862747, | |
"acc_norm_stderr": 0.030778554678693264 | |
}, | |
"community|arabic_mmlu:high_school_world_history|0": { | |
"acc_norm": 0.33755274261603374, | |
"acc_norm_stderr": 0.030781549102026233 | |
}, | |
"community|arabic_mmlu:human_aging|0": { | |
"acc_norm": 0.4484304932735426, | |
"acc_norm_stderr": 0.03337883736255098 | |
}, | |
"community|arabic_mmlu:human_sexuality|0": { | |
"acc_norm": 0.48091603053435117, | |
"acc_norm_stderr": 0.04382094705550988 | |
}, | |
"community|arabic_mmlu:international_law|0": { | |
"acc_norm": 0.6446280991735537, | |
"acc_norm_stderr": 0.04369236326573981 | |
}, | |
"community|arabic_mmlu:jurisprudence|0": { | |
"acc_norm": 0.4722222222222222, | |
"acc_norm_stderr": 0.04826217294139894 | |
}, | |
"community|arabic_mmlu:logical_fallacies|0": { | |
"acc_norm": 0.38650306748466257, | |
"acc_norm_stderr": 0.038258255488486076 | |
}, | |
"community|arabic_mmlu:machine_learning|0": { | |
"acc_norm": 0.3482142857142857, | |
"acc_norm_stderr": 0.04521829902833585 | |
}, | |
"community|arabic_mmlu:management|0": { | |
"acc_norm": 0.4368932038834951, | |
"acc_norm_stderr": 0.04911147107365777 | |
}, | |
"community|arabic_mmlu:marketing|0": { | |
"acc_norm": 0.6367521367521367, | |
"acc_norm_stderr": 0.03150712523091265 | |
}, | |
"community|arabic_mmlu:medical_genetics|0": { | |
"acc_norm": 0.49, | |
"acc_norm_stderr": 0.05024183937956911 | |
}, | |
"community|arabic_mmlu:miscellaneous|0": { | |
"acc_norm": 0.4444444444444444, | |
"acc_norm_stderr": 0.017769250583533246 | |
}, | |
"community|arabic_mmlu:moral_disputes|0": { | |
"acc_norm": 0.44508670520231214, | |
"acc_norm_stderr": 0.02675625512966377 | |
}, | |
"community|arabic_mmlu:moral_scenarios|0": { | |
"acc_norm": 0.2737430167597765, | |
"acc_norm_stderr": 0.014912413096372434 | |
}, | |
"community|arabic_mmlu:nutrition|0": { | |
"acc_norm": 0.5326797385620915, | |
"acc_norm_stderr": 0.02856869975222587 | |
}, | |
"community|arabic_mmlu:philosophy|0": { | |
"acc_norm": 0.4212218649517685, | |
"acc_norm_stderr": 0.028043399858210628 | |
}, | |
"community|arabic_mmlu:prehistory|0": { | |
"acc_norm": 0.38580246913580246, | |
"acc_norm_stderr": 0.027085401226132143 | |
}, | |
"community|arabic_mmlu:professional_accounting|0": { | |
"acc_norm": 0.2801418439716312, | |
"acc_norm_stderr": 0.02678917235114025 | |
}, | |
"community|arabic_mmlu:professional_law|0": { | |
"acc_norm": 0.2894393741851369, | |
"acc_norm_stderr": 0.011582659702210236 | |
}, | |
"community|arabic_mmlu:professional_medicine|0": { | |
"acc_norm": 0.20588235294117646, | |
"acc_norm_stderr": 0.024562204314142314 | |
}, | |
"community|arabic_mmlu:professional_psychology|0": { | |
"acc_norm": 0.3709150326797386, | |
"acc_norm_stderr": 0.01954210156485412 | |
}, | |
"community|arabic_mmlu:public_relations|0": { | |
"acc_norm": 0.4, | |
"acc_norm_stderr": 0.0469237132203465 | |
}, | |
"community|arabic_mmlu:security_studies|0": { | |
"acc_norm": 0.5551020408163265, | |
"acc_norm_stderr": 0.031814251181977865 | |
}, | |
"community|arabic_mmlu:sociology|0": { | |
"acc_norm": 0.5124378109452736, | |
"acc_norm_stderr": 0.0353443984853958 | |
}, | |
"community|arabic_mmlu:us_foreign_policy|0": { | |
"acc_norm": 0.59, | |
"acc_norm_stderr": 0.04943110704237101 | |
}, | |
"community|arabic_mmlu:virology|0": { | |
"acc_norm": 0.39759036144578314, | |
"acc_norm_stderr": 0.038099730845402184 | |
}, | |
"community|arabic_mmlu:world_religions|0": { | |
"acc_norm": 0.4152046783625731, | |
"acc_norm_stderr": 0.03779275945503201 | |
}, | |
"community|arc_challenge_okapi_ar|0": { | |
"acc_norm": 0.3905172413793103, | |
"acc_norm_stderr": 0.014330425995124081 | |
}, | |
"community|arc_easy_ar|0": { | |
"acc_norm": 0.3616751269035533, | |
"acc_norm_stderr": 0.009884355213508935 | |
}, | |
"community|boolq_ar|0": { | |
"acc_norm": 0.7730061349693251, | |
"acc_norm_stderr": 0.007337639145268569 | |
}, | |
"community|copa_ext_ar|0": { | |
"acc_norm": 0.5222222222222223, | |
"acc_norm_stderr": 0.05294752255076824 | |
}, | |
"community|hellaswag_okapi_ar|0": { | |
"acc_norm": 0.2804492421764257, | |
"acc_norm_stderr": 0.004691083734262567 | |
}, | |
"community|openbook_qa_ext_ar|0": { | |
"acc_norm": 0.4222222222222222, | |
"acc_norm_stderr": 0.022222222222222244 | |
}, | |
"community|piqa_ar|0": { | |
"acc_norm": 0.5559192580469177, | |
"acc_norm_stderr": 0.011608446249769285 | |
}, | |
"community|race_ar|0": { | |
"acc_norm": 0.37918441874619596, | |
"acc_norm_stderr": 0.006911482237113977 | |
}, | |
"community|sciq_ar|0": { | |
"acc_norm": 0.5618090452261306, | |
"acc_norm_stderr": 0.015737396091561304 | |
}, | |
"community|toxigen_ar|0": { | |
"acc_norm": 0.7486631016042781, | |
"acc_norm_stderr": 0.014193774531094676 | |
}, | |
"lighteval|xstory_cloze:ar|0": { | |
"acc": 0.6055592322964924, | |
"acc_stderr": 0.01257710651393614 | |
}, | |
"community|acva:_average|0": { | |
"acc_norm": 0.4069857552067533, | |
"acc_norm_stderr": 0.04614252658353273 | |
}, | |
"community|alghafa:_average|0": { | |
"acc_norm": 0.4673037322768559, | |
"acc_norm_stderr": 0.022568370200620753 | |
}, | |
"community|arabic_mmlu:_average|0": { | |
"acc_norm": 0.40702737381621423, | |
"acc_norm_stderr": 0.03603525805225283 | |
}, | |
"all": { | |
"acc_norm": 0.41729542559006494, | |
"acc_norm_stderr": 0.03787800426220229, | |
"acc": 0.6055592322964924, | |
"acc_stderr": 0.01257710651393614 | |
} | |
}, | |
"versions": { | |
"community|acva:Algeria|0": 0, | |
"community|acva:Ancient_Egypt|0": 0, | |
"community|acva:Arab_Empire|0": 0, | |
"community|acva:Arabic_Architecture|0": 0, | |
"community|acva:Arabic_Art|0": 0, | |
"community|acva:Arabic_Astronomy|0": 0, | |
"community|acva:Arabic_Calligraphy|0": 0, | |
"community|acva:Arabic_Ceremony|0": 0, | |
"community|acva:Arabic_Clothing|0": 0, | |
"community|acva:Arabic_Culture|0": 0, | |
"community|acva:Arabic_Food|0": 0, | |
"community|acva:Arabic_Funeral|0": 0, | |
"community|acva:Arabic_Geography|0": 0, | |
"community|acva:Arabic_History|0": 0, | |
"community|acva:Arabic_Language_Origin|0": 0, | |
"community|acva:Arabic_Literature|0": 0, | |
"community|acva:Arabic_Math|0": 0, | |
"community|acva:Arabic_Medicine|0": 0, | |
"community|acva:Arabic_Music|0": 0, | |
"community|acva:Arabic_Ornament|0": 0, | |
"community|acva:Arabic_Philosophy|0": 0, | |
"community|acva:Arabic_Physics_and_Chemistry|0": 0, | |
"community|acva:Arabic_Wedding|0": 0, | |
"community|acva:Bahrain|0": 0, | |
"community|acva:Comoros|0": 0, | |
"community|acva:Egypt_modern|0": 0, | |
"community|acva:InfluenceFromAncientEgypt|0": 0, | |
"community|acva:InfluenceFromByzantium|0": 0, | |
"community|acva:InfluenceFromChina|0": 0, | |
"community|acva:InfluenceFromGreece|0": 0, | |
"community|acva:InfluenceFromIslam|0": 0, | |
"community|acva:InfluenceFromPersia|0": 0, | |
"community|acva:InfluenceFromRome|0": 0, | |
"community|acva:Iraq|0": 0, | |
"community|acva:Islam_Education|0": 0, | |
"community|acva:Islam_branches_and_schools|0": 0, | |
"community|acva:Islamic_law_system|0": 0, | |
"community|acva:Jordan|0": 0, | |
"community|acva:Kuwait|0": 0, | |
"community|acva:Lebanon|0": 0, | |
"community|acva:Libya|0": 0, | |
"community|acva:Mauritania|0": 0, | |
"community|acva:Mesopotamia_civilization|0": 0, | |
"community|acva:Morocco|0": 0, | |
"community|acva:Oman|0": 0, | |
"community|acva:Palestine|0": 0, | |
"community|acva:Qatar|0": 0, | |
"community|acva:Saudi_Arabia|0": 0, | |
"community|acva:Somalia|0": 0, | |
"community|acva:Sudan|0": 0, | |
"community|acva:Syria|0": 0, | |
"community|acva:Tunisia|0": 0, | |
"community|acva:United_Arab_Emirates|0": 0, | |
"community|acva:Yemen|0": 0, | |
"community|acva:communication|0": 0, | |
"community|acva:computer_and_phone|0": 0, | |
"community|acva:daily_life|0": 0, | |
"community|acva:entertainment|0": 0, | |
"community|alghafa:mcq_exams_test_ar|0": 0, | |
"community|alghafa:meta_ar_dialects|0": 0, | |
"community|alghafa:meta_ar_msa|0": 0, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, | |
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0, | |
"community|alghafa:multiple_choice_sentiment_task|0": 0, | |
"community|arabic_exams|0": 0, | |
"community|arabic_mmlu:abstract_algebra|0": 0, | |
"community|arabic_mmlu:anatomy|0": 0, | |
"community|arabic_mmlu:astronomy|0": 0, | |
"community|arabic_mmlu:business_ethics|0": 0, | |
"community|arabic_mmlu:clinical_knowledge|0": 0, | |
"community|arabic_mmlu:college_biology|0": 0, | |
"community|arabic_mmlu:college_chemistry|0": 0, | |
"community|arabic_mmlu:college_computer_science|0": 0, | |
"community|arabic_mmlu:college_mathematics|0": 0, | |
"community|arabic_mmlu:college_medicine|0": 0, | |
"community|arabic_mmlu:college_physics|0": 0, | |
"community|arabic_mmlu:computer_security|0": 0, | |
"community|arabic_mmlu:conceptual_physics|0": 0, | |
"community|arabic_mmlu:econometrics|0": 0, | |
"community|arabic_mmlu:electrical_engineering|0": 0, | |
"community|arabic_mmlu:elementary_mathematics|0": 0, | |
"community|arabic_mmlu:formal_logic|0": 0, | |
"community|arabic_mmlu:global_facts|0": 0, | |
"community|arabic_mmlu:high_school_biology|0": 0, | |
"community|arabic_mmlu:high_school_chemistry|0": 0, | |
"community|arabic_mmlu:high_school_computer_science|0": 0, | |
"community|arabic_mmlu:high_school_european_history|0": 0, | |
"community|arabic_mmlu:high_school_geography|0": 0, | |
"community|arabic_mmlu:high_school_government_and_politics|0": 0, | |
"community|arabic_mmlu:high_school_macroeconomics|0": 0, | |
"community|arabic_mmlu:high_school_mathematics|0": 0, | |
"community|arabic_mmlu:high_school_microeconomics|0": 0, | |
"community|arabic_mmlu:high_school_physics|0": 0, | |
"community|arabic_mmlu:high_school_psychology|0": 0, | |
"community|arabic_mmlu:high_school_statistics|0": 0, | |
"community|arabic_mmlu:high_school_us_history|0": 0, | |
"community|arabic_mmlu:high_school_world_history|0": 0, | |
"community|arabic_mmlu:human_aging|0": 0, | |
"community|arabic_mmlu:human_sexuality|0": 0, | |
"community|arabic_mmlu:international_law|0": 0, | |
"community|arabic_mmlu:jurisprudence|0": 0, | |
"community|arabic_mmlu:logical_fallacies|0": 0, | |
"community|arabic_mmlu:machine_learning|0": 0, | |
"community|arabic_mmlu:management|0": 0, | |
"community|arabic_mmlu:marketing|0": 0, | |
"community|arabic_mmlu:medical_genetics|0": 0, | |
"community|arabic_mmlu:miscellaneous|0": 0, | |
"community|arabic_mmlu:moral_disputes|0": 0, | |
"community|arabic_mmlu:moral_scenarios|0": 0, | |
"community|arabic_mmlu:nutrition|0": 0, | |
"community|arabic_mmlu:philosophy|0": 0, | |
"community|arabic_mmlu:prehistory|0": 0, | |
"community|arabic_mmlu:professional_accounting|0": 0, | |
"community|arabic_mmlu:professional_law|0": 0, | |
"community|arabic_mmlu:professional_medicine|0": 0, | |
"community|arabic_mmlu:professional_psychology|0": 0, | |
"community|arabic_mmlu:public_relations|0": 0, | |
"community|arabic_mmlu:security_studies|0": 0, | |
"community|arabic_mmlu:sociology|0": 0, | |
"community|arabic_mmlu:us_foreign_policy|0": 0, | |
"community|arabic_mmlu:virology|0": 0, | |
"community|arabic_mmlu:world_religions|0": 0, | |
"community|arc_challenge_okapi_ar|0": 0, | |
"community|arc_easy_ar|0": 0, | |
"community|boolq_ar|0": 0, | |
"community|copa_ext_ar|0": 0, | |
"community|hellaswag_okapi_ar|0": 0, | |
"community|openbook_qa_ext_ar|0": 0, | |
"community|piqa_ar|0": 0, | |
"community|race_ar|0": 0, | |
"community|sciq_ar|0": 0, | |
"community|toxigen_ar|0": 0, | |
"lighteval|xstory_cloze:ar|0": 0 | |
}, | |
"config_tasks": { | |
"community|acva:Algeria": { | |
"name": "acva:Algeria", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Algeria", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Ancient_Egypt": { | |
"name": "acva:Ancient_Egypt", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Ancient_Egypt", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 315, | |
"effective_num_docs": 315, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arab_Empire": { | |
"name": "acva:Arab_Empire", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arab_Empire", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 265, | |
"effective_num_docs": 265, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Architecture": { | |
"name": "acva:Arabic_Architecture", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Architecture", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Art": { | |
"name": "acva:Arabic_Art", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Art", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Astronomy": { | |
"name": "acva:Arabic_Astronomy", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Astronomy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Calligraphy": { | |
"name": "acva:Arabic_Calligraphy", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Calligraphy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 255, | |
"effective_num_docs": 255, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Ceremony": { | |
"name": "acva:Arabic_Ceremony", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Ceremony", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 185, | |
"effective_num_docs": 185, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Clothing": { | |
"name": "acva:Arabic_Clothing", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Clothing", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Culture": { | |
"name": "acva:Arabic_Culture", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Culture", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Food": { | |
"name": "acva:Arabic_Food", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Food", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Funeral": { | |
"name": "acva:Arabic_Funeral", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Funeral", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 95, | |
"effective_num_docs": 95, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Geography": { | |
"name": "acva:Arabic_Geography", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Geography", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_History": { | |
"name": "acva:Arabic_History", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_History", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Language_Origin": { | |
"name": "acva:Arabic_Language_Origin", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Language_Origin", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 95, | |
"effective_num_docs": 95, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Literature": { | |
"name": "acva:Arabic_Literature", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Literature", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Math": { | |
"name": "acva:Arabic_Math", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Math", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Medicine": { | |
"name": "acva:Arabic_Medicine", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Medicine", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Music": { | |
"name": "acva:Arabic_Music", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Music", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 139, | |
"effective_num_docs": 139, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Ornament": { | |
"name": "acva:Arabic_Ornament", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Ornament", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Philosophy": { | |
"name": "acva:Arabic_Philosophy", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Philosophy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Physics_and_Chemistry": { | |
"name": "acva:Arabic_Physics_and_Chemistry", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Physics_and_Chemistry", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Wedding": { | |
"name": "acva:Arabic_Wedding", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Wedding", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Bahrain": { | |
"name": "acva:Bahrain", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Bahrain", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Comoros": { | |
"name": "acva:Comoros", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Comoros", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Egypt_modern": { | |
"name": "acva:Egypt_modern", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Egypt_modern", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 95, | |
"effective_num_docs": 95, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromAncientEgypt": { | |
"name": "acva:InfluenceFromAncientEgypt", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromAncientEgypt", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromByzantium": { | |
"name": "acva:InfluenceFromByzantium", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromByzantium", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromChina": { | |
"name": "acva:InfluenceFromChina", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromChina", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromGreece": { | |
"name": "acva:InfluenceFromGreece", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromGreece", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromIslam": { | |
"name": "acva:InfluenceFromIslam", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromIslam", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromPersia": { | |
"name": "acva:InfluenceFromPersia", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromPersia", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 175, | |
"effective_num_docs": 175, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromRome": { | |
"name": "acva:InfluenceFromRome", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromRome", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Iraq": { | |
"name": "acva:Iraq", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Iraq", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 85, | |
"effective_num_docs": 85, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Islam_Education": { | |
"name": "acva:Islam_Education", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Islam_Education", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Islam_branches_and_schools": { | |
"name": "acva:Islam_branches_and_schools", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Islam_branches_and_schools", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 175, | |
"effective_num_docs": 175, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Islamic_law_system": { | |
"name": "acva:Islamic_law_system", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Islamic_law_system", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Jordan": { | |
"name": "acva:Jordan", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Jordan", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Kuwait": { | |
"name": "acva:Kuwait", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Kuwait", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Lebanon": { | |
"name": "acva:Lebanon", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Lebanon", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Libya": { | |
"name": "acva:Libya", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Libya", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Mauritania": { | |
"name": "acva:Mauritania", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Mauritania", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Mesopotamia_civilization": { | |
"name": "acva:Mesopotamia_civilization", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Mesopotamia_civilization", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 155, | |
"effective_num_docs": 155, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Morocco": { | |
"name": "acva:Morocco", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Morocco", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Oman": { | |
"name": "acva:Oman", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Oman", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Palestine": { | |
"name": "acva:Palestine", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Palestine", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 85, | |
"effective_num_docs": 85, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Qatar": { | |
"name": "acva:Qatar", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Qatar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Saudi_Arabia": { | |
"name": "acva:Saudi_Arabia", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Saudi_Arabia", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Somalia": { | |
"name": "acva:Somalia", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Somalia", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Sudan": { | |
"name": "acva:Sudan", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Sudan", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Syria": { | |
"name": "acva:Syria", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Syria", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Tunisia": { | |
"name": "acva:Tunisia", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Tunisia", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:United_Arab_Emirates": { | |
"name": "acva:United_Arab_Emirates", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "United_Arab_Emirates", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 85, | |
"effective_num_docs": 85, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Yemen": { | |
"name": "acva:Yemen", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Yemen", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 10, | |
"effective_num_docs": 10, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:communication": { | |
"name": "acva:communication", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "communication", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 364, | |
"effective_num_docs": 364, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:computer_and_phone": { | |
"name": "acva:computer_and_phone", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "computer_and_phone", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 295, | |
"effective_num_docs": 295, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:daily_life": { | |
"name": "acva:daily_life", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "daily_life", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 337, | |
"effective_num_docs": 337, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:entertainment": { | |
"name": "acva:entertainment", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "entertainment", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 295, | |
"effective_num_docs": 295, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:mcq_exams_test_ar": { | |
"name": "alghafa:mcq_exams_test_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "mcq_exams_test_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 557, | |
"effective_num_docs": 557, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:meta_ar_dialects": { | |
"name": "alghafa:meta_ar_dialects", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "meta_ar_dialects", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 5395, | |
"effective_num_docs": 5395, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:meta_ar_msa": { | |
"name": "alghafa:meta_ar_msa", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "meta_ar_msa", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 895, | |
"effective_num_docs": 895, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": { | |
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_facts_truefalse_balanced_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 75, | |
"effective_num_docs": 75, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task": { | |
"name": "alghafa:multiple_choice_grounded_statement_soqal_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_grounded_statement_soqal_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 150, | |
"effective_num_docs": 150, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { | |
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 150, | |
"effective_num_docs": 150, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { | |
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 7995, | |
"effective_num_docs": 7995, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task": { | |
"name": "alghafa:multiple_choice_rating_sentiment_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_rating_sentiment_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 5995, | |
"effective_num_docs": 5995, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task": { | |
"name": "alghafa:multiple_choice_sentiment_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_sentiment_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1720, | |
"effective_num_docs": 1720, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_exams": { | |
"name": "arabic_exams", | |
"prompt_function": "arabic_exams", | |
"hf_repo": "OALL/Arabic_EXAMS", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 537, | |
"effective_num_docs": 537, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:abstract_algebra": { | |
"name": "arabic_mmlu:abstract_algebra", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "abstract_algebra", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:anatomy": { | |
"name": "arabic_mmlu:anatomy", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "anatomy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 135, | |
"effective_num_docs": 135, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:astronomy": { | |
"name": "arabic_mmlu:astronomy", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "astronomy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 152, | |
"effective_num_docs": 152, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:business_ethics": { | |
"name": "arabic_mmlu:business_ethics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "business_ethics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:clinical_knowledge": { | |
"name": "arabic_mmlu:clinical_knowledge", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "clinical_knowledge", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 265, | |
"effective_num_docs": 265, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_biology": { | |
"name": "arabic_mmlu:college_biology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_biology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 144, | |
"effective_num_docs": 144, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_chemistry": { | |
"name": "arabic_mmlu:college_chemistry", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_chemistry", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_computer_science": { | |
"name": "arabic_mmlu:college_computer_science", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_computer_science", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_mathematics": { | |
"name": "arabic_mmlu:college_mathematics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_mathematics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_medicine": { | |
"name": "arabic_mmlu:college_medicine", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_medicine", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 173, | |
"effective_num_docs": 173, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_physics": { | |
"name": "arabic_mmlu:college_physics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_physics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 102, | |
"effective_num_docs": 102, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:computer_security": { | |
"name": "arabic_mmlu:computer_security", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "computer_security", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:conceptual_physics": { | |
"name": "arabic_mmlu:conceptual_physics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "conceptual_physics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 235, | |
"effective_num_docs": 235, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:econometrics": { | |
"name": "arabic_mmlu:econometrics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "econometrics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 114, | |
"effective_num_docs": 114, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:electrical_engineering": { | |
"name": "arabic_mmlu:electrical_engineering", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "electrical_engineering", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:elementary_mathematics": { | |
"name": "arabic_mmlu:elementary_mathematics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "elementary_mathematics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 378, | |
"effective_num_docs": 378, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:formal_logic": { | |
"name": "arabic_mmlu:formal_logic", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "formal_logic", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 126, | |
"effective_num_docs": 126, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:global_facts": { | |
"name": "arabic_mmlu:global_facts", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "global_facts", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_biology": { | |
"name": "arabic_mmlu:high_school_biology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_biology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 310, | |
"effective_num_docs": 310, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_chemistry": { | |
"name": "arabic_mmlu:high_school_chemistry", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_chemistry", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 203, | |
"effective_num_docs": 203, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_computer_science": { | |
"name": "arabic_mmlu:high_school_computer_science", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_computer_science", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_european_history": { | |
"name": "arabic_mmlu:high_school_european_history", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_european_history", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 165, | |
"effective_num_docs": 165, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_geography": { | |
"name": "arabic_mmlu:high_school_geography", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_geography", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 198, | |
"effective_num_docs": 198, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_government_and_politics": { | |
"name": "arabic_mmlu:high_school_government_and_politics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_government_and_politics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 193, | |
"effective_num_docs": 193, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_macroeconomics": { | |
"name": "arabic_mmlu:high_school_macroeconomics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_macroeconomics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 390, | |
"effective_num_docs": 390, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_mathematics": { | |
"name": "arabic_mmlu:high_school_mathematics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_mathematics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 270, | |
"effective_num_docs": 270, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_microeconomics": { | |
"name": "arabic_mmlu:high_school_microeconomics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_microeconomics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 238, | |
"effective_num_docs": 238, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_physics": { | |
"name": "arabic_mmlu:high_school_physics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_physics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 151, | |
"effective_num_docs": 151, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_psychology": { | |
"name": "arabic_mmlu:high_school_psychology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_psychology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 545, | |
"effective_num_docs": 545, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_statistics": { | |
"name": "arabic_mmlu:high_school_statistics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_statistics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 216, | |
"effective_num_docs": 216, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_us_history": { | |
"name": "arabic_mmlu:high_school_us_history", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_us_history", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 204, | |
"effective_num_docs": 204, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_world_history": { | |
"name": "arabic_mmlu:high_school_world_history", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_world_history", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 237, | |
"effective_num_docs": 237, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:human_aging": { | |
"name": "arabic_mmlu:human_aging", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "human_aging", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 223, | |
"effective_num_docs": 223, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:human_sexuality": { | |
"name": "arabic_mmlu:human_sexuality", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "human_sexuality", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 131, | |
"effective_num_docs": 131, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:international_law": { | |
"name": "arabic_mmlu:international_law", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "international_law", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 121, | |
"effective_num_docs": 121, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:jurisprudence": { | |
"name": "arabic_mmlu:jurisprudence", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "jurisprudence", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 108, | |
"effective_num_docs": 108, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:logical_fallacies": { | |
"name": "arabic_mmlu:logical_fallacies", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "logical_fallacies", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 163, | |
"effective_num_docs": 163, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:machine_learning": { | |
"name": "arabic_mmlu:machine_learning", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "machine_learning", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 112, | |
"effective_num_docs": 112, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:management": { | |
"name": "arabic_mmlu:management", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "management", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 103, | |
"effective_num_docs": 103, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:marketing": { | |
"name": "arabic_mmlu:marketing", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "marketing", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 234, | |
"effective_num_docs": 234, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:medical_genetics": { | |
"name": "arabic_mmlu:medical_genetics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "medical_genetics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:miscellaneous": { | |
"name": "arabic_mmlu:miscellaneous", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "miscellaneous", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 783, | |
"effective_num_docs": 783, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:moral_disputes": { | |
"name": "arabic_mmlu:moral_disputes", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "moral_disputes", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 346, | |
"effective_num_docs": 346, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:moral_scenarios": { | |
"name": "arabic_mmlu:moral_scenarios", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "moral_scenarios", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 895, | |
"effective_num_docs": 895, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:nutrition": { | |
"name": "arabic_mmlu:nutrition", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "nutrition", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 306, | |
"effective_num_docs": 306, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:philosophy": { | |
"name": "arabic_mmlu:philosophy", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "philosophy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 311, | |
"effective_num_docs": 311, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:prehistory": { | |
"name": "arabic_mmlu:prehistory", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "prehistory", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 324, | |
"effective_num_docs": 324, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:professional_accounting": { | |
"name": "arabic_mmlu:professional_accounting", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "professional_accounting", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 282, | |
"effective_num_docs": 282, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:professional_law": { | |
"name": "arabic_mmlu:professional_law", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "professional_law", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1534, | |
"effective_num_docs": 1534, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:professional_medicine": { | |
"name": "arabic_mmlu:professional_medicine", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "professional_medicine", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 272, | |
"effective_num_docs": 272, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:professional_psychology": { | |
"name": "arabic_mmlu:professional_psychology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "professional_psychology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 612, | |
"effective_num_docs": 612, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:public_relations": { | |
"name": "arabic_mmlu:public_relations", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "public_relations", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 110, | |
"effective_num_docs": 110, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:security_studies": { | |
"name": "arabic_mmlu:security_studies", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "security_studies", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 245, | |
"effective_num_docs": 245, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:sociology": { | |
"name": "arabic_mmlu:sociology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "sociology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 201, | |
"effective_num_docs": 201, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:us_foreign_policy": { | |
"name": "arabic_mmlu:us_foreign_policy", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "us_foreign_policy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:virology": { | |
"name": "arabic_mmlu:virology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "virology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 166, | |
"effective_num_docs": 166, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:world_religions": { | |
"name": "arabic_mmlu:world_religions", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "world_religions", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 171, | |
"effective_num_docs": 171, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arc_challenge_okapi_ar": { | |
"name": "arc_challenge_okapi_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "arc_challenge_okapi_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1160, | |
"effective_num_docs": 1160, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arc_easy_ar": { | |
"name": "arc_easy_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "arc_easy_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 2364, | |
"effective_num_docs": 2364, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|boolq_ar": { | |
"name": "boolq_ar", | |
"prompt_function": "boolq_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "boolq_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 3260, | |
"effective_num_docs": 3260, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|copa_ext_ar": { | |
"name": "copa_ext_ar", | |
"prompt_function": "copa_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "copa_ext_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 90, | |
"effective_num_docs": 90, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|hellaswag_okapi_ar": { | |
"name": "hellaswag_okapi_ar", | |
"prompt_function": "hellaswag_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "hellaswag_okapi_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 9171, | |
"effective_num_docs": 9171, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|openbook_qa_ext_ar": { | |
"name": "openbook_qa_ext_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "openbook_qa_ext_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 495, | |
"effective_num_docs": 495, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|piqa_ar": { | |
"name": "piqa_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "piqa_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1833, | |
"effective_num_docs": 1833, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|race_ar": { | |
"name": "race_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "race_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 4929, | |
"effective_num_docs": 4929, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|sciq_ar": { | |
"name": "sciq_ar", | |
"prompt_function": "sciq_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "sciq_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 995, | |
"effective_num_docs": 995, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|toxigen_ar": { | |
"name": "toxigen_ar", | |
"prompt_function": "toxigen_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "toxigen_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 935, | |
"effective_num_docs": 935, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"lighteval|xstory_cloze:ar": { | |
"name": "xstory_cloze:ar", | |
"prompt_function": "storycloze", | |
"hf_repo": "juletxara/xstory_cloze", | |
"hf_subset": "ar", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"training", | |
"eval" | |
], | |
"evaluation_splits": [ | |
"eval" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 1511, | |
"effective_num_docs": 1511, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
} | |
}, | |
"summary_tasks": { | |
"community|acva:Algeria|0": { | |
"hashes": { | |
"hash_examples": "da5a3003cd46f6f9", | |
"hash_full_prompts": "da5a3003cd46f6f9", | |
"hash_input_tokens": "3ef2bacc7ff37e63", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Ancient_Egypt|0": { | |
"hashes": { | |
"hash_examples": "52d6f767fede195b", | |
"hash_full_prompts": "52d6f767fede195b", | |
"hash_input_tokens": "214cc053d531b353", | |
"hash_cont_tokens": "02a204d955f29ed4" | |
}, | |
"truncated": 0, | |
"non_truncated": 315, | |
"padded": 630, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arab_Empire|0": { | |
"hashes": { | |
"hash_examples": "8dacff6a79804a75", | |
"hash_full_prompts": "8dacff6a79804a75", | |
"hash_input_tokens": "fcf9bbf3ece4c59a", | |
"hash_cont_tokens": "0be121aeaa740bc8" | |
}, | |
"truncated": 0, | |
"non_truncated": 265, | |
"padded": 530, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Architecture|0": { | |
"hashes": { | |
"hash_examples": "df286cd862d9f6bb", | |
"hash_full_prompts": "df286cd862d9f6bb", | |
"hash_input_tokens": "f409e4f91057b0a3", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Art|0": { | |
"hashes": { | |
"hash_examples": "112883d764118a49", | |
"hash_full_prompts": "112883d764118a49", | |
"hash_input_tokens": "961c4f967ebc3560", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Astronomy|0": { | |
"hashes": { | |
"hash_examples": "20dcdf2454bf8671", | |
"hash_full_prompts": "20dcdf2454bf8671", | |
"hash_input_tokens": "3eecb15f812c1eb3", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Calligraphy|0": { | |
"hashes": { | |
"hash_examples": "3a9f9d1ebe868a15", | |
"hash_full_prompts": "3a9f9d1ebe868a15", | |
"hash_input_tokens": "5c9e1b82570611ba", | |
"hash_cont_tokens": "3a362560f15a8d81" | |
}, | |
"truncated": 0, | |
"non_truncated": 255, | |
"padded": 510, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Ceremony|0": { | |
"hashes": { | |
"hash_examples": "c927630f8d2f44da", | |
"hash_full_prompts": "c927630f8d2f44da", | |
"hash_input_tokens": "ef313f408e82c8b1", | |
"hash_cont_tokens": "219de3ed588d7bf7" | |
}, | |
"truncated": 0, | |
"non_truncated": 185, | |
"padded": 370, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Clothing|0": { | |
"hashes": { | |
"hash_examples": "6ad0740c2ac6ac92", | |
"hash_full_prompts": "6ad0740c2ac6ac92", | |
"hash_input_tokens": "fe7d468c570bbbef", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Culture|0": { | |
"hashes": { | |
"hash_examples": "2177bd857ad872ae", | |
"hash_full_prompts": "2177bd857ad872ae", | |
"hash_input_tokens": "6c4c5ec501b06344", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Food|0": { | |
"hashes": { | |
"hash_examples": "a6ada65b71d7c9c5", | |
"hash_full_prompts": "a6ada65b71d7c9c5", | |
"hash_input_tokens": "82dec31ea4b85be7", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Funeral|0": { | |
"hashes": { | |
"hash_examples": "fcee39dc29eaae91", | |
"hash_full_prompts": "fcee39dc29eaae91", | |
"hash_input_tokens": "21c3d1f4aa7a1b43", | |
"hash_cont_tokens": "c36c7371f1293511" | |
}, | |
"truncated": 0, | |
"non_truncated": 95, | |
"padded": 190, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Geography|0": { | |
"hashes": { | |
"hash_examples": "d36eda7c89231c02", | |
"hash_full_prompts": "d36eda7c89231c02", | |
"hash_input_tokens": "5153cdd7e275c923", | |
"hash_cont_tokens": "625e58e7a01dba13" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_History|0": { | |
"hashes": { | |
"hash_examples": "6354ac0d6db6a5fc", | |
"hash_full_prompts": "6354ac0d6db6a5fc", | |
"hash_input_tokens": "21100775a51cb718", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Language_Origin|0": { | |
"hashes": { | |
"hash_examples": "ddc967c8aca34402", | |
"hash_full_prompts": "ddc967c8aca34402", | |
"hash_input_tokens": "ddb0541d93f36eb6", | |
"hash_cont_tokens": "c36c7371f1293511" | |
}, | |
"truncated": 0, | |
"non_truncated": 95, | |
"padded": 190, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Literature|0": { | |
"hashes": { | |
"hash_examples": "4305379fd46be5d8", | |
"hash_full_prompts": "4305379fd46be5d8", | |
"hash_input_tokens": "7b2216f979c03d6e", | |
"hash_cont_tokens": "625e58e7a01dba13" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Math|0": { | |
"hashes": { | |
"hash_examples": "dec621144f4d28be", | |
"hash_full_prompts": "dec621144f4d28be", | |
"hash_input_tokens": "2cf492ee21e36250", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Medicine|0": { | |
"hashes": { | |
"hash_examples": "2b344cdae9495ff2", | |
"hash_full_prompts": "2b344cdae9495ff2", | |
"hash_input_tokens": "822e41ac3f615cec", | |
"hash_cont_tokens": "625e58e7a01dba13" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Music|0": { | |
"hashes": { | |
"hash_examples": "0c54624d881944ce", | |
"hash_full_prompts": "0c54624d881944ce", | |
"hash_input_tokens": "a3beee14b9f9c5b3", | |
"hash_cont_tokens": "4ac287553cdf8021" | |
}, | |
"truncated": 0, | |
"non_truncated": 139, | |
"padded": 278, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Ornament|0": { | |
"hashes": { | |
"hash_examples": "251a4a84289d8bc1", | |
"hash_full_prompts": "251a4a84289d8bc1", | |
"hash_input_tokens": "d66efb5898b60b7d", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Philosophy|0": { | |
"hashes": { | |
"hash_examples": "3f86fb9c94c13d22", | |
"hash_full_prompts": "3f86fb9c94c13d22", | |
"hash_input_tokens": "4db8a1a1ce91af01", | |
"hash_cont_tokens": "625e58e7a01dba13" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Physics_and_Chemistry|0": { | |
"hashes": { | |
"hash_examples": "8fec65af3695b62a", | |
"hash_full_prompts": "8fec65af3695b62a", | |
"hash_input_tokens": "006d8fd32d4df053", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Wedding|0": { | |
"hashes": { | |
"hash_examples": "9cc3477184d7a4b8", | |
"hash_full_prompts": "9cc3477184d7a4b8", | |
"hash_input_tokens": "cfc86a7f4fb03562", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Bahrain|0": { | |
"hashes": { | |
"hash_examples": "c92e803a0fa8b9e2", | |
"hash_full_prompts": "c92e803a0fa8b9e2", | |
"hash_input_tokens": "9a4972ac1b8bb2ef", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Comoros|0": { | |
"hashes": { | |
"hash_examples": "06e5d4bba8e54cae", | |
"hash_full_prompts": "06e5d4bba8e54cae", | |
"hash_input_tokens": "0e6e4b54cea0480f", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Egypt_modern|0": { | |
"hashes": { | |
"hash_examples": "c6ec369164f93446", | |
"hash_full_prompts": "c6ec369164f93446", | |
"hash_input_tokens": "f8c58bce9be98072", | |
"hash_cont_tokens": "c36c7371f1293511" | |
}, | |
"truncated": 0, | |
"non_truncated": 95, | |
"padded": 190, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromAncientEgypt|0": { | |
"hashes": { | |
"hash_examples": "b9d56d74818b9bd4", | |
"hash_full_prompts": "b9d56d74818b9bd4", | |
"hash_input_tokens": "9e5799065a110e5c", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromByzantium|0": { | |
"hashes": { | |
"hash_examples": "5316c9624e7e59b8", | |
"hash_full_prompts": "5316c9624e7e59b8", | |
"hash_input_tokens": "375684579aec5df9", | |
"hash_cont_tokens": "625e58e7a01dba13" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromChina|0": { | |
"hashes": { | |
"hash_examples": "87894bce95a56411", | |
"hash_full_prompts": "87894bce95a56411", | |
"hash_input_tokens": "42c8734bf7379972", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromGreece|0": { | |
"hashes": { | |
"hash_examples": "0baa78a27e469312", | |
"hash_full_prompts": "0baa78a27e469312", | |
"hash_input_tokens": "fca8c6de9b662b92", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromIslam|0": { | |
"hashes": { | |
"hash_examples": "0c2532cde6541ff2", | |
"hash_full_prompts": "0c2532cde6541ff2", | |
"hash_input_tokens": "e3675188444ec536", | |
"hash_cont_tokens": "625e58e7a01dba13" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromPersia|0": { | |
"hashes": { | |
"hash_examples": "efcd8112dc53c6e5", | |
"hash_full_prompts": "efcd8112dc53c6e5", | |
"hash_input_tokens": "463a0b24318833a6", | |
"hash_cont_tokens": "0060d8f35205c778" | |
}, | |
"truncated": 0, | |
"non_truncated": 175, | |
"padded": 350, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromRome|0": { | |
"hashes": { | |
"hash_examples": "9db61480e2e85fd3", | |
"hash_full_prompts": "9db61480e2e85fd3", | |
"hash_input_tokens": "fd2998a357b960c0", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Iraq|0": { | |
"hashes": { | |
"hash_examples": "96dac3dfa8d2f41f", | |
"hash_full_prompts": "96dac3dfa8d2f41f", | |
"hash_input_tokens": "9de9580e4d5db084", | |
"hash_cont_tokens": "174ee430e070c2fa" | |
}, | |
"truncated": 0, | |
"non_truncated": 85, | |
"padded": 170, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Islam_Education|0": { | |
"hashes": { | |
"hash_examples": "0d80355f6a4cb51b", | |
"hash_full_prompts": "0d80355f6a4cb51b", | |
"hash_input_tokens": "d4bfde3af5c09454", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Islam_branches_and_schools|0": { | |
"hashes": { | |
"hash_examples": "5cedce1be2c3ad50", | |
"hash_full_prompts": "5cedce1be2c3ad50", | |
"hash_input_tokens": "72dc8ee6d66bd04a", | |
"hash_cont_tokens": "0060d8f35205c778" | |
}, | |
"truncated": 0, | |
"non_truncated": 175, | |
"padded": 350, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Islamic_law_system|0": { | |
"hashes": { | |
"hash_examples": "c0e6db8bc84e105e", | |
"hash_full_prompts": "c0e6db8bc84e105e", | |
"hash_input_tokens": "f74942b5d7716814", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Jordan|0": { | |
"hashes": { | |
"hash_examples": "33deb5b4e5ddd6a1", | |
"hash_full_prompts": "33deb5b4e5ddd6a1", | |
"hash_input_tokens": "2957aa767a046048", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Kuwait|0": { | |
"hashes": { | |
"hash_examples": "eb41773346d7c46c", | |
"hash_full_prompts": "eb41773346d7c46c", | |
"hash_input_tokens": "b1d08032bec9263a", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Lebanon|0": { | |
"hashes": { | |
"hash_examples": "25932dbf4c13d34f", | |
"hash_full_prompts": "25932dbf4c13d34f", | |
"hash_input_tokens": "46366538cee4446c", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Libya|0": { | |
"hashes": { | |
"hash_examples": "f2c4db63cd402926", | |
"hash_full_prompts": "f2c4db63cd402926", | |
"hash_input_tokens": "97bb7a855bc4a7d5", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Mauritania|0": { | |
"hashes": { | |
"hash_examples": "8723ab5fdf286b54", | |
"hash_full_prompts": "8723ab5fdf286b54", | |
"hash_input_tokens": "3cbbc6cc805cc8c6", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Mesopotamia_civilization|0": { | |
"hashes": { | |
"hash_examples": "c33f5502a6130ca9", | |
"hash_full_prompts": "c33f5502a6130ca9", | |
"hash_input_tokens": "340c89c5258b71cb", | |
"hash_cont_tokens": "ac62599297c498fd" | |
}, | |
"truncated": 0, | |
"non_truncated": 155, | |
"padded": 310, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Morocco|0": { | |
"hashes": { | |
"hash_examples": "588a5ed27904b1ae", | |
"hash_full_prompts": "588a5ed27904b1ae", | |
"hash_input_tokens": "de33fa7c121201f1", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Oman|0": { | |
"hashes": { | |
"hash_examples": "d447c52b94248b69", | |
"hash_full_prompts": "d447c52b94248b69", | |
"hash_input_tokens": "7578daf272d84c9f", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Palestine|0": { | |
"hashes": { | |
"hash_examples": "19197e076ad14ff5", | |
"hash_full_prompts": "19197e076ad14ff5", | |
"hash_input_tokens": "d213bab3dac40187", | |
"hash_cont_tokens": "174ee430e070c2fa" | |
}, | |
"truncated": 0, | |
"non_truncated": 85, | |
"padded": 170, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Qatar|0": { | |
"hashes": { | |
"hash_examples": "cf0736fa185b28f6", | |
"hash_full_prompts": "cf0736fa185b28f6", | |
"hash_input_tokens": "f735251f18dc0e2d", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Saudi_Arabia|0": { | |
"hashes": { | |
"hash_examples": "69beda6e1b85a08d", | |
"hash_full_prompts": "69beda6e1b85a08d", | |
"hash_input_tokens": "3bdd6e865012e3b7", | |
"hash_cont_tokens": "ebddcaf492db5bb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Somalia|0": { | |
"hashes": { | |
"hash_examples": "b387940c65784fbf", | |
"hash_full_prompts": "b387940c65784fbf", | |
"hash_input_tokens": "61dd3bda71481e9b", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Sudan|0": { | |
"hashes": { | |
"hash_examples": "e02c32b9d2dd0c3f", | |
"hash_full_prompts": "e02c32b9d2dd0c3f", | |
"hash_input_tokens": "57440f2bc158619e", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Syria|0": { | |
"hashes": { | |
"hash_examples": "60a6f8fe73bda4bb", | |
"hash_full_prompts": "60a6f8fe73bda4bb", | |
"hash_input_tokens": "3ef9d6ede0448880", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Tunisia|0": { | |
"hashes": { | |
"hash_examples": "34bb15d3830c5649", | |
"hash_full_prompts": "34bb15d3830c5649", | |
"hash_input_tokens": "e47330ece0bc7145", | |
"hash_cont_tokens": "9ad7f58ff8a11e98" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:United_Arab_Emirates|0": { | |
"hashes": { | |
"hash_examples": "98a0ba78172718ce", | |
"hash_full_prompts": "98a0ba78172718ce", | |
"hash_input_tokens": "884fd807294d25f0", | |
"hash_cont_tokens": "174ee430e070c2fa" | |
}, | |
"truncated": 0, | |
"non_truncated": 85, | |
"padded": 170, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Yemen|0": { | |
"hashes": { | |
"hash_examples": "18e9bcccbb4ced7a", | |
"hash_full_prompts": "18e9bcccbb4ced7a", | |
"hash_input_tokens": "9166d951454dd202", | |
"hash_cont_tokens": "96702f2356f6107c" | |
}, | |
"truncated": 0, | |
"non_truncated": 10, | |
"padded": 20, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:communication|0": { | |
"hashes": { | |
"hash_examples": "9ff28ab5eab5c97b", | |
"hash_full_prompts": "9ff28ab5eab5c97b", | |
"hash_input_tokens": "db963414a330545b", | |
"hash_cont_tokens": "b628a89bcecf356d" | |
}, | |
"truncated": 0, | |
"non_truncated": 364, | |
"padded": 728, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:computer_and_phone|0": { | |
"hashes": { | |
"hash_examples": "37bac2f086aaf6c2", | |
"hash_full_prompts": "37bac2f086aaf6c2", | |
"hash_input_tokens": "efb6e3bfd9ec1af7", | |
"hash_cont_tokens": "d8ae57c62ca85f4b" | |
}, | |
"truncated": 0, | |
"non_truncated": 295, | |
"padded": 590, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:daily_life|0": { | |
"hashes": { | |
"hash_examples": "bf07363c1c252e2f", | |
"hash_full_prompts": "bf07363c1c252e2f", | |
"hash_input_tokens": "c4f010864d25294f", | |
"hash_cont_tokens": "d598caf874354f48" | |
}, | |
"truncated": 0, | |
"non_truncated": 337, | |
"padded": 674, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:entertainment|0": { | |
"hashes": { | |
"hash_examples": "37077bc00f0ac56a", | |
"hash_full_prompts": "37077bc00f0ac56a", | |
"hash_input_tokens": "d73836ec28e33898", | |
"hash_cont_tokens": "d8ae57c62ca85f4b" | |
}, | |
"truncated": 0, | |
"non_truncated": 295, | |
"padded": 590, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:mcq_exams_test_ar|0": { | |
"hashes": { | |
"hash_examples": "c07a5e78c5c0b8fe", | |
"hash_full_prompts": "c07a5e78c5c0b8fe", | |
"hash_input_tokens": "f6c2627be131da5f", | |
"hash_cont_tokens": "478fa268e1fd1e4d" | |
}, | |
"truncated": 0, | |
"non_truncated": 557, | |
"padded": 2228, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:meta_ar_dialects|0": { | |
"hashes": { | |
"hash_examples": "c0b6081f83e14064", | |
"hash_full_prompts": "c0b6081f83e14064", | |
"hash_input_tokens": "d284c569538fb7b5", | |
"hash_cont_tokens": "3dea80a1ee9dc316" | |
}, | |
"truncated": 0, | |
"non_truncated": 5395, | |
"padded": 21451, | |
"non_padded": 129, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:meta_ar_msa|0": { | |
"hashes": { | |
"hash_examples": "64eb78a7c5b7484b", | |
"hash_full_prompts": "64eb78a7c5b7484b", | |
"hash_input_tokens": "7585307c8322a0a6", | |
"hash_cont_tokens": "b7b27059aa46f9f5" | |
}, | |
"truncated": 0, | |
"non_truncated": 895, | |
"padded": 3554, | |
"non_padded": 26, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { | |
"hashes": { | |
"hash_examples": "54fc3502c1c02c06", | |
"hash_full_prompts": "54fc3502c1c02c06", | |
"hash_input_tokens": "c223e508cf5e6cee", | |
"hash_cont_tokens": "30ae320a6284bd96" | |
}, | |
"truncated": 0, | |
"non_truncated": 75, | |
"padded": 150, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { | |
"hashes": { | |
"hash_examples": "46572d83696552ae", | |
"hash_full_prompts": "46572d83696552ae", | |
"hash_input_tokens": "1feed0d2e3f9e2af", | |
"hash_cont_tokens": "2e89ca40d66b31a0" | |
}, | |
"truncated": 0, | |
"non_truncated": 150, | |
"padded": 743, | |
"non_padded": 7, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { | |
"hashes": { | |
"hash_examples": "f430d97ff715bc1c", | |
"hash_full_prompts": "f430d97ff715bc1c", | |
"hash_input_tokens": "a3d3d850c97ea53e", | |
"hash_cont_tokens": "6e7f62230276d03d" | |
}, | |
"truncated": 0, | |
"non_truncated": 150, | |
"padded": 746, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { | |
"hashes": { | |
"hash_examples": "6b70a7416584f98c", | |
"hash_full_prompts": "6b70a7416584f98c", | |
"hash_input_tokens": "4564e8cc6eb247a3", | |
"hash_cont_tokens": "cd589f2d0662aca9" | |
}, | |
"truncated": 0, | |
"non_truncated": 7995, | |
"padded": 15990, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task|0": { | |
"hashes": { | |
"hash_examples": "bc2005cc9d2f436e", | |
"hash_full_prompts": "bc2005cc9d2f436e", | |
"hash_input_tokens": "d38206e59fac4514", | |
"hash_cont_tokens": "43c11b648549da66" | |
}, | |
"truncated": 0, | |
"non_truncated": 5995, | |
"padded": 17843, | |
"non_padded": 142, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task|0": { | |
"hashes": { | |
"hash_examples": "6fb0e254ea5945d8", | |
"hash_full_prompts": "6fb0e254ea5945d8", | |
"hash_input_tokens": "c35504312cdf955a", | |
"hash_cont_tokens": "040bf50c610943db" | |
}, | |
"truncated": 0, | |
"non_truncated": 1720, | |
"padded": 5061, | |
"non_padded": 99, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_exams|0": { | |
"hashes": { | |
"hash_examples": "6d721df351722656", | |
"hash_full_prompts": "6d721df351722656", | |
"hash_input_tokens": "6c9c08baaf330e1b", | |
"hash_cont_tokens": "f6dc9d4abb83a50a" | |
}, | |
"truncated": 0, | |
"non_truncated": 537, | |
"padded": 2116, | |
"non_padded": 32, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:abstract_algebra|0": { | |
"hashes": { | |
"hash_examples": "f2ddca8f45c0a511", | |
"hash_full_prompts": "f2ddca8f45c0a511", | |
"hash_input_tokens": "95493da40fb69b92", | |
"hash_cont_tokens": "67c9ff842b18298a" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:anatomy|0": { | |
"hashes": { | |
"hash_examples": "dfdbc1b83107668d", | |
"hash_full_prompts": "dfdbc1b83107668d", | |
"hash_input_tokens": "9f521f9e986b18f0", | |
"hash_cont_tokens": "b5c1c612518185a5" | |
}, | |
"truncated": 0, | |
"non_truncated": 135, | |
"padded": 532, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:astronomy|0": { | |
"hashes": { | |
"hash_examples": "9736a606002a848e", | |
"hash_full_prompts": "9736a606002a848e", | |
"hash_input_tokens": "5c21a94d7cf0394a", | |
"hash_cont_tokens": "9a91066ba51b2074" | |
}, | |
"truncated": 0, | |
"non_truncated": 152, | |
"padded": 608, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:business_ethics|0": { | |
"hashes": { | |
"hash_examples": "735e452fbb6dc63d", | |
"hash_full_prompts": "735e452fbb6dc63d", | |
"hash_input_tokens": "acc3f1572c2d39fa", | |
"hash_cont_tokens": "67c9ff842b18298a" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 396, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:clinical_knowledge|0": { | |
"hashes": { | |
"hash_examples": "6ab0ca4da98aedcf", | |
"hash_full_prompts": "6ab0ca4da98aedcf", | |
"hash_input_tokens": "9dffa81c459dcaee", | |
"hash_cont_tokens": "4a838d5fa832139f" | |
}, | |
"truncated": 0, | |
"non_truncated": 265, | |
"padded": 1044, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_biology|0": { | |
"hashes": { | |
"hash_examples": "17e4e390848018a4", | |
"hash_full_prompts": "17e4e390848018a4", | |
"hash_input_tokens": "65af10c96ea12df7", | |
"hash_cont_tokens": "d95798e261e3ecf2" | |
}, | |
"truncated": 0, | |
"non_truncated": 144, | |
"padded": 568, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_chemistry|0": { | |
"hashes": { | |
"hash_examples": "4abb169f6dfd234b", | |
"hash_full_prompts": "4abb169f6dfd234b", | |
"hash_input_tokens": "24f579edd453da9e", | |
"hash_cont_tokens": "67c9ff842b18298a" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 392, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_computer_science|0": { | |
"hashes": { | |
"hash_examples": "a369e2e941358a1e", | |
"hash_full_prompts": "a369e2e941358a1e", | |
"hash_input_tokens": "844825bac3e07635", | |
"hash_cont_tokens": "67c9ff842b18298a" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_mathematics|0": { | |
"hashes": { | |
"hash_examples": "d7be03b8b6020bff", | |
"hash_full_prompts": "d7be03b8b6020bff", | |
"hash_input_tokens": "4be21042f3552fa7", | |
"hash_cont_tokens": "67c9ff842b18298a" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 392, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_medicine|0": { | |
"hashes": { | |
"hash_examples": "0518a00f097346bf", | |
"hash_full_prompts": "0518a00f097346bf", | |
"hash_input_tokens": "109992838a8abfa6", | |
"hash_cont_tokens": "0a01d731701f68e5" | |
}, | |
"truncated": 0, | |
"non_truncated": 173, | |
"padded": 680, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_physics|0": { | |
"hashes": { | |
"hash_examples": "5d842cd49bc70e12", | |
"hash_full_prompts": "5d842cd49bc70e12", | |
"hash_input_tokens": "05c93e65d5fa5d1c", | |
"hash_cont_tokens": "77e73d9510077678" | |
}, | |
"truncated": 0, | |
"non_truncated": 102, | |
"padded": 404, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:computer_security|0": { | |
"hashes": { | |
"hash_examples": "8e85d9f85be9b32f", | |
"hash_full_prompts": "8e85d9f85be9b32f", | |
"hash_input_tokens": "af43dcb54a4c9636", | |
"hash_cont_tokens": "67c9ff842b18298a" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 392, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:conceptual_physics|0": { | |
"hashes": { | |
"hash_examples": "7964b55a0a49502b", | |
"hash_full_prompts": "7964b55a0a49502b", | |
"hash_input_tokens": "8e04fb6e64e28a4a", | |
"hash_cont_tokens": "01df071ebfaec74d" | |
}, | |
"truncated": 0, | |
"non_truncated": 235, | |
"padded": 896, | |
"non_padded": 44, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:econometrics|0": { | |
"hashes": { | |
"hash_examples": "1e192eae38347257", | |
"hash_full_prompts": "1e192eae38347257", | |
"hash_input_tokens": "f2449d972c4d7c8f", | |
"hash_cont_tokens": "e3fc2917921a9eaf" | |
}, | |
"truncated": 0, | |
"non_truncated": 114, | |
"padded": 432, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:electrical_engineering|0": { | |
"hashes": { | |
"hash_examples": "cf97671d5c441da1", | |
"hash_full_prompts": "cf97671d5c441da1", | |
"hash_input_tokens": "6dc654413c64caf0", | |
"hash_cont_tokens": "c8a5aa308b735a83" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 560, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:elementary_mathematics|0": { | |
"hashes": { | |
"hash_examples": "6f49107ed43c40c5", | |
"hash_full_prompts": "6f49107ed43c40c5", | |
"hash_input_tokens": "9c49913c8ee12f29", | |
"hash_cont_tokens": "df4fdf22c42e07a2" | |
}, | |
"truncated": 0, | |
"non_truncated": 378, | |
"padded": 1488, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:formal_logic|0": { | |
"hashes": { | |
"hash_examples": "7922c376008ba77b", | |
"hash_full_prompts": "7922c376008ba77b", | |
"hash_input_tokens": "11124cbe31a9a453", | |
"hash_cont_tokens": "3a65271847fd1f2e" | |
}, | |
"truncated": 0, | |
"non_truncated": 126, | |
"padded": 496, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:global_facts|0": { | |
"hashes": { | |
"hash_examples": "11f9813185047d5b", | |
"hash_full_prompts": "11f9813185047d5b", | |
"hash_input_tokens": "38bc5a531955abb0", | |
"hash_cont_tokens": "67c9ff842b18298a" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 380, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_biology|0": { | |
"hashes": { | |
"hash_examples": "2a804b1d90cbe66e", | |
"hash_full_prompts": "2a804b1d90cbe66e", | |
"hash_input_tokens": "7dd451fb49567d74", | |
"hash_cont_tokens": "a6b83eb9e3d31488" | |
}, | |
"truncated": 0, | |
"non_truncated": 310, | |
"padded": 1212, | |
"non_padded": 28, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_chemistry|0": { | |
"hashes": { | |
"hash_examples": "0032168adabc53b4", | |
"hash_full_prompts": "0032168adabc53b4", | |
"hash_input_tokens": "e6a67069b7c5c2ad", | |
"hash_cont_tokens": "8a704d95abde0656" | |
}, | |
"truncated": 0, | |
"non_truncated": 203, | |
"padded": 796, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_computer_science|0": { | |
"hashes": { | |
"hash_examples": "f2fb8740f9df980f", | |
"hash_full_prompts": "f2fb8740f9df980f", | |
"hash_input_tokens": "dcbf6ff3bdd3e92e", | |
"hash_cont_tokens": "67c9ff842b18298a" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 392, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_european_history|0": { | |
"hashes": { | |
"hash_examples": "73509021e7e66435", | |
"hash_full_prompts": "73509021e7e66435", | |
"hash_input_tokens": "8bba260a537bd934", | |
"hash_cont_tokens": "bc6082a11551e6de" | |
}, | |
"truncated": 0, | |
"non_truncated": 165, | |
"padded": 576, | |
"non_padded": 84, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_geography|0": { | |
"hashes": { | |
"hash_examples": "9e08d1894940ff42", | |
"hash_full_prompts": "9e08d1894940ff42", | |
"hash_input_tokens": "2a3d9c5c789d91e8", | |
"hash_cont_tokens": "17f69684d1b7da75" | |
}, | |
"truncated": 0, | |
"non_truncated": 198, | |
"padded": 772, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_government_and_politics|0": { | |
"hashes": { | |
"hash_examples": "64b7e97817ca6c76", | |
"hash_full_prompts": "64b7e97817ca6c76", | |
"hash_input_tokens": "fd2a128749bd7742", | |
"hash_cont_tokens": "bfdbe0094ac4e89d" | |
}, | |
"truncated": 0, | |
"non_truncated": 193, | |
"padded": 764, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_macroeconomics|0": { | |
"hashes": { | |
"hash_examples": "9f582da8534bd2ef", | |
"hash_full_prompts": "9f582da8534bd2ef", | |
"hash_input_tokens": "92b5090100b190f6", | |
"hash_cont_tokens": "a61cb28cbed86a76" | |
}, | |
"truncated": 0, | |
"non_truncated": 390, | |
"padded": 1532, | |
"non_padded": 28, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_mathematics|0": { | |
"hashes": { | |
"hash_examples": "fd54f1c10d423c51", | |
"hash_full_prompts": "fd54f1c10d423c51", | |
"hash_input_tokens": "0c321c6c6d659079", | |
"hash_cont_tokens": "ede1813083303def" | |
}, | |
"truncated": 0, | |
"non_truncated": 270, | |
"padded": 1068, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_microeconomics|0": { | |
"hashes": { | |
"hash_examples": "7037896925aaf42f", | |
"hash_full_prompts": "7037896925aaf42f", | |
"hash_input_tokens": "d7288ee52bee450a", | |
"hash_cont_tokens": "2ce9a16a87758ab3" | |
}, | |
"truncated": 0, | |
"non_truncated": 238, | |
"padded": 920, | |
"non_padded": 32, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_physics|0": { | |
"hashes": { | |
"hash_examples": "60c3776215167dae", | |
"hash_full_prompts": "60c3776215167dae", | |
"hash_input_tokens": "9f47ec7dd538bc5e", | |
"hash_cont_tokens": "34e90e2adee42b92" | |
}, | |
"truncated": 0, | |
"non_truncated": 151, | |
"padded": 580, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_psychology|0": { | |
"hashes": { | |
"hash_examples": "61176bfd5da1298f", | |
"hash_full_prompts": "61176bfd5da1298f", | |
"hash_input_tokens": "3605efa3b22716ef", | |
"hash_cont_tokens": "2d376df2c1814495" | |
}, | |
"truncated": 0, | |
"non_truncated": 545, | |
"padded": 2140, | |
"non_padded": 40, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_statistics|0": { | |
"hashes": { | |
"hash_examples": "40dfeebd1ea10f76", | |
"hash_full_prompts": "40dfeebd1ea10f76", | |
"hash_input_tokens": "b43add071e6f904a", | |
"hash_cont_tokens": "4bd9660a3b058b49" | |
}, | |
"truncated": 0, | |
"non_truncated": 216, | |
"padded": 856, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_us_history|0": { | |
"hashes": { | |
"hash_examples": "03daa510ba917f4d", | |
"hash_full_prompts": "03daa510ba917f4d", | |
"hash_input_tokens": "e2f960b5c86c0102", | |
"hash_cont_tokens": "5c36ec463d3a0755" | |
}, | |
"truncated": 0, | |
"non_truncated": 204, | |
"padded": 788, | |
"non_padded": 28, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_world_history|0": { | |
"hashes": { | |
"hash_examples": "be075ffd579f43c2", | |
"hash_full_prompts": "be075ffd579f43c2", | |
"hash_input_tokens": "ccc7751b748151ac", | |
"hash_cont_tokens": "f21688a7138caf58" | |
}, | |
"truncated": 0, | |
"non_truncated": 237, | |
"padded": 872, | |
"non_padded": 76, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:human_aging|0": { | |
"hashes": { | |
"hash_examples": "caa5b69f640bd1ef", | |
"hash_full_prompts": "caa5b69f640bd1ef", | |
"hash_input_tokens": "2a3aba61d4ab180f", | |
"hash_cont_tokens": "22b919059cbabb52" | |
}, | |
"truncated": 0, | |
"non_truncated": 223, | |
"padded": 868, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:human_sexuality|0": { | |
"hashes": { | |
"hash_examples": "5ed2e38fb25a3767", | |
"hash_full_prompts": "5ed2e38fb25a3767", | |
"hash_input_tokens": "2e20cc06a88b48bc", | |
"hash_cont_tokens": "7033845bac78be24" | |
}, | |
"truncated": 0, | |
"non_truncated": 131, | |
"padded": 524, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:international_law|0": { | |
"hashes": { | |
"hash_examples": "4e3e9e28d1b96484", | |
"hash_full_prompts": "4e3e9e28d1b96484", | |
"hash_input_tokens": "6ada6f50afb308a2", | |
"hash_cont_tokens": "29048e59854cec5c" | |
}, | |
"truncated": 0, | |
"non_truncated": 121, | |
"padded": 476, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:jurisprudence|0": { | |
"hashes": { | |
"hash_examples": "e264b755366310b3", | |
"hash_full_prompts": "e264b755366310b3", | |
"hash_input_tokens": "73472ce451e5a8ff", | |
"hash_cont_tokens": "fae0419b42375fd2" | |
}, | |
"truncated": 0, | |
"non_truncated": 108, | |
"padded": 420, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:logical_fallacies|0": { | |
"hashes": { | |
"hash_examples": "a4ab6965a3e38071", | |
"hash_full_prompts": "a4ab6965a3e38071", | |
"hash_input_tokens": "58be6c072dceb983", | |
"hash_cont_tokens": "93c4e716b01bd87c" | |
}, | |
"truncated": 0, | |
"non_truncated": 163, | |
"padded": 636, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:machine_learning|0": { | |
"hashes": { | |
"hash_examples": "b92320efa6636b40", | |
"hash_full_prompts": "b92320efa6636b40", | |
"hash_input_tokens": "83bf4721912dcabc", | |
"hash_cont_tokens": "2e4467ffdab3254d" | |
}, | |
"truncated": 0, | |
"non_truncated": 112, | |
"padded": 432, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:management|0": { | |
"hashes": { | |
"hash_examples": "c9ee4872a850fe20", | |
"hash_full_prompts": "c9ee4872a850fe20", | |
"hash_input_tokens": "791aa6cff6083f42", | |
"hash_cont_tokens": "b0936addbab0c265" | |
}, | |
"truncated": 0, | |
"non_truncated": 103, | |
"padded": 408, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:marketing|0": { | |
"hashes": { | |
"hash_examples": "0c151b70f6a047e3", | |
"hash_full_prompts": "0c151b70f6a047e3", | |
"hash_input_tokens": "0ca2bb3b1c9dd9f7", | |
"hash_cont_tokens": "54e2619fba846f17" | |
}, | |
"truncated": 0, | |
"non_truncated": 234, | |
"padded": 920, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:medical_genetics|0": { | |
"hashes": { | |
"hash_examples": "513f6cb8fca3a24e", | |
"hash_full_prompts": "513f6cb8fca3a24e", | |
"hash_input_tokens": "7be9a964aef8319e", | |
"hash_cont_tokens": "67c9ff842b18298a" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 388, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:miscellaneous|0": { | |
"hashes": { | |
"hash_examples": "259a190d635331db", | |
"hash_full_prompts": "259a190d635331db", | |
"hash_input_tokens": "ed027028f1fa1bb1", | |
"hash_cont_tokens": "b68c3a07a4a75876" | |
}, | |
"truncated": 0, | |
"non_truncated": 783, | |
"padded": 3088, | |
"non_padded": 44, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:moral_disputes|0": { | |
"hashes": { | |
"hash_examples": "b85052c48a0b7bc3", | |
"hash_full_prompts": "b85052c48a0b7bc3", | |
"hash_input_tokens": "866a49d242375939", | |
"hash_cont_tokens": "f54406d1e4cf99f8" | |
}, | |
"truncated": 0, | |
"non_truncated": 346, | |
"padded": 1348, | |
"non_padded": 36, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:moral_scenarios|0": { | |
"hashes": { | |
"hash_examples": "28d0b069ef00dd00", | |
"hash_full_prompts": "28d0b069ef00dd00", | |
"hash_input_tokens": "ec48ad4c756e4c49", | |
"hash_cont_tokens": "1a38cd5b4241444e" | |
}, | |
"truncated": 0, | |
"non_truncated": 895, | |
"padded": 3580, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:nutrition|0": { | |
"hashes": { | |
"hash_examples": "00c9bc5f1d305b2f", | |
"hash_full_prompts": "00c9bc5f1d305b2f", | |
"hash_input_tokens": "a5b663a9ebff8634", | |
"hash_cont_tokens": "9d443ff23bc12c12" | |
}, | |
"truncated": 0, | |
"non_truncated": 306, | |
"padded": 1192, | |
"non_padded": 32, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:philosophy|0": { | |
"hashes": { | |
"hash_examples": "a458c08454a3fd5f", | |
"hash_full_prompts": "a458c08454a3fd5f", | |
"hash_input_tokens": "8cbb88ec7feff38a", | |
"hash_cont_tokens": "09e5454b7258b0b7" | |
}, | |
"truncated": 0, | |
"non_truncated": 311, | |
"padded": 1204, | |
"non_padded": 40, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:prehistory|0": { | |
"hashes": { | |
"hash_examples": "d6a0ecbdbb670e9c", | |
"hash_full_prompts": "d6a0ecbdbb670e9c", | |
"hash_input_tokens": "405b88aa53ff3b4a", | |
"hash_cont_tokens": "f20b5fcd2df4488d" | |
}, | |
"truncated": 0, | |
"non_truncated": 324, | |
"padded": 1272, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:professional_accounting|0": { | |
"hashes": { | |
"hash_examples": "b4a95fe480b6540e", | |
"hash_full_prompts": "b4a95fe480b6540e", | |
"hash_input_tokens": "18dee047150881e4", | |
"hash_cont_tokens": "a3a4b0df5e20638f" | |
}, | |
"truncated": 0, | |
"non_truncated": 282, | |
"padded": 1112, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:professional_law|0": { | |
"hashes": { | |
"hash_examples": "c2be9651cdbdde3b", | |
"hash_full_prompts": "c2be9651cdbdde3b", | |
"hash_input_tokens": "8d70619c9cfadd6c", | |
"hash_cont_tokens": "eb156ebe8faf1aaf" | |
}, | |
"truncated": 0, | |
"non_truncated": 1534, | |
"padded": 6080, | |
"non_padded": 56, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:professional_medicine|0": { | |
"hashes": { | |
"hash_examples": "26ce92416288f273", | |
"hash_full_prompts": "26ce92416288f273", | |
"hash_input_tokens": "567b259d8741d496", | |
"hash_cont_tokens": "0218217c2fa604a0" | |
}, | |
"truncated": 0, | |
"non_truncated": 272, | |
"padded": 1076, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:professional_psychology|0": { | |
"hashes": { | |
"hash_examples": "71ea5f182ea9a641", | |
"hash_full_prompts": "71ea5f182ea9a641", | |
"hash_input_tokens": "650d704033401a6e", | |
"hash_cont_tokens": "4fea6b16917c8330" | |
}, | |
"truncated": 0, | |
"non_truncated": 612, | |
"padded": 2396, | |
"non_padded": 52, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:public_relations|0": { | |
"hashes": { | |
"hash_examples": "125adc21f91f8d77", | |
"hash_full_prompts": "125adc21f91f8d77", | |
"hash_input_tokens": "e8575cc674cc40bd", | |
"hash_cont_tokens": "fae64d3b41255dc8" | |
}, | |
"truncated": 0, | |
"non_truncated": 110, | |
"padded": 420, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:security_studies|0": { | |
"hashes": { | |
"hash_examples": "3c18b216c099fb26", | |
"hash_full_prompts": "3c18b216c099fb26", | |
"hash_input_tokens": "4dde93cad40d1dfa", | |
"hash_cont_tokens": "f81c4b3cc61f9738" | |
}, | |
"truncated": 0, | |
"non_truncated": 245, | |
"padded": 980, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:sociology|0": { | |
"hashes": { | |
"hash_examples": "3f2a9634cef7417d", | |
"hash_full_prompts": "3f2a9634cef7417d", | |
"hash_input_tokens": "2f28f438ba10899a", | |
"hash_cont_tokens": "a68d71e598a7eb7f" | |
}, | |
"truncated": 0, | |
"non_truncated": 201, | |
"padded": 768, | |
"non_padded": 36, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:us_foreign_policy|0": { | |
"hashes": { | |
"hash_examples": "22249da54056475e", | |
"hash_full_prompts": "22249da54056475e", | |
"hash_input_tokens": "062e8a0f4350b299", | |
"hash_cont_tokens": "67c9ff842b18298a" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 380, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:virology|0": { | |
"hashes": { | |
"hash_examples": "9d194b9471dc624e", | |
"hash_full_prompts": "9d194b9471dc624e", | |
"hash_input_tokens": "1ece00888103132d", | |
"hash_cont_tokens": "6c8625e5b2b8ffeb" | |
}, | |
"truncated": 0, | |
"non_truncated": 166, | |
"padded": 664, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:world_religions|0": { | |
"hashes": { | |
"hash_examples": "229e5fe50082b064", | |
"hash_full_prompts": "229e5fe50082b064", | |
"hash_input_tokens": "2f3683432d9a98f1", | |
"hash_cont_tokens": "e0faaa109c671b8f" | |
}, | |
"truncated": 0, | |
"non_truncated": 171, | |
"padded": 668, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arc_challenge_okapi_ar|0": { | |
"hashes": { | |
"hash_examples": "ab893807673bc355", | |
"hash_full_prompts": "ab893807673bc355", | |
"hash_input_tokens": "e686059e5f359f5f", | |
"hash_cont_tokens": "5ece5b0fdbfa8076" | |
}, | |
"truncated": 0, | |
"non_truncated": 1160, | |
"padded": 4558, | |
"non_padded": 82, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arc_easy_ar|0": { | |
"hashes": { | |
"hash_examples": "acb688624acc3d04", | |
"hash_full_prompts": "acb688624acc3d04", | |
"hash_input_tokens": "730a4690ece4ff43", | |
"hash_cont_tokens": "f9e7ef6e6d49b466" | |
}, | |
"truncated": 0, | |
"non_truncated": 2364, | |
"padded": 9254, | |
"non_padded": 202, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|boolq_ar|0": { | |
"hashes": { | |
"hash_examples": "48355a67867e0c32", | |
"hash_full_prompts": "48355a67867e0c32", | |
"hash_input_tokens": "2ade8e3de9a42680", | |
"hash_cont_tokens": "6cc6329be7dcb0ef" | |
}, | |
"truncated": 0, | |
"non_truncated": 3260, | |
"padded": 6474, | |
"non_padded": 46, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|copa_ext_ar|0": { | |
"hashes": { | |
"hash_examples": "9bb83301bb72eecf", | |
"hash_full_prompts": "9bb83301bb72eecf", | |
"hash_input_tokens": "8f420e0333177a62", | |
"hash_cont_tokens": "48c2323d601b2b0c" | |
}, | |
"truncated": 0, | |
"non_truncated": 90, | |
"padded": 180, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|hellaswag_okapi_ar|0": { | |
"hashes": { | |
"hash_examples": "6e8cf57a322dfadd", | |
"hash_full_prompts": "6e8cf57a322dfadd", | |
"hash_input_tokens": "7a1376f1de1d45d4", | |
"hash_cont_tokens": "963f62478d9c5df7" | |
}, | |
"truncated": 0, | |
"non_truncated": 9171, | |
"padded": 36560, | |
"non_padded": 124, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|openbook_qa_ext_ar|0": { | |
"hashes": { | |
"hash_examples": "923d41eb0aca93eb", | |
"hash_full_prompts": "923d41eb0aca93eb", | |
"hash_input_tokens": "37fc668b2f5d3d23", | |
"hash_cont_tokens": "e9d7e284d35d6b14" | |
}, | |
"truncated": 0, | |
"non_truncated": 495, | |
"padded": 1949, | |
"non_padded": 31, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|piqa_ar|0": { | |
"hashes": { | |
"hash_examples": "94bc205a520d3ea0", | |
"hash_full_prompts": "94bc205a520d3ea0", | |
"hash_input_tokens": "c7dd25d9b229dbc0", | |
"hash_cont_tokens": "6b782d2479e2c028" | |
}, | |
"truncated": 0, | |
"non_truncated": 1833, | |
"padded": 3621, | |
"non_padded": 45, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|race_ar|0": { | |
"hashes": { | |
"hash_examples": "de65130bae647516", | |
"hash_full_prompts": "de65130bae647516", | |
"hash_input_tokens": "d1804042fe02203f", | |
"hash_cont_tokens": "03d651c5338bf364" | |
}, | |
"truncated": 0, | |
"non_truncated": 4929, | |
"padded": 19698, | |
"non_padded": 18, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|sciq_ar|0": { | |
"hashes": { | |
"hash_examples": "fde3aa016caf9d62", | |
"hash_full_prompts": "fde3aa016caf9d62", | |
"hash_input_tokens": "21f69d698aac8fd2", | |
"hash_cont_tokens": "a20b5fb8b8479ad1" | |
}, | |
"truncated": 0, | |
"non_truncated": 995, | |
"padded": 3961, | |
"non_padded": 19, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|toxigen_ar|0": { | |
"hashes": { | |
"hash_examples": "1e139513004a9a2e", | |
"hash_full_prompts": "1e139513004a9a2e", | |
"hash_input_tokens": "931eeb48a68bf117", | |
"hash_cont_tokens": "23c85267d1c209f3" | |
}, | |
"truncated": 0, | |
"non_truncated": 935, | |
"padded": 1858, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|xstory_cloze:ar|0": { | |
"hashes": { | |
"hash_examples": "865426a22c787481", | |
"hash_full_prompts": "865426a22c787481", | |
"hash_input_tokens": "58f0445f0af3dc21", | |
"hash_cont_tokens": "cb73d8896239c71e" | |
}, | |
"truncated": 0, | |
"non_truncated": 1511, | |
"padded": 2980, | |
"non_padded": 42, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
} | |
}, | |
"summary_general": { | |
"hashes": { | |
"hash_examples": "41c14a2dfd004bfa", | |
"hash_full_prompts": "41c14a2dfd004bfa", | |
"hash_input_tokens": "f52d18750499fade", | |
"hash_cont_tokens": "1bed702f6f33407e" | |
}, | |
"truncated": 0, | |
"non_truncated": 72964, | |
"padded": 233423, | |
"non_padded": 2200, | |
"num_truncated_few_shots": 0 | |
} | |
} |