{ "config_general": { "lighteval_sha": "?", "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null, "job_id": "", "start_time": 600.878365688, "end_time": 17429.088478404, "total_evaluation_time_secondes": "16828.210112716", "model_name": "NousResearch/Hermes-3-Llama-3.1-8B", "model_sha": "aabb745a717e133b74dcae23195d2635cf5f38cc", "model_dtype": "torch.bfloat16", "model_size": "14.96 GB", "config": null }, "results": { "community|acva:Algeria|0": { "acc_norm": 0.5230769230769231, "acc_norm_stderr": 0.0358596530894741 }, "community|acva:Ancient_Egypt|0": { "acc_norm": 0.050793650793650794, "acc_norm_stderr": 0.01239139518482262 }, "community|acva:Arab_Empire|0": { "acc_norm": 0.30943396226415093, "acc_norm_stderr": 0.028450154794118627 }, "community|acva:Arabic_Architecture|0": { "acc_norm": 0.4564102564102564, "acc_norm_stderr": 0.035761230969912135 }, "community|acva:Arabic_Art|0": { "acc_norm": 0.37435897435897436, "acc_norm_stderr": 0.03474608430626236 }, "community|acva:Arabic_Astronomy|0": { "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.03581804596782233 }, "community|acva:Arabic_Calligraphy|0": { "acc_norm": 0.48627450980392156, "acc_norm_stderr": 0.03136096744694241 }, "community|acva:Arabic_Ceremony|0": { "acc_norm": 0.518918918918919, "acc_norm_stderr": 0.036834092970087065 }, "community|acva:Arabic_Clothing|0": { "acc_norm": 0.5128205128205128, "acc_norm_stderr": 0.03588610523192215 }, "community|acva:Arabic_Culture|0": { "acc_norm": 0.23076923076923078, "acc_norm_stderr": 0.0302493752938313 }, "community|acva:Arabic_Food|0": { "acc_norm": 0.441025641025641, "acc_norm_stderr": 0.0356473293185358 }, "community|acva:Arabic_Funeral|0": { "acc_norm": 0.4, "acc_norm_stderr": 0.050529115263991134 }, "community|acva:Arabic_Geography|0": { "acc_norm": 0.6068965517241379, "acc_norm_stderr": 0.040703290137070705 }, "community|acva:Arabic_History|0": { "acc_norm": 0.30256410256410254, "acc_norm_stderr": 0.03298070870085619 }, "community|acva:Arabic_Language_Origin|0": { "acc_norm": 0.5473684210526316, "acc_norm_stderr": 0.051339113773544845 }, "community|acva:Arabic_Literature|0": { "acc_norm": 0.4689655172413793, "acc_norm_stderr": 0.04158632762097828 }, "community|acva:Arabic_Math|0": { "acc_norm": 0.30256410256410254, "acc_norm_stderr": 0.03298070870085618 }, "community|acva:Arabic_Medicine|0": { "acc_norm": 0.46206896551724136, "acc_norm_stderr": 0.041546596717075474 }, "community|acva:Arabic_Music|0": { "acc_norm": 0.23741007194244604, "acc_norm_stderr": 0.036220593237998276 }, "community|acva:Arabic_Ornament|0": { "acc_norm": 0.47692307692307695, "acc_norm_stderr": 0.0358596530894741 }, "community|acva:Arabic_Philosophy|0": { "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "community|acva:Arabic_Physics_and_Chemistry|0": { "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.03581804596782232 }, "community|acva:Arabic_Wedding|0": { "acc_norm": 0.41025641025641024, "acc_norm_stderr": 0.03531493712326671 }, "community|acva:Bahrain|0": { "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.06979205927323111 }, "community|acva:Comoros|0": { "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.07309112127323451 }, "community|acva:Egypt_modern|0": { "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.04794350420740798 }, "community|acva:InfluenceFromAncientEgypt|0": { "acc_norm": 0.6, "acc_norm_stderr": 0.035172622905632896 }, "community|acva:InfluenceFromByzantium|0": { "acc_norm": 0.7172413793103448, "acc_norm_stderr": 0.03752833958003337 
}, "community|acva:InfluenceFromChina|0": { "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.0317493043641267 }, "community|acva:InfluenceFromGreece|0": { "acc_norm": 0.6307692307692307, "acc_norm_stderr": 0.034648411418637566 }, "community|acva:InfluenceFromIslam|0": { "acc_norm": 0.296551724137931, "acc_norm_stderr": 0.03806142687309993 }, "community|acva:InfluenceFromPersia|0": { "acc_norm": 0.6971428571428572, "acc_norm_stderr": 0.03483414676585986 }, "community|acva:InfluenceFromRome|0": { "acc_norm": 0.5743589743589743, "acc_norm_stderr": 0.03549871080367708 }, "community|acva:Iraq|0": { "acc_norm": 0.5058823529411764, "acc_norm_stderr": 0.05455069703232772 }, "community|acva:Islam_Education|0": { "acc_norm": 0.4512820512820513, "acc_norm_stderr": 0.03572709860318392 }, "community|acva:Islam_branches_and_schools|0": { "acc_norm": 0.4342857142857143, "acc_norm_stderr": 0.037576101528126626 }, "community|acva:Islamic_law_system|0": { "acc_norm": 0.4256410256410256, "acc_norm_stderr": 0.035498710803677086 }, "community|acva:Jordan|0": { "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.07106690545187012 }, "community|acva:Kuwait|0": { "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.06666666666666667 }, "community|acva:Lebanon|0": { "acc_norm": 0.17777777777777778, "acc_norm_stderr": 0.05763774795025094 }, "community|acva:Libya|0": { "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.07491109582924914 }, "community|acva:Mauritania|0": { "acc_norm": 0.4222222222222222, "acc_norm_stderr": 0.07446027270295805 }, "community|acva:Mesopotamia_civilization|0": { "acc_norm": 0.5225806451612903, "acc_norm_stderr": 0.0402500394824441 }, "community|acva:Morocco|0": { "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.06267511942419628 }, "community|acva:Oman|0": { "acc_norm": 0.17777777777777778, "acc_norm_stderr": 0.05763774795025094 }, "community|acva:Palestine|0": { "acc_norm": 0.25882352941176473, "acc_norm_stderr": 0.047788461203740945 }, "community|acva:Qatar|0": { "acc_norm": 0.4, "acc_norm_stderr": 0.07385489458759964 }, "community|acva:Saudi_Arabia|0": { "acc_norm": 0.3282051282051282, "acc_norm_stderr": 0.03371243782413707 }, "community|acva:Somalia|0": { "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.07216392363431012 }, "community|acva:Sudan|0": { "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.07216392363431012 }, "community|acva:Syria|0": { "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.07106690545187012 }, "community|acva:Tunisia|0": { "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.06979205927323111 }, "community|acva:United_Arab_Emirates|0": { "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04628210543937907 }, "community|acva:Yemen|0": { "acc_norm": 0.2, "acc_norm_stderr": 0.13333333333333333 }, "community|acva:communication|0": { "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.025974025974025955 }, "community|acva:computer_and_phone|0": { "acc_norm": 0.45084745762711864, "acc_norm_stderr": 0.02901934773187137 }, "community|acva:daily_life|0": { "acc_norm": 0.18694362017804153, "acc_norm_stderr": 0.021268948348414647 }, "community|acva:entertainment|0": { "acc_norm": 0.23389830508474577, "acc_norm_stderr": 0.024687839412166384 }, "community|alghafa:mcq_exams_test_ar|0": { "acc_norm": 0.3644524236983842, "acc_norm_stderr": 0.020410660502689287 }, "community|alghafa:meta_ar_dialects|0": { "acc_norm": 0.35088044485634845, "acc_norm_stderr": 0.006498101007131396 }, "community|alghafa:meta_ar_msa|0": { "acc_norm": 
0.40893854748603353, "acc_norm_stderr": 0.016442830654715544 }, "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { "acc_norm": 0.52, "acc_norm_stderr": 0.05807730170189531 }, "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { "acc_norm": 0.5933333333333334, "acc_norm_stderr": 0.04024162665739063 }, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { "acc_norm": 0.46, "acc_norm_stderr": 0.04083030852148599 }, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { "acc_norm": 0.8120075046904315, "acc_norm_stderr": 0.004369866866327708 }, "community|alghafa:multiple_choice_rating_sentiment_task|0": { "acc_norm": 0.5399499582985822, "acc_norm_stderr": 0.006437554575631948 }, "community|alghafa:multiple_choice_sentiment_task|0": { "acc_norm": 0.36686046511627907, "acc_norm_stderr": 0.01162417753415327 }, "community|arabic_exams|0": { "acc_norm": 0.4450651769087523, "acc_norm_stderr": 0.021465964261221878 }, "community|arabic_mmlu:abstract_algebra|0": { "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "community|arabic_mmlu:anatomy|0": { "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.04188307537595853 }, "community|arabic_mmlu:astronomy|0": { "acc_norm": 0.46710526315789475, "acc_norm_stderr": 0.04060127035236397 }, "community|arabic_mmlu:business_ethics|0": { "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "community|arabic_mmlu:clinical_knowledge|0": { "acc_norm": 0.5094339622641509, "acc_norm_stderr": 0.030767394707808107 }, "community|arabic_mmlu:college_biology|0": { "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.041227287076512825 }, "community|arabic_mmlu:college_chemistry|0": { "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "community|arabic_mmlu:college_computer_science|0": { "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "community|arabic_mmlu:college_mathematics|0": { "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421296 }, "community|arabic_mmlu:college_medicine|0": { "acc_norm": 0.41040462427745666, "acc_norm_stderr": 0.03750757044895536 }, "community|arabic_mmlu:college_physics|0": { "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364395 }, "community|arabic_mmlu:computer_security|0": { "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "community|arabic_mmlu:conceptual_physics|0": { "acc_norm": 0.4085106382978723, "acc_norm_stderr": 0.03213418026701576 }, "community|arabic_mmlu:econometrics|0": { "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "community|arabic_mmlu:electrical_engineering|0": { "acc_norm": 0.42758620689655175, "acc_norm_stderr": 0.04122737111370333 }, "community|arabic_mmlu:elementary_mathematics|0": { "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.024870815251057082 }, "community|arabic_mmlu:formal_logic|0": { "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04216370213557835 }, "community|arabic_mmlu:global_facts|0": { "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "community|arabic_mmlu:high_school_biology|0": { "acc_norm": 0.49032258064516127, "acc_norm_stderr": 0.028438677998909558 }, "community|arabic_mmlu:high_school_chemistry|0": { "acc_norm": 0.33497536945812806, "acc_norm_stderr": 0.033208527423483104 }, "community|arabic_mmlu:high_school_computer_science|0": { "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "community|arabic_mmlu:high_school_european_history|0": { "acc_norm": 0.21818181818181817, "acc_norm_stderr": 
0.03225078108306289 }, "community|arabic_mmlu:high_school_geography|0": { "acc_norm": 0.5252525252525253, "acc_norm_stderr": 0.035578062450873145 }, "community|arabic_mmlu:high_school_government_and_politics|0": { "acc_norm": 0.41968911917098445, "acc_norm_stderr": 0.03561587327685884 }, "community|arabic_mmlu:high_school_macroeconomics|0": { "acc_norm": 0.4230769230769231, "acc_norm_stderr": 0.025049197876042338 }, "community|arabic_mmlu:high_school_mathematics|0": { "acc_norm": 0.29259259259259257, "acc_norm_stderr": 0.027738969632176095 }, "community|arabic_mmlu:high_school_microeconomics|0": { "acc_norm": 0.40336134453781514, "acc_norm_stderr": 0.031866081214088314 }, "community|arabic_mmlu:high_school_physics|0": { "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "community|arabic_mmlu:high_school_psychology|0": { "acc_norm": 0.44954128440366975, "acc_norm_stderr": 0.02132788141782338 }, "community|arabic_mmlu:high_school_statistics|0": { "acc_norm": 0.375, "acc_norm_stderr": 0.033016908987210894 }, "community|arabic_mmlu:high_school_us_history|0": { "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.03149328104507956 }, "community|arabic_mmlu:high_school_world_history|0": { "acc_norm": 0.379746835443038, "acc_norm_stderr": 0.03159188752965851 }, "community|arabic_mmlu:human_aging|0": { "acc_norm": 0.3991031390134529, "acc_norm_stderr": 0.03286745312567961 }, "community|arabic_mmlu:human_sexuality|0": { "acc_norm": 0.4732824427480916, "acc_norm_stderr": 0.04379024936553894 }, "community|arabic_mmlu:international_law|0": { "acc_norm": 0.6776859504132231, "acc_norm_stderr": 0.042664163633521685 }, "community|arabic_mmlu:jurisprudence|0": { "acc_norm": 0.5092592592592593, "acc_norm_stderr": 0.04832853553437055 }, "community|arabic_mmlu:logical_fallacies|0": { "acc_norm": 0.4723926380368098, "acc_norm_stderr": 0.0392237829061099 }, "community|arabic_mmlu:machine_learning|0": { "acc_norm": 0.33035714285714285, "acc_norm_stderr": 0.04464285714285713 }, "community|arabic_mmlu:management|0": { "acc_norm": 0.5048543689320388, "acc_norm_stderr": 0.049505043821289195 }, "community|arabic_mmlu:marketing|0": { "acc_norm": 0.7051282051282052, "acc_norm_stderr": 0.029872577708891176 }, "community|arabic_mmlu:medical_genetics|0": { "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "community|arabic_mmlu:miscellaneous|0": { "acc_norm": 0.5210727969348659, "acc_norm_stderr": 0.0178640767862129 }, "community|arabic_mmlu:moral_disputes|0": { "acc_norm": 0.4682080924855491, "acc_norm_stderr": 0.026864624366756646 }, "community|arabic_mmlu:moral_scenarios|0": { "acc_norm": 0.3016759776536313, "acc_norm_stderr": 0.015350767572220286 }, "community|arabic_mmlu:nutrition|0": { "acc_norm": 0.5032679738562091, "acc_norm_stderr": 0.028629305194003543 }, "community|arabic_mmlu:philosophy|0": { "acc_norm": 0.44694533762057875, "acc_norm_stderr": 0.028237769422085335 }, "community|arabic_mmlu:prehistory|0": { "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.027648477877413327 }, "community|arabic_mmlu:professional_accounting|0": { "acc_norm": 0.3049645390070922, "acc_norm_stderr": 0.02746470844202213 }, "community|arabic_mmlu:professional_law|0": { "acc_norm": 0.3213820078226858, "acc_norm_stderr": 0.011927581352265081 }, "community|arabic_mmlu:professional_medicine|0": { "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.026799562024887667 }, "community|arabic_mmlu:professional_psychology|0": { "acc_norm": 0.3937908496732026, "acc_norm_stderr": 0.019766211991073056 }, 
"community|arabic_mmlu:public_relations|0": { "acc_norm": 0.4909090909090909, "acc_norm_stderr": 0.04788339768702861 }, "community|arabic_mmlu:security_studies|0": { "acc_norm": 0.5714285714285714, "acc_norm_stderr": 0.031680911612338825 }, "community|arabic_mmlu:sociology|0": { "acc_norm": 0.5671641791044776, "acc_norm_stderr": 0.03503490923673281 }, "community|arabic_mmlu:us_foreign_policy|0": { "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "community|arabic_mmlu:virology|0": { "acc_norm": 0.42168674698795183, "acc_norm_stderr": 0.03844453181770917 }, "community|arabic_mmlu:world_religions|0": { "acc_norm": 0.47953216374269003, "acc_norm_stderr": 0.0383161053282193 }, "community|arc_challenge_okapi_ar|0": { "acc_norm": 0.39827586206896554, "acc_norm_stderr": 0.014379672396285426 }, "community|arc_easy_ar|0": { "acc_norm": 0.4170896785109983, "acc_norm_stderr": 0.010143404232539966 }, "community|boolq_ar|0": { "acc_norm": 0.7969325153374233, "acc_norm_stderr": 0.007046748760898687 }, "community|copa_ext_ar|0": { "acc_norm": 0.5, "acc_norm_stderr": 0.052999894000318 }, "community|hellaswag_okapi_ar|0": { "acc_norm": 0.3094537127903173, "acc_norm_stderr": 0.004827358786083367 }, "community|openbook_qa_ext_ar|0": { "acc_norm": 0.46060606060606063, "acc_norm_stderr": 0.022426132346490935 }, "community|piqa_ar|0": { "acc_norm": 0.5886524822695035, "acc_norm_stderr": 0.011496646971488453 }, "community|race_ar|0": { "acc_norm": 0.42036924325420977, "acc_norm_stderr": 0.007031627047323341 }, "community|sciq_ar|0": { "acc_norm": 0.5437185929648242, "acc_norm_stderr": 0.015798297434857354 }, "community|toxigen_ar|0": { "acc_norm": 0.4320855614973262, "acc_norm_stderr": 0.01620887578524445 }, "lighteval|xstory_cloze:ar|0": { "acc": 0.6234281932495036, "acc_stderr": 0.012468914489659356 }, "community|acva:_average|0": { "acc_norm": 0.3956530123422574, "acc_norm_stderr": 0.045812219507880864 }, "community|alghafa:_average|0": { "acc_norm": 0.4907136308310435, "acc_norm_stderr": 0.0227702697801579 }, "community|arabic_mmlu:_average|0": { "acc_norm": 0.41778485074586813, "acc_norm_stderr": 0.036071085756289194 }, "all": { "acc_norm": 0.41844653905224566, "acc_norm_stderr": 0.037791982737849984, "acc": 0.6234281932495036, "acc_stderr": 0.012468914489659356 } }, "versions": { "community|acva:Algeria|0": 0, "community|acva:Ancient_Egypt|0": 0, "community|acva:Arab_Empire|0": 0, "community|acva:Arabic_Architecture|0": 0, "community|acva:Arabic_Art|0": 0, "community|acva:Arabic_Astronomy|0": 0, "community|acva:Arabic_Calligraphy|0": 0, "community|acva:Arabic_Ceremony|0": 0, "community|acva:Arabic_Clothing|0": 0, "community|acva:Arabic_Culture|0": 0, "community|acva:Arabic_Food|0": 0, "community|acva:Arabic_Funeral|0": 0, "community|acva:Arabic_Geography|0": 0, "community|acva:Arabic_History|0": 0, "community|acva:Arabic_Language_Origin|0": 0, "community|acva:Arabic_Literature|0": 0, "community|acva:Arabic_Math|0": 0, "community|acva:Arabic_Medicine|0": 0, "community|acva:Arabic_Music|0": 0, "community|acva:Arabic_Ornament|0": 0, "community|acva:Arabic_Philosophy|0": 0, "community|acva:Arabic_Physics_and_Chemistry|0": 0, "community|acva:Arabic_Wedding|0": 0, "community|acva:Bahrain|0": 0, "community|acva:Comoros|0": 0, "community|acva:Egypt_modern|0": 0, "community|acva:InfluenceFromAncientEgypt|0": 0, "community|acva:InfluenceFromByzantium|0": 0, "community|acva:InfluenceFromChina|0": 0, "community|acva:InfluenceFromGreece|0": 0, "community|acva:InfluenceFromIslam|0": 0, 
"community|acva:InfluenceFromPersia|0": 0, "community|acva:InfluenceFromRome|0": 0, "community|acva:Iraq|0": 0, "community|acva:Islam_Education|0": 0, "community|acva:Islam_branches_and_schools|0": 0, "community|acva:Islamic_law_system|0": 0, "community|acva:Jordan|0": 0, "community|acva:Kuwait|0": 0, "community|acva:Lebanon|0": 0, "community|acva:Libya|0": 0, "community|acva:Mauritania|0": 0, "community|acva:Mesopotamia_civilization|0": 0, "community|acva:Morocco|0": 0, "community|acva:Oman|0": 0, "community|acva:Palestine|0": 0, "community|acva:Qatar|0": 0, "community|acva:Saudi_Arabia|0": 0, "community|acva:Somalia|0": 0, "community|acva:Sudan|0": 0, "community|acva:Syria|0": 0, "community|acva:Tunisia|0": 0, "community|acva:United_Arab_Emirates|0": 0, "community|acva:Yemen|0": 0, "community|acva:communication|0": 0, "community|acva:computer_and_phone|0": 0, "community|acva:daily_life|0": 0, "community|acva:entertainment|0": 0, "community|alghafa:mcq_exams_test_ar|0": 0, "community|alghafa:meta_ar_dialects|0": 0, "community|alghafa:meta_ar_msa|0": 0, "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, "community|alghafa:multiple_choice_rating_sentiment_task|0": 0, "community|alghafa:multiple_choice_sentiment_task|0": 0, "community|arabic_exams|0": 0, "community|arabic_mmlu:abstract_algebra|0": 0, "community|arabic_mmlu:anatomy|0": 0, "community|arabic_mmlu:astronomy|0": 0, "community|arabic_mmlu:business_ethics|0": 0, "community|arabic_mmlu:clinical_knowledge|0": 0, "community|arabic_mmlu:college_biology|0": 0, "community|arabic_mmlu:college_chemistry|0": 0, "community|arabic_mmlu:college_computer_science|0": 0, "community|arabic_mmlu:college_mathematics|0": 0, "community|arabic_mmlu:college_medicine|0": 0, "community|arabic_mmlu:college_physics|0": 0, "community|arabic_mmlu:computer_security|0": 0, "community|arabic_mmlu:conceptual_physics|0": 0, "community|arabic_mmlu:econometrics|0": 0, "community|arabic_mmlu:electrical_engineering|0": 0, "community|arabic_mmlu:elementary_mathematics|0": 0, "community|arabic_mmlu:formal_logic|0": 0, "community|arabic_mmlu:global_facts|0": 0, "community|arabic_mmlu:high_school_biology|0": 0, "community|arabic_mmlu:high_school_chemistry|0": 0, "community|arabic_mmlu:high_school_computer_science|0": 0, "community|arabic_mmlu:high_school_european_history|0": 0, "community|arabic_mmlu:high_school_geography|0": 0, "community|arabic_mmlu:high_school_government_and_politics|0": 0, "community|arabic_mmlu:high_school_macroeconomics|0": 0, "community|arabic_mmlu:high_school_mathematics|0": 0, "community|arabic_mmlu:high_school_microeconomics|0": 0, "community|arabic_mmlu:high_school_physics|0": 0, "community|arabic_mmlu:high_school_psychology|0": 0, "community|arabic_mmlu:high_school_statistics|0": 0, "community|arabic_mmlu:high_school_us_history|0": 0, "community|arabic_mmlu:high_school_world_history|0": 0, "community|arabic_mmlu:human_aging|0": 0, "community|arabic_mmlu:human_sexuality|0": 0, "community|arabic_mmlu:international_law|0": 0, "community|arabic_mmlu:jurisprudence|0": 0, "community|arabic_mmlu:logical_fallacies|0": 0, "community|arabic_mmlu:machine_learning|0": 0, "community|arabic_mmlu:management|0": 0, "community|arabic_mmlu:marketing|0": 0, "community|arabic_mmlu:medical_genetics|0": 0, 
"community|arabic_mmlu:miscellaneous|0": 0, "community|arabic_mmlu:moral_disputes|0": 0, "community|arabic_mmlu:moral_scenarios|0": 0, "community|arabic_mmlu:nutrition|0": 0, "community|arabic_mmlu:philosophy|0": 0, "community|arabic_mmlu:prehistory|0": 0, "community|arabic_mmlu:professional_accounting|0": 0, "community|arabic_mmlu:professional_law|0": 0, "community|arabic_mmlu:professional_medicine|0": 0, "community|arabic_mmlu:professional_psychology|0": 0, "community|arabic_mmlu:public_relations|0": 0, "community|arabic_mmlu:security_studies|0": 0, "community|arabic_mmlu:sociology|0": 0, "community|arabic_mmlu:us_foreign_policy|0": 0, "community|arabic_mmlu:virology|0": 0, "community|arabic_mmlu:world_religions|0": 0, "community|arc_challenge_okapi_ar|0": 0, "community|arc_easy_ar|0": 0, "community|boolq_ar|0": 0, "community|copa_ext_ar|0": 0, "community|hellaswag_okapi_ar|0": 0, "community|openbook_qa_ext_ar|0": 0, "community|piqa_ar|0": 0, "community|race_ar|0": 0, "community|sciq_ar|0": 0, "community|toxigen_ar|0": 0, "lighteval|xstory_cloze:ar|0": 0 }, "config_tasks": { "community|acva:Algeria": { "name": "acva:Algeria", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Algeria", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Ancient_Egypt": { "name": "acva:Ancient_Egypt", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Ancient_Egypt", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 315, "effective_num_docs": 315, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arab_Empire": { "name": "acva:Arab_Empire", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arab_Empire", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 265, "effective_num_docs": 265, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Architecture": { "name": "acva:Arabic_Architecture", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Architecture", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Art": { "name": "acva:Arabic_Art", "prompt_function": "acva", "hf_repo": 
"OALL/ACVA", "hf_subset": "Arabic_Art", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Astronomy": { "name": "acva:Arabic_Astronomy", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Astronomy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Calligraphy": { "name": "acva:Arabic_Calligraphy", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Calligraphy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 255, "effective_num_docs": 255, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Ceremony": { "name": "acva:Arabic_Ceremony", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Ceremony", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 185, "effective_num_docs": 185, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Clothing": { "name": "acva:Arabic_Clothing", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Clothing", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Culture": { "name": "acva:Arabic_Culture", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Culture", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Food": { "name": "acva:Arabic_Food", "prompt_function": 
"acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Food", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Funeral": { "name": "acva:Arabic_Funeral", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Funeral", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 95, "effective_num_docs": 95, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Geography": { "name": "acva:Arabic_Geography", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Geography", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_History": { "name": "acva:Arabic_History", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_History", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Language_Origin": { "name": "acva:Arabic_Language_Origin", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Language_Origin", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 95, "effective_num_docs": 95, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Literature": { "name": "acva:Arabic_Literature", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Literature", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Math": { "name": 
"acva:Arabic_Math", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Math", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Medicine": { "name": "acva:Arabic_Medicine", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Medicine", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Music": { "name": "acva:Arabic_Music", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Music", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 139, "effective_num_docs": 139, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Ornament": { "name": "acva:Arabic_Ornament", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Ornament", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Philosophy": { "name": "acva:Arabic_Philosophy", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Philosophy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Arabic_Physics_and_Chemistry": { "name": "acva:Arabic_Physics_and_Chemistry", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Physics_and_Chemistry", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 
0 }, "community|acva:Arabic_Wedding": { "name": "acva:Arabic_Wedding", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Arabic_Wedding", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Bahrain": { "name": "acva:Bahrain", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Bahrain", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Comoros": { "name": "acva:Comoros", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Comoros", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Egypt_modern": { "name": "acva:Egypt_modern", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Egypt_modern", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 95, "effective_num_docs": 95, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromAncientEgypt": { "name": "acva:InfluenceFromAncientEgypt", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromAncientEgypt", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromByzantium": { "name": "acva:InfluenceFromByzantium", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromByzantium", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, 
"version": 0 }, "community|acva:InfluenceFromChina": { "name": "acva:InfluenceFromChina", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromChina", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromGreece": { "name": "acva:InfluenceFromGreece", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromGreece", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromIslam": { "name": "acva:InfluenceFromIslam", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromIslam", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromPersia": { "name": "acva:InfluenceFromPersia", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromPersia", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 175, "effective_num_docs": 175, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:InfluenceFromRome": { "name": "acva:InfluenceFromRome", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "InfluenceFromRome", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Iraq": { "name": "acva:Iraq", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Iraq", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 85, "effective_num_docs": 85, "trust_dataset": null, 
"must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Islam_Education": { "name": "acva:Islam_Education", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Islam_Education", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Islam_branches_and_schools": { "name": "acva:Islam_branches_and_schools", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Islam_branches_and_schools", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 175, "effective_num_docs": 175, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Islamic_law_system": { "name": "acva:Islamic_law_system", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Islamic_law_system", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Jordan": { "name": "acva:Jordan", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Jordan", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Kuwait": { "name": "acva:Kuwait", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Kuwait", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Lebanon": { "name": "acva:Lebanon", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Lebanon", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, 
"must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Libya": { "name": "acva:Libya", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Libya", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Mauritania": { "name": "acva:Mauritania", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Mauritania", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Mesopotamia_civilization": { "name": "acva:Mesopotamia_civilization", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Mesopotamia_civilization", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 155, "effective_num_docs": 155, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Morocco": { "name": "acva:Morocco", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Morocco", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Oman": { "name": "acva:Oman", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Oman", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Palestine": { "name": "acva:Palestine", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Palestine", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 85, "effective_num_docs": 85, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Qatar": { 
"name": "acva:Qatar", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Qatar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Saudi_Arabia": { "name": "acva:Saudi_Arabia", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Saudi_Arabia", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 195, "effective_num_docs": 195, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Somalia": { "name": "acva:Somalia", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Somalia", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Sudan": { "name": "acva:Sudan", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Sudan", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Syria": { "name": "acva:Syria", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Syria", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Tunisia": { "name": "acva:Tunisia", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Tunisia", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 45, "effective_num_docs": 45, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:United_Arab_Emirates": { "name": "acva:United_Arab_Emirates", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": 
"United_Arab_Emirates", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 85, "effective_num_docs": 85, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:Yemen": { "name": "acva:Yemen", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "Yemen", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 10, "effective_num_docs": 10, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:communication": { "name": "acva:communication", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "communication", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 364, "effective_num_docs": 364, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:computer_and_phone": { "name": "acva:computer_and_phone", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "computer_and_phone", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 295, "effective_num_docs": 295, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:daily_life": { "name": "acva:daily_life", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "daily_life", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 337, "effective_num_docs": 337, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|acva:entertainment": { "name": "acva:entertainment", "prompt_function": "acva", "hf_repo": "OALL/ACVA", "hf_subset": "entertainment", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 295, "effective_num_docs": 295, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:mcq_exams_test_ar": { "name": "alghafa:mcq_exams_test_ar", "prompt_function": "alghafa_prompt", "hf_repo": 
"OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "mcq_exams_test_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 557, "effective_num_docs": 557, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:meta_ar_dialects": { "name": "alghafa:meta_ar_dialects", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "meta_ar_dialects", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 5395, "effective_num_docs": 5395, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:meta_ar_msa": { "name": "alghafa:meta_ar_msa", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "meta_ar_msa", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 895, "effective_num_docs": 895, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_facts_truefalse_balanced_task": { "name": "alghafa:multiple_choice_facts_truefalse_balanced_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_facts_truefalse_balanced_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 75, "effective_num_docs": 75, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_grounded_statement_soqal_task": { "name": "alghafa:multiple_choice_grounded_statement_soqal_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_grounded_statement_soqal_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 150, "effective_num_docs": 150, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { "name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": 
"multiple_choice_grounded_statement_xglue_mlqa_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 150, "effective_num_docs": 150, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { "name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 7995, "effective_num_docs": 7995, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_rating_sentiment_task": { "name": "alghafa:multiple_choice_rating_sentiment_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_rating_sentiment_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 5995, "effective_num_docs": 5995, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|alghafa:multiple_choice_sentiment_task": { "name": "alghafa:multiple_choice_sentiment_task", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", "hf_subset": "multiple_choice_sentiment_task", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 1720, "effective_num_docs": 1720, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_exams": { "name": "arabic_exams", "prompt_function": "arabic_exams", "hf_repo": "OALL/Arabic_EXAMS", "hf_subset": "default", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 537, "effective_num_docs": 537, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:abstract_algebra": { "name": "arabic_mmlu:abstract_algebra", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "abstract_algebra", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", 
"few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:anatomy": { "name": "arabic_mmlu:anatomy", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "anatomy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 135, "effective_num_docs": 135, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:astronomy": { "name": "arabic_mmlu:astronomy", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "astronomy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 152, "effective_num_docs": 152, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:business_ethics": { "name": "arabic_mmlu:business_ethics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "business_ethics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:clinical_knowledge": { "name": "arabic_mmlu:clinical_knowledge", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "clinical_knowledge", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 265, "effective_num_docs": 265, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_biology": { "name": "arabic_mmlu:college_biology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_biology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 144, "effective_num_docs": 144, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_chemistry": { "name": "arabic_mmlu:college_chemistry", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_chemistry", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": 
[ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_computer_science": { "name": "arabic_mmlu:college_computer_science", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_computer_science", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_mathematics": { "name": "arabic_mmlu:college_mathematics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_mathematics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_medicine": { "name": "arabic_mmlu:college_medicine", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_medicine", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 173, "effective_num_docs": 173, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:college_physics": { "name": "arabic_mmlu:college_physics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "college_physics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 102, "effective_num_docs": 102, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:computer_security": { "name": "arabic_mmlu:computer_security", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "computer_security", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:conceptual_physics": { "name": 
"arabic_mmlu:conceptual_physics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "conceptual_physics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 235, "effective_num_docs": 235, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:econometrics": { "name": "arabic_mmlu:econometrics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "econometrics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 114, "effective_num_docs": 114, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:electrical_engineering": { "name": "arabic_mmlu:electrical_engineering", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "electrical_engineering", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 145, "effective_num_docs": 145, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:elementary_mathematics": { "name": "arabic_mmlu:elementary_mathematics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "elementary_mathematics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 378, "effective_num_docs": 378, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:formal_logic": { "name": "arabic_mmlu:formal_logic", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "formal_logic", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 126, "effective_num_docs": 126, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:global_facts": { "name": "arabic_mmlu:global_facts", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "global_facts", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, 
"trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_biology": { "name": "arabic_mmlu:high_school_biology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_biology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 310, "effective_num_docs": 310, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_chemistry": { "name": "arabic_mmlu:high_school_chemistry", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_chemistry", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 203, "effective_num_docs": 203, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_computer_science": { "name": "arabic_mmlu:high_school_computer_science", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_computer_science", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_european_history": { "name": "arabic_mmlu:high_school_european_history", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_european_history", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 165, "effective_num_docs": 165, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_geography": { "name": "arabic_mmlu:high_school_geography", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_geography", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 198, "effective_num_docs": 198, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_government_and_politics": { "name": "arabic_mmlu:high_school_government_and_politics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_government_and_politics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ 
"test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 193, "effective_num_docs": 193, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_macroeconomics": { "name": "arabic_mmlu:high_school_macroeconomics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_macroeconomics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 390, "effective_num_docs": 390, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_mathematics": { "name": "arabic_mmlu:high_school_mathematics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_mathematics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 270, "effective_num_docs": 270, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_microeconomics": { "name": "arabic_mmlu:high_school_microeconomics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_microeconomics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 238, "effective_num_docs": 238, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_physics": { "name": "arabic_mmlu:high_school_physics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_physics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 151, "effective_num_docs": 151, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_psychology": { "name": "arabic_mmlu:high_school_psychology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_psychology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 545, "effective_num_docs": 545, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, 
"community|arabic_mmlu:high_school_statistics": { "name": "arabic_mmlu:high_school_statistics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_statistics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 216, "effective_num_docs": 216, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_us_history": { "name": "arabic_mmlu:high_school_us_history", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_us_history", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 204, "effective_num_docs": 204, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:high_school_world_history": { "name": "arabic_mmlu:high_school_world_history", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "high_school_world_history", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 237, "effective_num_docs": 237, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:human_aging": { "name": "arabic_mmlu:human_aging", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "human_aging", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 223, "effective_num_docs": 223, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:human_sexuality": { "name": "arabic_mmlu:human_sexuality", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "human_sexuality", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 131, "effective_num_docs": 131, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:international_law": { "name": "arabic_mmlu:international_law", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "international_law", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, 
"frozen": false, "suite": [ "community" ], "original_num_docs": 121, "effective_num_docs": 121, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:jurisprudence": { "name": "arabic_mmlu:jurisprudence", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "jurisprudence", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 108, "effective_num_docs": 108, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:logical_fallacies": { "name": "arabic_mmlu:logical_fallacies", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "logical_fallacies", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 163, "effective_num_docs": 163, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:machine_learning": { "name": "arabic_mmlu:machine_learning", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "machine_learning", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 112, "effective_num_docs": 112, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:management": { "name": "arabic_mmlu:management", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "management", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 103, "effective_num_docs": 103, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:marketing": { "name": "arabic_mmlu:marketing", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "marketing", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 234, "effective_num_docs": 234, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:medical_genetics": { "name": "arabic_mmlu:medical_genetics", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "medical_genetics", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", 
"generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:miscellaneous": { "name": "arabic_mmlu:miscellaneous", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "miscellaneous", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 783, "effective_num_docs": 783, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:moral_disputes": { "name": "arabic_mmlu:moral_disputes", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "moral_disputes", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 346, "effective_num_docs": 346, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:moral_scenarios": { "name": "arabic_mmlu:moral_scenarios", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "moral_scenarios", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 895, "effective_num_docs": 895, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:nutrition": { "name": "arabic_mmlu:nutrition", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "nutrition", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 306, "effective_num_docs": 306, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:philosophy": { "name": "arabic_mmlu:philosophy", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "philosophy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 311, "effective_num_docs": 311, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:prehistory": { "name": "arabic_mmlu:prehistory", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "prehistory", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], 
"few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 324, "effective_num_docs": 324, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:professional_accounting": { "name": "arabic_mmlu:professional_accounting", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "professional_accounting", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 282, "effective_num_docs": 282, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:professional_law": { "name": "arabic_mmlu:professional_law", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "professional_law", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 1534, "effective_num_docs": 1534, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:professional_medicine": { "name": "arabic_mmlu:professional_medicine", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "professional_medicine", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 272, "effective_num_docs": 272, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:professional_psychology": { "name": "arabic_mmlu:professional_psychology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "professional_psychology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 612, "effective_num_docs": 612, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:public_relations": { "name": "arabic_mmlu:public_relations", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "public_relations", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 110, "effective_num_docs": 110, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:security_studies": { "name": "arabic_mmlu:security_studies", "prompt_function": 
"mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "security_studies", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 245, "effective_num_docs": 245, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:sociology": { "name": "arabic_mmlu:sociology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "sociology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 201, "effective_num_docs": 201, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:us_foreign_policy": { "name": "arabic_mmlu:us_foreign_policy", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "us_foreign_policy", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 100, "effective_num_docs": 100, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:virology": { "name": "arabic_mmlu:virology", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "virology", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 166, "effective_num_docs": 166, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arabic_mmlu:world_religions": { "name": "arabic_mmlu:world_religions", "prompt_function": "mmlu_arabic", "hf_repo": "OALL/Arabic_MMLU", "hf_subset": "world_religions", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "dev" ], "evaluation_splits": [ "test" ], "few_shots_split": "dev", "few_shots_select": "sequential", "generation_size": -1, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 171, "effective_num_docs": 171, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|arc_challenge_okapi_ar": { "name": "arc_challenge_okapi_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "arc_challenge_okapi_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 1160, "effective_num_docs": 1160, "trust_dataset": null, "must_remove_duplicate_docs": 
null, "version": 0 }, "community|arc_easy_ar": { "name": "arc_easy_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "arc_easy_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 2364, "effective_num_docs": 2364, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|boolq_ar": { "name": "boolq_ar", "prompt_function": "boolq_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "boolq_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 3260, "effective_num_docs": 3260, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|copa_ext_ar": { "name": "copa_ext_ar", "prompt_function": "copa_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "copa_ext_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 90, "effective_num_docs": 90, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|hellaswag_okapi_ar": { "name": "hellaswag_okapi_ar", "prompt_function": "hellaswag_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "hellaswag_okapi_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 9171, "effective_num_docs": 9171, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|openbook_qa_ext_ar": { "name": "openbook_qa_ext_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "openbook_qa_ext_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 495, "effective_num_docs": 495, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|piqa_ar": { "name": "piqa_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "piqa_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, 
"stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 1833, "effective_num_docs": 1833, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|race_ar": { "name": "race_ar", "prompt_function": "alghafa_prompt", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "race_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 4929, "effective_num_docs": 4929, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|sciq_ar": { "name": "sciq_ar", "prompt_function": "sciq_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "sciq_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 995, "effective_num_docs": 995, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "community|toxigen_ar": { "name": "toxigen_ar", "prompt_function": "toxigen_prompt_arabic", "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", "hf_subset": "toxigen_ar", "metric": [ "loglikelihood_acc_norm" ], "hf_avail_splits": [ "test", "validation" ], "evaluation_splits": [ "test" ], "few_shots_split": "validation", "few_shots_select": "sequential", "generation_size": null, "stop_sequence": null, "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "community" ], "original_num_docs": 935, "effective_num_docs": 935, "trust_dataset": null, "must_remove_duplicate_docs": null, "version": 0 }, "lighteval|xstory_cloze:ar": { "name": "xstory_cloze:ar", "prompt_function": "storycloze", "hf_repo": "juletxara/xstory_cloze", "hf_subset": "ar", "metric": [ "loglikelihood_acc" ], "hf_avail_splits": [ "training", "eval" ], "evaluation_splits": [ "eval" ], "few_shots_split": null, "few_shots_select": null, "generation_size": -1, "stop_sequence": [ "\n" ], "output_regex": null, "num_samples": null, "frozen": false, "suite": [ "lighteval" ], "original_num_docs": 1511, "effective_num_docs": 1511, "trust_dataset": true, "must_remove_duplicate_docs": null, "version": 0 } }, "summary_tasks": { "community|acva:Algeria|0": { "hashes": { "hash_examples": "da5a3003cd46f6f9", "hash_full_prompts": "da5a3003cd46f6f9", "hash_input_tokens": "d93423d91b495ce3", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Ancient_Egypt|0": { "hashes": { "hash_examples": "52d6f767fede195b", "hash_full_prompts": "52d6f767fede195b", "hash_input_tokens": "fd2ed570bfe4e3e9", "hash_cont_tokens": "c1e2b54cf8250f31" }, "truncated": 0, "non_truncated": 315, "padded": 630, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arab_Empire|0": { "hashes": { "hash_examples": "8dacff6a79804a75", "hash_full_prompts": "8dacff6a79804a75", "hash_input_tokens": "eaee28cb7e3e4289", "hash_cont_tokens": 
"a57d793a5ea04c42" }, "truncated": 0, "non_truncated": 265, "padded": 530, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Architecture|0": { "hashes": { "hash_examples": "df286cd862d9f6bb", "hash_full_prompts": "df286cd862d9f6bb", "hash_input_tokens": "96b173c62fce91dc", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Art|0": { "hashes": { "hash_examples": "112883d764118a49", "hash_full_prompts": "112883d764118a49", "hash_input_tokens": "e5ff56fbf53a6157", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Astronomy|0": { "hashes": { "hash_examples": "20dcdf2454bf8671", "hash_full_prompts": "20dcdf2454bf8671", "hash_input_tokens": "094adc131fc4e087", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Calligraphy|0": { "hashes": { "hash_examples": "3a9f9d1ebe868a15", "hash_full_prompts": "3a9f9d1ebe868a15", "hash_input_tokens": "771f4f0ab4c5216e", "hash_cont_tokens": "b6820eb4f3eef8a0" }, "truncated": 0, "non_truncated": 255, "padded": 510, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Ceremony|0": { "hashes": { "hash_examples": "c927630f8d2f44da", "hash_full_prompts": "c927630f8d2f44da", "hash_input_tokens": "4a4de1678e041987", "hash_cont_tokens": "4bdb062f9ac7e83c" }, "truncated": 0, "non_truncated": 185, "padded": 370, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Clothing|0": { "hashes": { "hash_examples": "6ad0740c2ac6ac92", "hash_full_prompts": "6ad0740c2ac6ac92", "hash_input_tokens": "1c6f0d9f5a0b9c20", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Culture|0": { "hashes": { "hash_examples": "2177bd857ad872ae", "hash_full_prompts": "2177bd857ad872ae", "hash_input_tokens": "e0b3ed99ace8746d", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Food|0": { "hashes": { "hash_examples": "a6ada65b71d7c9c5", "hash_full_prompts": "a6ada65b71d7c9c5", "hash_input_tokens": "1b90a75b05c6eed3", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Funeral|0": { "hashes": { "hash_examples": "fcee39dc29eaae91", "hash_full_prompts": "fcee39dc29eaae91", "hash_input_tokens": "64ae1b9611d5836e", "hash_cont_tokens": "d00f5e9bb7608898" }, "truncated": 0, "non_truncated": 95, "padded": 190, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Geography|0": { "hashes": { "hash_examples": "d36eda7c89231c02", "hash_full_prompts": "d36eda7c89231c02", "hash_input_tokens": "ffd73ed904847964", "hash_cont_tokens": "fe3a24e435a5cdd7" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 
0 }, "community|acva:Arabic_History|0": { "hashes": { "hash_examples": "6354ac0d6db6a5fc", "hash_full_prompts": "6354ac0d6db6a5fc", "hash_input_tokens": "e61e7ff93969bcde", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Language_Origin|0": { "hashes": { "hash_examples": "ddc967c8aca34402", "hash_full_prompts": "ddc967c8aca34402", "hash_input_tokens": "990b3ef7fd87a19e", "hash_cont_tokens": "d00f5e9bb7608898" }, "truncated": 0, "non_truncated": 95, "padded": 190, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Literature|0": { "hashes": { "hash_examples": "4305379fd46be5d8", "hash_full_prompts": "4305379fd46be5d8", "hash_input_tokens": "8fd9889e88bd6b56", "hash_cont_tokens": "fe3a24e435a5cdd7" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Math|0": { "hashes": { "hash_examples": "dec621144f4d28be", "hash_full_prompts": "dec621144f4d28be", "hash_input_tokens": "96e14899de8e98dd", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Medicine|0": { "hashes": { "hash_examples": "2b344cdae9495ff2", "hash_full_prompts": "2b344cdae9495ff2", "hash_input_tokens": "865484039c1c94b6", "hash_cont_tokens": "fe3a24e435a5cdd7" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Music|0": { "hashes": { "hash_examples": "0c54624d881944ce", "hash_full_prompts": "0c54624d881944ce", "hash_input_tokens": "1050a20ea888fe88", "hash_cont_tokens": "4b866375ab9b5507" }, "truncated": 0, "non_truncated": 139, "padded": 278, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Ornament|0": { "hashes": { "hash_examples": "251a4a84289d8bc1", "hash_full_prompts": "251a4a84289d8bc1", "hash_input_tokens": "d2299fbaa6e9420b", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Philosophy|0": { "hashes": { "hash_examples": "3f86fb9c94c13d22", "hash_full_prompts": "3f86fb9c94c13d22", "hash_input_tokens": "c92c334620eaa9b7", "hash_cont_tokens": "fe3a24e435a5cdd7" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Physics_and_Chemistry|0": { "hashes": { "hash_examples": "8fec65af3695b62a", "hash_full_prompts": "8fec65af3695b62a", "hash_input_tokens": "2324231c491d0194", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Arabic_Wedding|0": { "hashes": { "hash_examples": "9cc3477184d7a4b8", "hash_full_prompts": "9cc3477184d7a4b8", "hash_input_tokens": "aaee6091a4521f15", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Bahrain|0": { "hashes": { "hash_examples": "c92e803a0fa8b9e2", "hash_full_prompts": "c92e803a0fa8b9e2", 
"hash_input_tokens": "eabf6ccfc08f16c0", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Comoros|0": { "hashes": { "hash_examples": "06e5d4bba8e54cae", "hash_full_prompts": "06e5d4bba8e54cae", "hash_input_tokens": "e19aedde72f85c15", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Egypt_modern|0": { "hashes": { "hash_examples": "c6ec369164f93446", "hash_full_prompts": "c6ec369164f93446", "hash_input_tokens": "df56abc602d059ba", "hash_cont_tokens": "d00f5e9bb7608898" }, "truncated": 0, "non_truncated": 95, "padded": 190, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromAncientEgypt|0": { "hashes": { "hash_examples": "b9d56d74818b9bd4", "hash_full_prompts": "b9d56d74818b9bd4", "hash_input_tokens": "1710d33e280d904b", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromByzantium|0": { "hashes": { "hash_examples": "5316c9624e7e59b8", "hash_full_prompts": "5316c9624e7e59b8", "hash_input_tokens": "4228a11a23c80e8b", "hash_cont_tokens": "fe3a24e435a5cdd7" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromChina|0": { "hashes": { "hash_examples": "87894bce95a56411", "hash_full_prompts": "87894bce95a56411", "hash_input_tokens": "da82963236399159", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromGreece|0": { "hashes": { "hash_examples": "0baa78a27e469312", "hash_full_prompts": "0baa78a27e469312", "hash_input_tokens": "562ed99d4d39d3a4", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromIslam|0": { "hashes": { "hash_examples": "0c2532cde6541ff2", "hash_full_prompts": "0c2532cde6541ff2", "hash_input_tokens": "f59415d4783fe0ca", "hash_cont_tokens": "fe3a24e435a5cdd7" }, "truncated": 0, "non_truncated": 145, "padded": 290, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromPersia|0": { "hashes": { "hash_examples": "efcd8112dc53c6e5", "hash_full_prompts": "efcd8112dc53c6e5", "hash_input_tokens": "f69e057c6aa2c47d", "hash_cont_tokens": "919736d4992ad983" }, "truncated": 0, "non_truncated": 175, "padded": 350, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:InfluenceFromRome|0": { "hashes": { "hash_examples": "9db61480e2e85fd3", "hash_full_prompts": "9db61480e2e85fd3", "hash_input_tokens": "531524bbd0b33630", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Iraq|0": { "hashes": { "hash_examples": "96dac3dfa8d2f41f", "hash_full_prompts": "96dac3dfa8d2f41f", "hash_input_tokens": "0606b68feff69418", "hash_cont_tokens": "13c8aae5240b62db" }, "truncated": 0, "non_truncated": 85, "padded": 170, 
"non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Islam_Education|0": { "hashes": { "hash_examples": "0d80355f6a4cb51b", "hash_full_prompts": "0d80355f6a4cb51b", "hash_input_tokens": "72997f559c05f135", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Islam_branches_and_schools|0": { "hashes": { "hash_examples": "5cedce1be2c3ad50", "hash_full_prompts": "5cedce1be2c3ad50", "hash_input_tokens": "8cf5b726f5290a67", "hash_cont_tokens": "919736d4992ad983" }, "truncated": 0, "non_truncated": 175, "padded": 350, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Islamic_law_system|0": { "hashes": { "hash_examples": "c0e6db8bc84e105e", "hash_full_prompts": "c0e6db8bc84e105e", "hash_input_tokens": "d34897afb8ab5e09", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Jordan|0": { "hashes": { "hash_examples": "33deb5b4e5ddd6a1", "hash_full_prompts": "33deb5b4e5ddd6a1", "hash_input_tokens": "09bbb49528e55e95", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Kuwait|0": { "hashes": { "hash_examples": "eb41773346d7c46c", "hash_full_prompts": "eb41773346d7c46c", "hash_input_tokens": "b007521e3ee9a3e3", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Lebanon|0": { "hashes": { "hash_examples": "25932dbf4c13d34f", "hash_full_prompts": "25932dbf4c13d34f", "hash_input_tokens": "e9c65d47ccb92a4d", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Libya|0": { "hashes": { "hash_examples": "f2c4db63cd402926", "hash_full_prompts": "f2c4db63cd402926", "hash_input_tokens": "14d5ee3d445791e7", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Mauritania|0": { "hashes": { "hash_examples": "8723ab5fdf286b54", "hash_full_prompts": "8723ab5fdf286b54", "hash_input_tokens": "2ed92e497ff90422", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Mesopotamia_civilization|0": { "hashes": { "hash_examples": "c33f5502a6130ca9", "hash_full_prompts": "c33f5502a6130ca9", "hash_input_tokens": "70e12c55674679bc", "hash_cont_tokens": "e00b82159a687ad7" }, "truncated": 0, "non_truncated": 155, "padded": 310, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Morocco|0": { "hashes": { "hash_examples": "588a5ed27904b1ae", "hash_full_prompts": "588a5ed27904b1ae", "hash_input_tokens": "fe66d3ee3528a4e1", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Oman|0": { "hashes": { "hash_examples": "d447c52b94248b69", "hash_full_prompts": 
"d447c52b94248b69", "hash_input_tokens": "41f4f25c4c1e4c48", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Palestine|0": { "hashes": { "hash_examples": "19197e076ad14ff5", "hash_full_prompts": "19197e076ad14ff5", "hash_input_tokens": "7dbf9f6975bb562b", "hash_cont_tokens": "13c8aae5240b62db" }, "truncated": 0, "non_truncated": 85, "padded": 170, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Qatar|0": { "hashes": { "hash_examples": "cf0736fa185b28f6", "hash_full_prompts": "cf0736fa185b28f6", "hash_input_tokens": "afd869847a7ac8be", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Saudi_Arabia|0": { "hashes": { "hash_examples": "69beda6e1b85a08d", "hash_full_prompts": "69beda6e1b85a08d", "hash_input_tokens": "bdc0858a3346fa55", "hash_cont_tokens": "56dfe27ee01362a4" }, "truncated": 0, "non_truncated": 195, "padded": 390, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Somalia|0": { "hashes": { "hash_examples": "b387940c65784fbf", "hash_full_prompts": "b387940c65784fbf", "hash_input_tokens": "f81294d9be9fe0e0", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Sudan|0": { "hashes": { "hash_examples": "e02c32b9d2dd0c3f", "hash_full_prompts": "e02c32b9d2dd0c3f", "hash_input_tokens": "272bf04db550f884", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Syria|0": { "hashes": { "hash_examples": "60a6f8fe73bda4bb", "hash_full_prompts": "60a6f8fe73bda4bb", "hash_input_tokens": "69c73ea4d3e95c40", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Tunisia|0": { "hashes": { "hash_examples": "34bb15d3830c5649", "hash_full_prompts": "34bb15d3830c5649", "hash_input_tokens": "14e7d3ab646c9502", "hash_cont_tokens": "f2c5b8cf6c0e0976" }, "truncated": 0, "non_truncated": 45, "padded": 90, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:United_Arab_Emirates|0": { "hashes": { "hash_examples": "98a0ba78172718ce", "hash_full_prompts": "98a0ba78172718ce", "hash_input_tokens": "50db0af545447eee", "hash_cont_tokens": "13c8aae5240b62db" }, "truncated": 0, "non_truncated": 85, "padded": 170, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:Yemen|0": { "hashes": { "hash_examples": "18e9bcccbb4ced7a", "hash_full_prompts": "18e9bcccbb4ced7a", "hash_input_tokens": "6f0d6b90714901ac", "hash_cont_tokens": "12d6d46b075f79eb" }, "truncated": 0, "non_truncated": 10, "padded": 20, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:communication|0": { "hashes": { "hash_examples": "9ff28ab5eab5c97b", "hash_full_prompts": "9ff28ab5eab5c97b", "hash_input_tokens": "0ffbf34fcca6e60d", "hash_cont_tokens": "b43f6e0ab3067882" }, "truncated": 0, "non_truncated": 364, "padded": 728, "non_padded": 0, "effective_few_shots": 0.0, 
"num_truncated_few_shots": 0 }, "community|acva:computer_and_phone|0": { "hashes": { "hash_examples": "37bac2f086aaf6c2", "hash_full_prompts": "37bac2f086aaf6c2", "hash_input_tokens": "1574c9561682e0fb", "hash_cont_tokens": "b704dcf1720c5755" }, "truncated": 0, "non_truncated": 295, "padded": 590, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:daily_life|0": { "hashes": { "hash_examples": "bf07363c1c252e2f", "hash_full_prompts": "bf07363c1c252e2f", "hash_input_tokens": "a6edfca92356d116", "hash_cont_tokens": "7308a3845f72e43e" }, "truncated": 0, "non_truncated": 337, "padded": 674, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|acva:entertainment|0": { "hashes": { "hash_examples": "37077bc00f0ac56a", "hash_full_prompts": "37077bc00f0ac56a", "hash_input_tokens": "105b24bf8d1e4d4d", "hash_cont_tokens": "b704dcf1720c5755" }, "truncated": 0, "non_truncated": 295, "padded": 590, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|alghafa:mcq_exams_test_ar|0": { "hashes": { "hash_examples": "c07a5e78c5c0b8fe", "hash_full_prompts": "c07a5e78c5c0b8fe", "hash_input_tokens": "1f5edf4c184a4bcf", "hash_cont_tokens": "26ae3f89a0edcdfb" }, "truncated": 0, "non_truncated": 557, "padded": 2228, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|alghafa:meta_ar_dialects|0": { "hashes": { "hash_examples": "c0b6081f83e14064", "hash_full_prompts": "c0b6081f83e14064", "hash_input_tokens": "32c5d2dbace09641", "hash_cont_tokens": "03352aec2d5da2f5" }, "truncated": 0, "non_truncated": 5395, "padded": 21572, "non_padded": 8, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|alghafa:meta_ar_msa|0": { "hashes": { "hash_examples": "64eb78a7c5b7484b", "hash_full_prompts": "64eb78a7c5b7484b", "hash_input_tokens": "9173eb4d035c6718", "hash_cont_tokens": "0d40ae6c7006bfbb" }, "truncated": 0, "non_truncated": 895, "padded": 3560, "non_padded": 20, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { "hashes": { "hash_examples": "54fc3502c1c02c06", "hash_full_prompts": "54fc3502c1c02c06", "hash_input_tokens": "a883a4d89637b330", "hash_cont_tokens": "b82b619647644015" }, "truncated": 0, "non_truncated": 75, "padded": 148, "non_padded": 2, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { "hashes": { "hash_examples": "46572d83696552ae", "hash_full_prompts": "46572d83696552ae", "hash_input_tokens": "31dadf018546162e", "hash_cont_tokens": "ac9a83fe9d8d99e7" }, "truncated": 0, "non_truncated": 150, "padded": 747, "non_padded": 3, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { "hashes": { "hash_examples": "f430d97ff715bc1c", "hash_full_prompts": "f430d97ff715bc1c", "hash_input_tokens": "63d1cc51dffb5ee8", "hash_cont_tokens": "247c8a3ba0092d16" }, "truncated": 0, "non_truncated": 150, "padded": 749, "non_padded": 1, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { "hashes": { "hash_examples": "6b70a7416584f98c", "hash_full_prompts": "6b70a7416584f98c", "hash_input_tokens": "ede5a9bced679f1b", "hash_cont_tokens": "de5b69881e081318" }, "truncated": 0, "non_truncated": 7995, "padded": 15990, "non_padded": 0, 
"effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_rating_sentiment_task|0": { "hashes": { "hash_examples": "bc2005cc9d2f436e", "hash_full_prompts": "bc2005cc9d2f436e", "hash_input_tokens": "0b6bf06e296192cf", "hash_cont_tokens": "32a650fcf067a32b" }, "truncated": 0, "non_truncated": 5995, "padded": 17985, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|alghafa:multiple_choice_sentiment_task|0": { "hashes": { "hash_examples": "6fb0e254ea5945d8", "hash_full_prompts": "6fb0e254ea5945d8", "hash_input_tokens": "18acd9094d7aa800", "hash_cont_tokens": "cd4fbcc7c800da80" }, "truncated": 0, "non_truncated": 1720, "padded": 5160, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_exams|0": { "hashes": { "hash_examples": "6d721df351722656", "hash_full_prompts": "6d721df351722656", "hash_input_tokens": "7cea382a9aaac5c5", "hash_cont_tokens": "3952fd8478cb2901" }, "truncated": 0, "non_truncated": 537, "padded": 2148, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:abstract_algebra|0": { "hashes": { "hash_examples": "f2ddca8f45c0a511", "hash_full_prompts": "f2ddca8f45c0a511", "hash_input_tokens": "e40d4b65b3a4119b", "hash_cont_tokens": "771d84ba6655ec08" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:anatomy|0": { "hashes": { "hash_examples": "dfdbc1b83107668d", "hash_full_prompts": "dfdbc1b83107668d", "hash_input_tokens": "e21e012859291af8", "hash_cont_tokens": "3b3a04ac2381cf2e" }, "truncated": 0, "non_truncated": 135, "padded": 540, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:astronomy|0": { "hashes": { "hash_examples": "9736a606002a848e", "hash_full_prompts": "9736a606002a848e", "hash_input_tokens": "4578e14bfadb48ed", "hash_cont_tokens": "c4e209dd858f1eb5" }, "truncated": 0, "non_truncated": 152, "padded": 608, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:business_ethics|0": { "hashes": { "hash_examples": "735e452fbb6dc63d", "hash_full_prompts": "735e452fbb6dc63d", "hash_input_tokens": "ad7c86e39c60593d", "hash_cont_tokens": "771d84ba6655ec08" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:clinical_knowledge|0": { "hashes": { "hash_examples": "6ab0ca4da98aedcf", "hash_full_prompts": "6ab0ca4da98aedcf", "hash_input_tokens": "11ca9b79126c255c", "hash_cont_tokens": "27d080ddb72a91fb" }, "truncated": 0, "non_truncated": 265, "padded": 1060, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_biology|0": { "hashes": { "hash_examples": "17e4e390848018a4", "hash_full_prompts": "17e4e390848018a4", "hash_input_tokens": "512847c487ddbeed", "hash_cont_tokens": "36fd225818f99fc4" }, "truncated": 0, "non_truncated": 144, "padded": 576, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_chemistry|0": { "hashes": { "hash_examples": "4abb169f6dfd234b", "hash_full_prompts": "4abb169f6dfd234b", "hash_input_tokens": "12852df3ede4d6c5", "hash_cont_tokens": "771d84ba6655ec08" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, 
"community|arabic_mmlu:college_computer_science|0": { "hashes": { "hash_examples": "a369e2e941358a1e", "hash_full_prompts": "a369e2e941358a1e", "hash_input_tokens": "d36d00b5eb20e76c", "hash_cont_tokens": "771d84ba6655ec08" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_mathematics|0": { "hashes": { "hash_examples": "d7be03b8b6020bff", "hash_full_prompts": "d7be03b8b6020bff", "hash_input_tokens": "52ac0451de3d3c61", "hash_cont_tokens": "771d84ba6655ec08" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_medicine|0": { "hashes": { "hash_examples": "0518a00f097346bf", "hash_full_prompts": "0518a00f097346bf", "hash_input_tokens": "7a8f10995695737d", "hash_cont_tokens": "c2807dc27dcf6153" }, "truncated": 0, "non_truncated": 173, "padded": 692, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:college_physics|0": { "hashes": { "hash_examples": "5d842cd49bc70e12", "hash_full_prompts": "5d842cd49bc70e12", "hash_input_tokens": "eb68437d0be0cbb0", "hash_cont_tokens": "7b17d820dbbaa6cb" }, "truncated": 0, "non_truncated": 102, "padded": 408, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:computer_security|0": { "hashes": { "hash_examples": "8e85d9f85be9b32f", "hash_full_prompts": "8e85d9f85be9b32f", "hash_input_tokens": "bc447a9c2b8cc899", "hash_cont_tokens": "771d84ba6655ec08" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:conceptual_physics|0": { "hashes": { "hash_examples": "7964b55a0a49502b", "hash_full_prompts": "7964b55a0a49502b", "hash_input_tokens": "c08fecfa38084b19", "hash_cont_tokens": "2ff86a5d10a2127a" }, "truncated": 0, "non_truncated": 235, "padded": 940, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:econometrics|0": { "hashes": { "hash_examples": "1e192eae38347257", "hash_full_prompts": "1e192eae38347257", "hash_input_tokens": "49b96ebe07e53a46", "hash_cont_tokens": "901aa9a4c60559f3" }, "truncated": 0, "non_truncated": 114, "padded": 456, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:electrical_engineering|0": { "hashes": { "hash_examples": "cf97671d5c441da1", "hash_full_prompts": "cf97671d5c441da1", "hash_input_tokens": "56316458142a2173", "hash_cont_tokens": "5ae653ddb5ac9494" }, "truncated": 0, "non_truncated": 145, "padded": 580, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:elementary_mathematics|0": { "hashes": { "hash_examples": "6f49107ed43c40c5", "hash_full_prompts": "6f49107ed43c40c5", "hash_input_tokens": "aa342128460950fc", "hash_cont_tokens": "d00485b6b9b1a7b2" }, "truncated": 0, "non_truncated": 378, "padded": 1512, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:formal_logic|0": { "hashes": { "hash_examples": "7922c376008ba77b", "hash_full_prompts": "7922c376008ba77b", "hash_input_tokens": "f9ee377b9aa84576", "hash_cont_tokens": "6a74353d78fb2049" }, "truncated": 0, "non_truncated": 126, "padded": 504, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:global_facts|0": { 
"hashes": { "hash_examples": "11f9813185047d5b", "hash_full_prompts": "11f9813185047d5b", "hash_input_tokens": "7fa879972f011a75", "hash_cont_tokens": "771d84ba6655ec08" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_biology|0": { "hashes": { "hash_examples": "2a804b1d90cbe66e", "hash_full_prompts": "2a804b1d90cbe66e", "hash_input_tokens": "503076ce64fdfdc5", "hash_cont_tokens": "e1b38a431c7cfdf2" }, "truncated": 0, "non_truncated": 310, "padded": 1240, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_chemistry|0": { "hashes": { "hash_examples": "0032168adabc53b4", "hash_full_prompts": "0032168adabc53b4", "hash_input_tokens": "36cc41109c11a208", "hash_cont_tokens": "d30d155b83b8beee" }, "truncated": 0, "non_truncated": 203, "padded": 808, "non_padded": 4, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_computer_science|0": { "hashes": { "hash_examples": "f2fb8740f9df980f", "hash_full_prompts": "f2fb8740f9df980f", "hash_input_tokens": "085f9c895dbe0114", "hash_cont_tokens": "771d84ba6655ec08" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_european_history|0": { "hashes": { "hash_examples": "73509021e7e66435", "hash_full_prompts": "73509021e7e66435", "hash_input_tokens": "87aead297a7400ab", "hash_cont_tokens": "aa387b55778f7d85" }, "truncated": 0, "non_truncated": 165, "padded": 660, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_geography|0": { "hashes": { "hash_examples": "9e08d1894940ff42", "hash_full_prompts": "9e08d1894940ff42", "hash_input_tokens": "a8c31e6e7ead4f0c", "hash_cont_tokens": "ea572b82c41be702" }, "truncated": 0, "non_truncated": 198, "padded": 792, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_government_and_politics|0": { "hashes": { "hash_examples": "64b7e97817ca6c76", "hash_full_prompts": "64b7e97817ca6c76", "hash_input_tokens": "eda86544efcc01e7", "hash_cont_tokens": "d535b26a86b9a9d3" }, "truncated": 0, "non_truncated": 193, "padded": 772, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_macroeconomics|0": { "hashes": { "hash_examples": "9f582da8534bd2ef", "hash_full_prompts": "9f582da8534bd2ef", "hash_input_tokens": "f9ea3a662021650f", "hash_cont_tokens": "606c1a2137551055" }, "truncated": 0, "non_truncated": 390, "padded": 1560, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_mathematics|0": { "hashes": { "hash_examples": "fd54f1c10d423c51", "hash_full_prompts": "fd54f1c10d423c51", "hash_input_tokens": "eccdfc4f47bb9169", "hash_cont_tokens": "f18ea16235393e7a" }, "truncated": 0, "non_truncated": 270, "padded": 1080, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_microeconomics|0": { "hashes": { "hash_examples": "7037896925aaf42f", "hash_full_prompts": "7037896925aaf42f", "hash_input_tokens": "aee1fd256f82ad9b", "hash_cont_tokens": "05a90a8afcf3afc3" }, "truncated": 0, "non_truncated": 238, "padded": 952, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, 
"community|arabic_mmlu:high_school_physics|0": { "hashes": { "hash_examples": "60c3776215167dae", "hash_full_prompts": "60c3776215167dae", "hash_input_tokens": "ac7655ca899fd47c", "hash_cont_tokens": "847282b0877be22e" }, "truncated": 0, "non_truncated": 151, "padded": 604, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_psychology|0": { "hashes": { "hash_examples": "61176bfd5da1298f", "hash_full_prompts": "61176bfd5da1298f", "hash_input_tokens": "539ac9b09445b052", "hash_cont_tokens": "30a296640c9037d1" }, "truncated": 0, "non_truncated": 545, "padded": 2180, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_statistics|0": { "hashes": { "hash_examples": "40dfeebd1ea10f76", "hash_full_prompts": "40dfeebd1ea10f76", "hash_input_tokens": "8ff173bc88036af4", "hash_cont_tokens": "a259777479a52fa3" }, "truncated": 0, "non_truncated": 216, "padded": 864, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_us_history|0": { "hashes": { "hash_examples": "03daa510ba917f4d", "hash_full_prompts": "03daa510ba917f4d", "hash_input_tokens": "ab77cc0771a2c189", "hash_cont_tokens": "35915add7ad519d1" }, "truncated": 0, "non_truncated": 204, "padded": 816, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:high_school_world_history|0": { "hashes": { "hash_examples": "be075ffd579f43c2", "hash_full_prompts": "be075ffd579f43c2", "hash_input_tokens": "c868843e8fcfd10f", "hash_cont_tokens": "4766d81466995bda" }, "truncated": 0, "non_truncated": 237, "padded": 913, "non_padded": 35, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:human_aging|0": { "hashes": { "hash_examples": "caa5b69f640bd1ef", "hash_full_prompts": "caa5b69f640bd1ef", "hash_input_tokens": "61685c7732fef63d", "hash_cont_tokens": "7bf358fd838eb005" }, "truncated": 0, "non_truncated": 223, "padded": 888, "non_padded": 4, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:human_sexuality|0": { "hashes": { "hash_examples": "5ed2e38fb25a3767", "hash_full_prompts": "5ed2e38fb25a3767", "hash_input_tokens": "30a24258106b5f1e", "hash_cont_tokens": "8c45b597fb2c4a20" }, "truncated": 0, "non_truncated": 131, "padded": 520, "non_padded": 4, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:international_law|0": { "hashes": { "hash_examples": "4e3e9e28d1b96484", "hash_full_prompts": "4e3e9e28d1b96484", "hash_input_tokens": "8b97210c1260a491", "hash_cont_tokens": "2c8c36f61bba92e8" }, "truncated": 0, "non_truncated": 121, "padded": 480, "non_padded": 4, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:jurisprudence|0": { "hashes": { "hash_examples": "e264b755366310b3", "hash_full_prompts": "e264b755366310b3", "hash_input_tokens": "14277b404401fffe", "hash_cont_tokens": "43df6122a6ce9ca7" }, "truncated": 0, "non_truncated": 108, "padded": 428, "non_padded": 4, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:logical_fallacies|0": { "hashes": { "hash_examples": "a4ab6965a3e38071", "hash_full_prompts": "a4ab6965a3e38071", "hash_input_tokens": "b6ce552031875395", "hash_cont_tokens": "636d46707e1a84a1" }, "truncated": 0, "non_truncated": 163, "padded": 648, "non_padded": 4, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, 
"community|arabic_mmlu:machine_learning|0": { "hashes": { "hash_examples": "b92320efa6636b40", "hash_full_prompts": "b92320efa6636b40", "hash_input_tokens": "e942780a12e9ee85", "hash_cont_tokens": "733df8274472cbd6" }, "truncated": 0, "non_truncated": 112, "padded": 440, "non_padded": 8, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:management|0": { "hashes": { "hash_examples": "c9ee4872a850fe20", "hash_full_prompts": "c9ee4872a850fe20", "hash_input_tokens": "62f096ba10218587", "hash_cont_tokens": "fc3cf8f15a104c82" }, "truncated": 0, "non_truncated": 103, "padded": 412, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:marketing|0": { "hashes": { "hash_examples": "0c151b70f6a047e3", "hash_full_prompts": "0c151b70f6a047e3", "hash_input_tokens": "04273cc8f5b3ea59", "hash_cont_tokens": "b8698fec039e309c" }, "truncated": 0, "non_truncated": 234, "padded": 932, "non_padded": 4, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:medical_genetics|0": { "hashes": { "hash_examples": "513f6cb8fca3a24e", "hash_full_prompts": "513f6cb8fca3a24e", "hash_input_tokens": "744f86f0f93b9e64", "hash_cont_tokens": "771d84ba6655ec08" }, "truncated": 0, "non_truncated": 100, "padded": 396, "non_padded": 4, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:miscellaneous|0": { "hashes": { "hash_examples": "259a190d635331db", "hash_full_prompts": "259a190d635331db", "hash_input_tokens": "e32e4bee1cb71f1d", "hash_cont_tokens": "87f2df51ba8a6c8c" }, "truncated": 0, "non_truncated": 783, "padded": 3071, "non_padded": 61, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:moral_disputes|0": { "hashes": { "hash_examples": "b85052c48a0b7bc3", "hash_full_prompts": "b85052c48a0b7bc3", "hash_input_tokens": "58cfba427f3fdda5", "hash_cont_tokens": "2003018f8616dc35" }, "truncated": 0, "non_truncated": 346, "padded": 1364, "non_padded": 20, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:moral_scenarios|0": { "hashes": { "hash_examples": "28d0b069ef00dd00", "hash_full_prompts": "28d0b069ef00dd00", "hash_input_tokens": "5b5a39ca39a3fe4f", "hash_cont_tokens": "1ae2c6d8baa46e2a" }, "truncated": 0, "non_truncated": 895, "padded": 3576, "non_padded": 4, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:nutrition|0": { "hashes": { "hash_examples": "00c9bc5f1d305b2f", "hash_full_prompts": "00c9bc5f1d305b2f", "hash_input_tokens": "8353992ed30113ec", "hash_cont_tokens": "7b7aef3aad672dcb" }, "truncated": 0, "non_truncated": 306, "padded": 1192, "non_padded": 32, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:philosophy|0": { "hashes": { "hash_examples": "a458c08454a3fd5f", "hash_full_prompts": "a458c08454a3fd5f", "hash_input_tokens": "e9bd5aa0506f82af", "hash_cont_tokens": "ffde3f7e9cb8ce4f" }, "truncated": 0, "non_truncated": 311, "padded": 1208, "non_padded": 36, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:prehistory|0": { "hashes": { "hash_examples": "d6a0ecbdbb670e9c", "hash_full_prompts": "d6a0ecbdbb670e9c", "hash_input_tokens": "d8640a15dca26ed9", "hash_cont_tokens": "bda8eff659818de4" }, "truncated": 0, "non_truncated": 324, "padded": 1268, "non_padded": 28, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:professional_accounting|0": { "hashes": { "hash_examples": 
"b4a95fe480b6540e", "hash_full_prompts": "b4a95fe480b6540e", "hash_input_tokens": "13f7653fdb5d5f15", "hash_cont_tokens": "5f7423e268242363" }, "truncated": 0, "non_truncated": 282, "padded": 1124, "non_padded": 4, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:professional_law|0": { "hashes": { "hash_examples": "c2be9651cdbdde3b", "hash_full_prompts": "c2be9651cdbdde3b", "hash_input_tokens": "2f9cd7897f411db5", "hash_cont_tokens": "60c68d5e3ae45dc1" }, "truncated": 0, "non_truncated": 1534, "padded": 6084, "non_padded": 52, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:professional_medicine|0": { "hashes": { "hash_examples": "26ce92416288f273", "hash_full_prompts": "26ce92416288f273", "hash_input_tokens": "5741e480537dc0a0", "hash_cont_tokens": "f52e4396cb5cdef2" }, "truncated": 0, "non_truncated": 272, "padded": 1068, "non_padded": 20, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:professional_psychology|0": { "hashes": { "hash_examples": "71ea5f182ea9a641", "hash_full_prompts": "71ea5f182ea9a641", "hash_input_tokens": "f6a027b61099744d", "hash_cont_tokens": "a0ebf929cab4bd2c" }, "truncated": 0, "non_truncated": 612, "padded": 2400, "non_padded": 48, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:public_relations|0": { "hashes": { "hash_examples": "125adc21f91f8d77", "hash_full_prompts": "125adc21f91f8d77", "hash_input_tokens": "8f1ec212bb042500", "hash_cont_tokens": "09ba719b073994df" }, "truncated": 0, "non_truncated": 110, "padded": 420, "non_padded": 20, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:security_studies|0": { "hashes": { "hash_examples": "3c18b216c099fb26", "hash_full_prompts": "3c18b216c099fb26", "hash_input_tokens": "17da851776aea479", "hash_cont_tokens": "4854dd7ac1df8c7c" }, "truncated": 0, "non_truncated": 245, "padded": 976, "non_padded": 4, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:sociology|0": { "hashes": { "hash_examples": "3f2a9634cef7417d", "hash_full_prompts": "3f2a9634cef7417d", "hash_input_tokens": "2c4123322029ff01", "hash_cont_tokens": "ea4c0af969f2373c" }, "truncated": 0, "non_truncated": 201, "padded": 796, "non_padded": 8, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:us_foreign_policy|0": { "hashes": { "hash_examples": "22249da54056475e", "hash_full_prompts": "22249da54056475e", "hash_input_tokens": "3cf1baf92463dea2", "hash_cont_tokens": "771d84ba6655ec08" }, "truncated": 0, "non_truncated": 100, "padded": 388, "non_padded": 12, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:virology|0": { "hashes": { "hash_examples": "9d194b9471dc624e", "hash_full_prompts": "9d194b9471dc624e", "hash_input_tokens": "b14f4dc0da742dd9", "hash_cont_tokens": "810369902bd4e47e" }, "truncated": 0, "non_truncated": 166, "padded": 648, "non_padded": 16, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arabic_mmlu:world_religions|0": { "hashes": { "hash_examples": "229e5fe50082b064", "hash_full_prompts": "229e5fe50082b064", "hash_input_tokens": "7f2571e5899f18d1", "hash_cont_tokens": "fd19d689989ad4e2" }, "truncated": 0, "non_truncated": 171, "padded": 664, "non_padded": 20, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arc_challenge_okapi_ar|0": { "hashes": { "hash_examples": "ab893807673bc355", "hash_full_prompts": "ab893807673bc355", 
"hash_input_tokens": "a8d20998fff1a424", "hash_cont_tokens": "05d6059fd7f0a574" }, "truncated": 0, "non_truncated": 1160, "padded": 4547, "non_padded": 93, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|arc_easy_ar|0": { "hashes": { "hash_examples": "acb688624acc3d04", "hash_full_prompts": "acb688624acc3d04", "hash_input_tokens": "5d785f47c4dad40e", "hash_cont_tokens": "af4f49218caa0c1b" }, "truncated": 0, "non_truncated": 2364, "padded": 9277, "non_padded": 179, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|boolq_ar|0": { "hashes": { "hash_examples": "48355a67867e0c32", "hash_full_prompts": "48355a67867e0c32", "hash_input_tokens": "c96149c3013d36f2", "hash_cont_tokens": "f51d666013e03070" }, "truncated": 0, "non_truncated": 3260, "padded": 6461, "non_padded": 59, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|copa_ext_ar|0": { "hashes": { "hash_examples": "9bb83301bb72eecf", "hash_full_prompts": "9bb83301bb72eecf", "hash_input_tokens": "158522a4a1a07d74", "hash_cont_tokens": "eb3992fbe17ceaa2" }, "truncated": 0, "non_truncated": 90, "padded": 180, "non_padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|hellaswag_okapi_ar|0": { "hashes": { "hash_examples": "6e8cf57a322dfadd", "hash_full_prompts": "6e8cf57a322dfadd", "hash_input_tokens": "76469b0abf5c6d58", "hash_cont_tokens": "03680d546b4eb5c5" }, "truncated": 0, "non_truncated": 9171, "padded": 36584, "non_padded": 100, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|openbook_qa_ext_ar|0": { "hashes": { "hash_examples": "923d41eb0aca93eb", "hash_full_prompts": "923d41eb0aca93eb", "hash_input_tokens": "60056163ec7e5f41", "hash_cont_tokens": "764164f5bfb60831" }, "truncated": 0, "non_truncated": 495, "padded": 1951, "non_padded": 29, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|piqa_ar|0": { "hashes": { "hash_examples": "94bc205a520d3ea0", "hash_full_prompts": "94bc205a520d3ea0", "hash_input_tokens": "0db92e9ce7ad44b1", "hash_cont_tokens": "1b912774aa918718" }, "truncated": 0, "non_truncated": 1833, "padded": 3621, "non_padded": 45, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|race_ar|0": { "hashes": { "hash_examples": "de65130bae647516", "hash_full_prompts": "de65130bae647516", "hash_input_tokens": "fd28b2dd87c6b409", "hash_cont_tokens": "86cc015f91d5e5da" }, "truncated": 0, "non_truncated": 4929, "padded": 19693, "non_padded": 23, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|sciq_ar|0": { "hashes": { "hash_examples": "57d50ff7691fc4e1", "hash_full_prompts": "57d50ff7691fc4e1", "hash_input_tokens": "51ed2d04eb855ac4", "hash_cont_tokens": "d5b52e42860d226e" }, "truncated": 0, "non_truncated": 995, "padded": 3959, "non_padded": 21, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "community|toxigen_ar|0": { "hashes": { "hash_examples": "1e139513004a9a2e", "hash_full_prompts": "1e139513004a9a2e", "hash_input_tokens": "0bbda82a75315737", "hash_cont_tokens": "4323a1b5cd5f70c3" }, "truncated": 0, "non_truncated": 935, "padded": 1845, "non_padded": 25, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }, "lighteval|xstory_cloze:ar|0": { "hashes": { "hash_examples": "865426a22c787481", "hash_full_prompts": "865426a22c787481", "hash_input_tokens": "a2aab90d52e5bd51", "hash_cont_tokens": "103b4ba4d5ea0ee0" }, "truncated": 0, "non_truncated": 1511, "padded": 2974, "non_padded": 48, "effective_few_shots": 0.0, 
"num_truncated_few_shots": 0 } }, "summary_general": { "hashes": { "hash_examples": "35a5b2f1e5e1ae15", "hash_full_prompts": "35a5b2f1e5e1ae15", "hash_input_tokens": "cb5b86de84210b23", "hash_cont_tokens": "378ab9ab44178280" }, "truncated": 0, "non_truncated": 72964, "padded": 234507, "non_padded": 1116, "num_truncated_few_shots": 0 } }