{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 585.618466089,
    "end_time": 17859.363127112,
    "total_evaluation_time_secondes": "17273.744661023",
    "model_name": "EpistemeAI/Fireball-Alpaca-Llama3.1.07-8B-Philos-Math-KTO-beta",
    "model_sha": "ef3782837ee4996b94f7a6c9bb6c699694df56c0",
    "model_dtype": "torch.float16",
    "model_size": "14.96 GB",
    "config": null
  },
  "results": {
    "community|acva:Algeria|0": {
      "acc_norm": 0.5384615384615384,
      "acc_norm_stderr": 0.03579154352544571
    },
    "community|acva:Ancient_Egypt|0": {
      "acc_norm": 0.09841269841269841,
      "acc_norm_stderr": 0.01680988100419675
    },
    "community|acva:Arab_Empire|0": {
      "acc_norm": 0.3584905660377358,
      "acc_norm_stderr": 0.029514703583981765
    },
    "community|acva:Arabic_Architecture|0": {
      "acc_norm": 0.4717948717948718,
      "acc_norm_stderr": 0.035840746749208334
    },
    "community|acva:Arabic_Art|0": {
      "acc_norm": 0.38461538461538464,
      "acc_norm_stderr": 0.03492896993742304
    },
    "community|acva:Arabic_Astronomy|0": {
      "acc_norm": 0.4717948717948718,
      "acc_norm_stderr": 0.035840746749208334
    },
    "community|acva:Arabic_Calligraphy|0": {
      "acc_norm": 0.6352941176470588,
      "acc_norm_stderr": 0.030202433919892076
    },
    "community|acva:Arabic_Ceremony|0": {
      "acc_norm": 0.5351351351351351,
      "acc_norm_stderr": 0.036769369509486984
    },
    "community|acva:Arabic_Clothing|0": {
      "acc_norm": 0.558974358974359,
      "acc_norm_stderr": 0.035647329318535786
    },
    "community|acva:Arabic_Culture|0": {
      "acc_norm": 0.2717948717948718,
      "acc_norm_stderr": 0.031940861870257214
    },
    "community|acva:Arabic_Food|0": {
      "acc_norm": 0.441025641025641,
      "acc_norm_stderr": 0.0356473293185358
    },
    "community|acva:Arabic_Funeral|0": {
      "acc_norm": 0.42105263157894735,
      "acc_norm_stderr": 0.05092415229967329
    },
    "community|acva:Arabic_Geography|0": {
      "acc_norm": 0.6137931034482759,
      "acc_norm_stderr": 0.04057324734419035
    },
    "community|acva:Arabic_History|0": {
      "acc_norm": 0.3435897435897436,
      "acc_norm_stderr": 0.034096273014098566
    },
    "community|acva:Arabic_Language_Origin|0": {
      "acc_norm": 0.631578947368421,
      "acc_norm_stderr": 0.049753325624911644
    },
    "community|acva:Arabic_Literature|0": {
      "acc_norm": 0.6689655172413793,
      "acc_norm_stderr": 0.039215453124671215
    },
    "community|acva:Arabic_Math|0": {
      "acc_norm": 0.3333333333333333,
      "acc_norm_stderr": 0.03384487217112063
    },
    "community|acva:Arabic_Medicine|0": {
      "acc_norm": 0.496551724137931,
      "acc_norm_stderr": 0.04166567577101579
    },
    "community|acva:Arabic_Music|0": {
      "acc_norm": 0.2446043165467626,
      "acc_norm_stderr": 0.03659146222520568
    },
    "community|acva:Arabic_Ornament|0": {
      "acc_norm": 0.7538461538461538,
      "acc_norm_stderr": 0.03092742837122567
    },
    "community|acva:Arabic_Philosophy|0": {
      "acc_norm": 0.5793103448275863,
      "acc_norm_stderr": 0.0411391498118926
    },
    "community|acva:Arabic_Physics_and_Chemistry|0": {
      "acc_norm": 0.5333333333333333,
      "acc_norm_stderr": 0.03581804596782232
    },
    "community|acva:Arabic_Wedding|0": {
      "acc_norm": 0.4307692307692308,
      "acc_norm_stderr": 0.03555213252058761
    },
    "community|acva:Bahrain|0": {
      "acc_norm": 0.35555555555555557,
      "acc_norm_stderr": 0.07216392363431012
    },
    "community|acva:Comoros|0": {
      "acc_norm": 0.37777777777777777,
      "acc_norm_stderr": 0.07309112127323451
    },
    "community|acva:Egypt_modern|0": {
      "acc_norm": 0.35789473684210527,
      "acc_norm_stderr": 0.04944436957628253
    },
    "community|acva:InfluenceFromAncientEgypt|0": {
      "acc_norm": 0.6615384615384615,
      "acc_norm_stderr": 0.03397280032734094
    },
    "community|acva:InfluenceFromByzantium|0": {
      "acc_norm": 0.7172413793103448,
      "acc_norm_stderr": 0.037528339580033376
    },
    "community|acva:InfluenceFromChina|0": {
      "acc_norm": 0.28717948717948716,
      "acc_norm_stderr": 0.03248373338539886
    },
    "community|acva:InfluenceFromGreece|0": {
      "acc_norm": 0.7128205128205128,
      "acc_norm_stderr": 0.032483733385398866
    },
    "community|acva:InfluenceFromIslam|0": {
      "acc_norm": 0.3931034482758621,
      "acc_norm_stderr": 0.0407032901370707
    },
    "community|acva:InfluenceFromPersia|0": {
      "acc_norm": 0.7257142857142858,
      "acc_norm_stderr": 0.033822819375172945
    },
    "community|acva:InfluenceFromRome|0": {
      "acc_norm": 0.5794871794871795,
      "acc_norm_stderr": 0.03544138389303483
    },
    "community|acva:Iraq|0": {
      "acc_norm": 0.5176470588235295,
      "acc_norm_stderr": 0.05452048340661895
    },
    "community|acva:Islam_Education|0": {
      "acc_norm": 0.48205128205128206,
      "acc_norm_stderr": 0.035874770987738294
    },
    "community|acva:Islam_branches_and_schools|0": {
      "acc_norm": 0.44,
      "acc_norm_stderr": 0.03763099724991342
    },
    "community|acva:Islamic_law_system|0": {
      "acc_norm": 0.49230769230769234,
      "acc_norm_stderr": 0.035893659406352134
    },
    "community|acva:Jordan|0": {
      "acc_norm": 0.37777777777777777,
      "acc_norm_stderr": 0.07309112127323451
    },
    "community|acva:Kuwait|0": {
      "acc_norm": 0.28888888888888886,
      "acc_norm_stderr": 0.06832943242540508
    },
    "community|acva:Lebanon|0": {
      "acc_norm": 0.17777777777777778,
      "acc_norm_stderr": 0.05763774795025094
    },
    "community|acva:Libya|0": {
      "acc_norm": 0.4888888888888889,
      "acc_norm_stderr": 0.07535922203472523
    },
    "community|acva:Mauritania|0": {
      "acc_norm": 0.4222222222222222,
      "acc_norm_stderr": 0.07446027270295805
    },
    "community|acva:Mesopotamia_civilization|0": {
      "acc_norm": 0.5548387096774193,
      "acc_norm_stderr": 0.04004808116104031
    },
    "community|acva:Morocco|0": {
      "acc_norm": 0.24444444444444444,
      "acc_norm_stderr": 0.06478835438717
    },
    "community|acva:Oman|0": {
      "acc_norm": 0.24444444444444444,
      "acc_norm_stderr": 0.06478835438717
    },
    "community|acva:Palestine|0": {
      "acc_norm": 0.3058823529411765,
      "acc_norm_stderr": 0.05027523520585574
    },
    "community|acva:Qatar|0": {
      "acc_norm": 0.4444444444444444,
      "acc_norm_stderr": 0.07491109582924914
    },
    "community|acva:Saudi_Arabia|0": {
      "acc_norm": 0.38974358974358975,
      "acc_norm_stderr": 0.035014247762563705
    },
    "community|acva:Somalia|0": {
      "acc_norm": 0.37777777777777777,
      "acc_norm_stderr": 0.07309112127323451
    },
    "community|acva:Sudan|0": {
      "acc_norm": 0.4,
      "acc_norm_stderr": 0.07385489458759965
    },
    "community|acva:Syria|0": {
      "acc_norm": 0.3111111111111111,
      "acc_norm_stderr": 0.06979205927323111
    },
    "community|acva:Tunisia|0": {
      "acc_norm": 0.3111111111111111,
      "acc_norm_stderr": 0.06979205927323111
    },
    "community|acva:United_Arab_Emirates|0": {
      "acc_norm": 0.24705882352941178,
      "acc_norm_stderr": 0.047058823529411785
    },
    "community|acva:Yemen|0": {
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.13333333333333333
    },
    "community|acva:communication|0": {
      "acc_norm": 0.43131868131868134,
      "acc_norm_stderr": 0.02599443023962308
    },
    "community|acva:computer_and_phone|0": {
      "acc_norm": 0.46440677966101696,
      "acc_norm_stderr": 0.029086612547284615
    },
    "community|acva:daily_life|0": {
      "acc_norm": 0.19584569732937684,
      "acc_norm_stderr": 0.02164995877092107
    },
    "community|acva:entertainment|0": {
      "acc_norm": 0.23389830508474577,
      "acc_norm_stderr": 0.024687839412166384
    },
    "community|alghafa:mcq_exams_test_ar|0": {
      "acc_norm": 0.36983842010771995,
      "acc_norm_stderr": 0.02047361713076544
    },
    "community|alghafa:meta_ar_dialects|0": {
      "acc_norm": 0.33679332715477295,
      "acc_norm_stderr": 0.006435032011787734
    },
    "community|alghafa:meta_ar_msa|0": {
      "acc_norm": 0.39217877094972065,
      "acc_norm_stderr": 0.01632906107320745
    },
    "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
      "acc_norm": 0.72,
      "acc_norm_stderr": 0.052195060344100805
    },
    "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
      "acc_norm": 0.5533333333333333,
      "acc_norm_stderr": 0.040727903430234635
    },
    "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
      "acc_norm": 0.4266666666666667,
      "acc_norm_stderr": 0.04051863621453781
    },
    "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
      "acc_norm": 0.7209505941213258,
      "acc_norm_stderr": 0.005016620299474791
    },
    "community|alghafa:multiple_choice_rating_sentiment_task|0": {
      "acc_norm": 0.511092577147623,
      "acc_norm_stderr": 0.00645661265440372
    },
    "community|alghafa:multiple_choice_sentiment_task|0": {
      "acc_norm": 0.3691860465116279,
      "acc_norm_stderr": 0.011639527386176487
    },
    "community|arabic_exams|0": {
      "acc_norm": 0.4543761638733706,
      "acc_norm_stderr": 0.021506614115279165
    },
    "community|arabic_mmlu:abstract_algebra|0": {
      "acc_norm": 0.37,
      "acc_norm_stderr": 0.048523658709391
    },
    "community|arabic_mmlu:anatomy|0": {
      "acc_norm": 0.35555555555555557,
      "acc_norm_stderr": 0.04135176749720386
    },
    "community|arabic_mmlu:astronomy|0": {
      "acc_norm": 0.4144736842105263,
      "acc_norm_stderr": 0.04008973785779206
    },
    "community|arabic_mmlu:business_ethics|0": {
      "acc_norm": 0.41,
      "acc_norm_stderr": 0.04943110704237102
    },
    "community|arabic_mmlu:clinical_knowledge|0": {
      "acc_norm": 0.5018867924528302,
      "acc_norm_stderr": 0.030772653642075664
    },
    "community|arabic_mmlu:college_biology|0": {
      "acc_norm": 0.3402777777777778,
      "acc_norm_stderr": 0.03962135573486219
    },
    "community|arabic_mmlu:college_chemistry|0": {
      "acc_norm": 0.39,
      "acc_norm_stderr": 0.04902071300001975
    },
    "community|arabic_mmlu:college_computer_science|0": {
      "acc_norm": 0.3,
      "acc_norm_stderr": 0.046056618647183814
    },
    "community|arabic_mmlu:college_mathematics|0": {
      "acc_norm": 0.34,
      "acc_norm_stderr": 0.04760952285695235
    },
    "community|arabic_mmlu:college_medicine|0": {
      "acc_norm": 0.36416184971098264,
      "acc_norm_stderr": 0.03669072477416906
    },
    "community|arabic_mmlu:college_physics|0": {
      "acc_norm": 0.3235294117647059,
      "acc_norm_stderr": 0.04655010411319617
    },
    "community|arabic_mmlu:computer_security|0": {
      "acc_norm": 0.48,
      "acc_norm_stderr": 0.050211673156867795
    },
    "community|arabic_mmlu:conceptual_physics|0": {
      "acc_norm": 0.4,
      "acc_norm_stderr": 0.03202563076101737
    },
    "community|arabic_mmlu:econometrics|0": {
      "acc_norm": 0.32456140350877194,
      "acc_norm_stderr": 0.04404556157374767
    },
    "community|arabic_mmlu:electrical_engineering|0": {
      "acc_norm": 0.32413793103448274,
      "acc_norm_stderr": 0.03900432069185555
    },
    "community|arabic_mmlu:elementary_mathematics|0": {
      "acc_norm": 0.3201058201058201,
      "acc_norm_stderr": 0.024026846392873506
    },
    "community|arabic_mmlu:formal_logic|0": {
      "acc_norm": 0.2777777777777778,
      "acc_norm_stderr": 0.04006168083848878
    },
    "community|arabic_mmlu:global_facts|0": {
      "acc_norm": 0.31,
      "acc_norm_stderr": 0.04648231987117316
    },
    "community|arabic_mmlu:high_school_biology|0": {
      "acc_norm": 0.4967741935483871,
      "acc_norm_stderr": 0.028443414226438316
    },
    "community|arabic_mmlu:high_school_chemistry|0": {
      "acc_norm": 0.3694581280788177,
      "acc_norm_stderr": 0.03395970381998575
    },
    "community|arabic_mmlu:high_school_computer_science|0": {
      "acc_norm": 0.41,
      "acc_norm_stderr": 0.04943110704237102
    },
    "community|arabic_mmlu:high_school_european_history|0": {
      "acc_norm": 0.24242424242424243,
      "acc_norm_stderr": 0.03346409881055953
    },
    "community|arabic_mmlu:high_school_geography|0": {
      "acc_norm": 0.5858585858585859,
      "acc_norm_stderr": 0.03509438348879629
    },
    "community|arabic_mmlu:high_school_government_and_politics|0": {
      "acc_norm": 0.41968911917098445,
      "acc_norm_stderr": 0.03561587327685884
    },
    "community|arabic_mmlu:high_school_macroeconomics|0": {
      "acc_norm": 0.43846153846153846,
      "acc_norm_stderr": 0.025158266016868575
    },
    "community|arabic_mmlu:high_school_mathematics|0": {
      "acc_norm": 0.3148148148148148,
      "acc_norm_stderr": 0.028317533496066475
    },
    "community|arabic_mmlu:high_school_microeconomics|0": {
      "acc_norm": 0.4117647058823529,
      "acc_norm_stderr": 0.031968769891957786
    },
    "community|arabic_mmlu:high_school_physics|0": {
      "acc_norm": 0.2980132450331126,
      "acc_norm_stderr": 0.037345356767871984
    },
    "community|arabic_mmlu:high_school_psychology|0": {
      "acc_norm": 0.44220183486238535,
      "acc_norm_stderr": 0.021293613207520212
    },
    "community|arabic_mmlu:high_school_statistics|0": {
      "acc_norm": 0.36574074074074076,
      "acc_norm_stderr": 0.03284738857647206
    },
    "community|arabic_mmlu:high_school_us_history|0": {
      "acc_norm": 0.24019607843137256,
      "acc_norm_stderr": 0.02998373305591362
    },
    "community|arabic_mmlu:high_school_world_history|0": {
      "acc_norm": 0.3206751054852321,
      "acc_norm_stderr": 0.0303819319499904
    },
    "community|arabic_mmlu:human_aging|0": {
      "acc_norm": 0.45739910313901344,
      "acc_norm_stderr": 0.033435777055830646
    },
    "community|arabic_mmlu:human_sexuality|0": {
      "acc_norm": 0.45038167938931295,
      "acc_norm_stderr": 0.04363643698524779
    },
    "community|arabic_mmlu:international_law|0": {
      "acc_norm": 0.5454545454545454,
      "acc_norm_stderr": 0.04545454545454546
    },
    "community|arabic_mmlu:jurisprudence|0": {
      "acc_norm": 0.49074074074074076,
      "acc_norm_stderr": 0.04832853553437055
    },
    "community|arabic_mmlu:logical_fallacies|0": {
      "acc_norm": 0.44171779141104295,
      "acc_norm_stderr": 0.03901591825836184
    },
    "community|arabic_mmlu:machine_learning|0": {
      "acc_norm": 0.30357142857142855,
      "acc_norm_stderr": 0.04364226155841044
    },
    "community|arabic_mmlu:management|0": {
      "acc_norm": 0.5048543689320388,
      "acc_norm_stderr": 0.04950504382128921
    },
    "community|arabic_mmlu:marketing|0": {
      "acc_norm": 0.6196581196581197,
      "acc_norm_stderr": 0.03180425204384099
    },
    "community|arabic_mmlu:medical_genetics|0": {
      "acc_norm": 0.44,
      "acc_norm_stderr": 0.04988876515698589
    },
    "community|arabic_mmlu:miscellaneous|0": {
      "acc_norm": 0.4878671775223499,
      "acc_norm_stderr": 0.017874698667491338
    },
    "community|arabic_mmlu:moral_disputes|0": {
      "acc_norm": 0.40173410404624277,
      "acc_norm_stderr": 0.02639410417764363
    },
    "community|arabic_mmlu:moral_scenarios|0": {
      "acc_norm": 0.2446927374301676,
      "acc_norm_stderr": 0.014378169884098445
    },
    "community|arabic_mmlu:nutrition|0": {
      "acc_norm": 0.4934640522875817,
      "acc_norm_stderr": 0.028627470550556047
    },
    "community|arabic_mmlu:philosophy|0": {
      "acc_norm": 0.45016077170418006,
      "acc_norm_stderr": 0.028256660723360173
    },
    "community|arabic_mmlu:prehistory|0": {
      "acc_norm": 0.41975308641975306,
      "acc_norm_stderr": 0.027460099557005135
    },
    "community|arabic_mmlu:professional_accounting|0": {
      "acc_norm": 0.2907801418439716,
      "acc_norm_stderr": 0.027090664368353178
    },
    "community|arabic_mmlu:professional_law|0": {
      "acc_norm": 0.2953063885267275,
      "acc_norm_stderr": 0.011651061936208828
    },
    "community|arabic_mmlu:professional_medicine|0": {
      "acc_norm": 0.2757352941176471,
      "acc_norm_stderr": 0.02714627193662517
    },
    "community|arabic_mmlu:professional_psychology|0": {
      "acc_norm": 0.35130718954248363,
      "acc_norm_stderr": 0.019312676065786544
    },
    "community|arabic_mmlu:public_relations|0": {
      "acc_norm": 0.4,
      "acc_norm_stderr": 0.0469237132203465
    },
    "community|arabic_mmlu:security_studies|0": {
      "acc_norm": 0.5102040816326531,
      "acc_norm_stderr": 0.03200255347893783
    },
    "community|arabic_mmlu:sociology|0": {
      "acc_norm": 0.5223880597014925,
      "acc_norm_stderr": 0.035319879302087305
    },
    "community|arabic_mmlu:us_foreign_policy|0": {
      "acc_norm": 0.59,
      "acc_norm_stderr": 0.04943110704237101
    },
    "community|arabic_mmlu:virology|0": {
      "acc_norm": 0.3614457831325301,
      "acc_norm_stderr": 0.0374005938202932
    },
    "community|arabic_mmlu:world_religions|0": {
      "acc_norm": 0.4444444444444444,
      "acc_norm_stderr": 0.038110796698335316
    },
    "community|arc_challenge_okapi_ar|0": {
      "acc_norm": 0.4043103448275862,
      "acc_norm_stderr": 0.014415368138264006
    },
    "community|arc_easy_ar|0": {
      "acc_norm": 0.40143824027072755,
      "acc_norm_stderr": 0.010083980405603682
    },
    "community|boolq_ar|0": {
      "acc_norm": 0.7380368098159509,
      "acc_norm_stderr": 0.007702238572925226
    },
    "community|copa_ext_ar|0": {
      "acc_norm": 0.5333333333333333,
      "acc_norm_stderr": 0.05288198530254015
    },
    "community|hellaswag_okapi_ar|0": {
      "acc_norm": 0.2718351324828263,
      "acc_norm_stderr": 0.004646040436466082
    },
    "community|openbook_qa_ext_ar|0": {
      "acc_norm": 0.43434343434343436,
      "acc_norm_stderr": 0.022301268794635357
    },
    "community|piqa_ar|0": {
      "acc_norm": 0.5897435897435898,
      "acc_norm_stderr": 0.011492025146854856
    },
    "community|race_ar|0": {
      "acc_norm": 0.39460336782308786,
      "acc_norm_stderr": 0.0069624973758675756
    },
    "community|sciq_ar|0": {
      "acc_norm": 0.5296482412060302,
      "acc_norm_stderr": 0.015831131676997665
    },
    "community|toxigen_ar|0": {
      "acc_norm": 0.4737967914438503,
      "acc_norm_stderr": 0.01633801782359255
    },
    "lighteval|xstory_cloze:ar|0": {
      "acc": 0.600264725347452,
      "acc_stderr": 0.012605764077627148
    },
    "community|acva:_average|0": {
      "acc_norm": 0.43152970899264886,
      "acc_norm_stderr": 0.04622646304672667
    },
    "community|alghafa:_average|0": {
      "acc_norm": 0.4888933039991989,
      "acc_norm_stderr": 0.022199118949409874
    },
    "community|arabic_mmlu:_average|0": {
      "acc_norm": 0.39816844607614504,
      "acc_norm_stderr": 0.03608724961556656
    },
    "all": {
      "acc_norm": 0.42481355357829986,
      "acc_norm_stderr": 0.0379411949861567,
      "acc": 0.600264725347452,
      "acc_stderr": 0.012605764077627148
    }
  },
  "versions": {
    "community|acva:Algeria|0": 0,
    "community|acva:Ancient_Egypt|0": 0,
    "community|acva:Arab_Empire|0": 0,
    "community|acva:Arabic_Architecture|0": 0,
    "community|acva:Arabic_Art|0": 0,
    "community|acva:Arabic_Astronomy|0": 0,
    "community|acva:Arabic_Calligraphy|0": 0,
    "community|acva:Arabic_Ceremony|0": 0,
    "community|acva:Arabic_Clothing|0": 0,
    "community|acva:Arabic_Culture|0": 0,
    "community|acva:Arabic_Food|0": 0,
    "community|acva:Arabic_Funeral|0": 0,
    "community|acva:Arabic_Geography|0": 0,
    "community|acva:Arabic_History|0": 0,
    "community|acva:Arabic_Language_Origin|0": 0,
    "community|acva:Arabic_Literature|0": 0,
    "community|acva:Arabic_Math|0": 0,
    "community|acva:Arabic_Medicine|0": 0,
    "community|acva:Arabic_Music|0": 0,
    "community|acva:Arabic_Ornament|0": 0,
    "community|acva:Arabic_Philosophy|0": 0,
    "community|acva:Arabic_Physics_and_Chemistry|0": 0,
    "community|acva:Arabic_Wedding|0": 0,
    "community|acva:Bahrain|0": 0,
    "community|acva:Comoros|0": 0,
    "community|acva:Egypt_modern|0": 0,
    "community|acva:InfluenceFromAncientEgypt|0": 0,
    "community|acva:InfluenceFromByzantium|0": 0,
    "community|acva:InfluenceFromChina|0": 0,
    "community|acva:InfluenceFromGreece|0": 0,
    "community|acva:InfluenceFromIslam|0": 0,
    "community|acva:InfluenceFromPersia|0": 0,
    "community|acva:InfluenceFromRome|0": 0,
    "community|acva:Iraq|0": 0,
    "community|acva:Islam_Education|0": 0,
    "community|acva:Islam_branches_and_schools|0": 0,
    "community|acva:Islamic_law_system|0": 0,
    "community|acva:Jordan|0": 0,
    "community|acva:Kuwait|0": 0,
    "community|acva:Lebanon|0": 0,
    "community|acva:Libya|0": 0,
    "community|acva:Mauritania|0": 0,
    "community|acva:Mesopotamia_civilization|0": 0,
    "community|acva:Morocco|0": 0,
    "community|acva:Oman|0": 0,
    "community|acva:Palestine|0": 0,
    "community|acva:Qatar|0": 0,
    "community|acva:Saudi_Arabia|0": 0,
    "community|acva:Somalia|0": 0,
    "community|acva:Sudan|0": 0,
    "community|acva:Syria|0": 0,
    "community|acva:Tunisia|0": 0,
    "community|acva:United_Arab_Emirates|0": 0,
    "community|acva:Yemen|0": 0,
    "community|acva:communication|0": 0,
    "community|acva:computer_and_phone|0": 0,
    "community|acva:daily_life|0": 0,
    "community|acva:entertainment|0": 0,
    "community|alghafa:mcq_exams_test_ar|0": 0,
    "community|alghafa:meta_ar_dialects|0": 0,
    "community|alghafa:meta_ar_msa|0": 0,
    "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0,
    "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0,
    "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0,
    "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0,
    "community|alghafa:multiple_choice_rating_sentiment_task|0": 0,
    "community|alghafa:multiple_choice_sentiment_task|0": 0,
    "community|arabic_exams|0": 0,
    "community|arabic_mmlu:abstract_algebra|0": 0,
    "community|arabic_mmlu:anatomy|0": 0,
    "community|arabic_mmlu:astronomy|0": 0,
    "community|arabic_mmlu:business_ethics|0": 0,
    "community|arabic_mmlu:clinical_knowledge|0": 0,
    "community|arabic_mmlu:college_biology|0": 0,
    "community|arabic_mmlu:college_chemistry|0": 0,
    "community|arabic_mmlu:college_computer_science|0": 0,
    "community|arabic_mmlu:college_mathematics|0": 0,
    "community|arabic_mmlu:college_medicine|0": 0,
    "community|arabic_mmlu:college_physics|0": 0,
    "community|arabic_mmlu:computer_security|0": 0,
    "community|arabic_mmlu:conceptual_physics|0": 0,
    "community|arabic_mmlu:econometrics|0": 0,
    "community|arabic_mmlu:electrical_engineering|0": 0,
    "community|arabic_mmlu:elementary_mathematics|0": 0,
    "community|arabic_mmlu:formal_logic|0": 0,
    "community|arabic_mmlu:global_facts|0": 0,
    "community|arabic_mmlu:high_school_biology|0": 0,
    "community|arabic_mmlu:high_school_chemistry|0": 0,
    "community|arabic_mmlu:high_school_computer_science|0": 0,
    "community|arabic_mmlu:high_school_european_history|0": 0,
    "community|arabic_mmlu:high_school_geography|0": 0,
    "community|arabic_mmlu:high_school_government_and_politics|0": 0,
    "community|arabic_mmlu:high_school_macroeconomics|0": 0,
    "community|arabic_mmlu:high_school_mathematics|0": 0,
    "community|arabic_mmlu:high_school_microeconomics|0": 0,
    "community|arabic_mmlu:high_school_physics|0": 0,
    "community|arabic_mmlu:high_school_psychology|0": 0,
    "community|arabic_mmlu:high_school_statistics|0": 0,
    "community|arabic_mmlu:high_school_us_history|0": 0,
    "community|arabic_mmlu:high_school_world_history|0": 0,
    "community|arabic_mmlu:human_aging|0": 0,
    "community|arabic_mmlu:human_sexuality|0": 0,
    "community|arabic_mmlu:international_law|0": 0,
    "community|arabic_mmlu:jurisprudence|0": 0,
    "community|arabic_mmlu:logical_fallacies|0": 0,
    "community|arabic_mmlu:machine_learning|0": 0,
    "community|arabic_mmlu:management|0": 0,
    "community|arabic_mmlu:marketing|0": 0,
    "community|arabic_mmlu:medical_genetics|0": 0,
    "community|arabic_mmlu:miscellaneous|0": 0,
    "community|arabic_mmlu:moral_disputes|0": 0,
    "community|arabic_mmlu:moral_scenarios|0": 0,
    "community|arabic_mmlu:nutrition|0": 0,
    "community|arabic_mmlu:philosophy|0": 0,
    "community|arabic_mmlu:prehistory|0": 0,
    "community|arabic_mmlu:professional_accounting|0": 0,
    "community|arabic_mmlu:professional_law|0": 0,
    "community|arabic_mmlu:professional_medicine|0": 0,
    "community|arabic_mmlu:professional_psychology|0": 0,
    "community|arabic_mmlu:public_relations|0": 0,
    "community|arabic_mmlu:security_studies|0": 0,
    "community|arabic_mmlu:sociology|0": 0,
    "community|arabic_mmlu:us_foreign_policy|0": 0,
    "community|arabic_mmlu:virology|0": 0,
    "community|arabic_mmlu:world_religions|0": 0,
    "community|arc_challenge_okapi_ar|0": 0,
    "community|arc_easy_ar|0": 0,
    "community|boolq_ar|0": 0,
    "community|copa_ext_ar|0": 0,
    "community|hellaswag_okapi_ar|0": 0,
    "community|openbook_qa_ext_ar|0": 0,
    "community|piqa_ar|0": 0,
    "community|race_ar|0": 0,
    "community|sciq_ar|0": 0,
    "community|toxigen_ar|0": 0,
    "lighteval|xstory_cloze:ar|0": 0
  },
  "config_tasks": {
    "community|acva:Algeria": {
      "name": "acva:Algeria",
      "prompt_function": "acva",
      "hf_repo": "OALL/ACVA",
      "hf_subset": "Algeria",
      "metric": [
        "loglikelihood_acc_norm"
      ],
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "stop_sequence": null,
      "output_regex": null,
      "num_samples": null,
      "frozen": false,
      "suite": [
        "community"
      ],
      "original_num_docs": 195,
      "effective_num_docs": 195,
      "trust_dataset": null,
      "must_remove_duplicate_docs": null,
      "version": 0
    },
    "community|acva:Ancient_Egypt": {
      "name": "acva:Ancient_Egypt",
      "prompt_function": "acva",
      "hf_repo": "OALL/ACVA",
      "hf_subset": "Ancient_Egypt",
      "metric": [
        "loglikelihood_acc_norm"
      ],
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "stop_sequence": null,
      "output_regex": null,
      "num_samples": null,
      "frozen": false,
      "suite": [
        "community"
      ],
      "original_num_docs": 315,
      "effective_num_docs": 315,
      "trust_dataset": null,
      "must_remove_duplicate_docs": null,
      "version": 0
    },
    "community|acva:Arab_Empire": {
      "name": "acva:Arab_Empire",
      "prompt_function": "acva",
      "hf_repo": "OALL/ACVA",
      "hf_subset": "Arab_Empire",
      "metric": [
        "loglikelihood_acc_norm"
      ],
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "stop_sequence": null,
      "output_regex": null,
      "num_samples": null,
      "frozen": false,
      "suite": [
        "community"
      ],
      "original_num_docs": 265,
      "effective_num_docs": 265,
      "trust_dataset": null,
      "must_remove_duplicate_docs": null,
      "version": 0
    },
    "community|acva:Arabic_Architecture": {
      "name": "acva:Arabic_Architecture",
      "prompt_function": "acva",
      "hf_repo": "OALL/ACVA",
      "hf_subset": "Arabic_Architecture",
      "metric": [
        "loglikelihood_acc_norm"
      ],
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "stop_sequence": null,
      "output_regex": null,
      "num_samples": null,
      "frozen": false,
      "suite": [
        "community"
      ],
      "original_num_docs": 195,
      "effective_num_docs": 195,
      "trust_dataset": null,
      "must_remove_duplicate_docs": null,
      "version": 0
    },
    "community|acva:Arabic_Art": {
      "name": "acva:Arabic_Art",
      "prompt_function": "acva",
      "hf_repo": "OALL/ACVA",
      "hf_subset": "Arabic_Art",
      "metric": [
        "loglikelihood_acc_norm"
      ],
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "stop_sequence": null,
      "output_regex": null,
      "num_samples": null,
      "frozen": false,
      "suite": [
        "community"
      ],
      "original_num_docs": 195,
      "effective_num_docs": 195,
      "trust_dataset": null,
      "must_remove_duplicate_docs": null,
      "version": 0
    },
    "community|acva:Arabic_Astronomy": {
      "name": "acva:Arabic_Astronomy",
      "prompt_function": "acva",
      "hf_repo": "OALL/ACVA",
      "hf_subset": "Arabic_Astronomy",
      "metric": [
        "loglikelihood_acc_norm"
      ],
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "stop_sequence": null,
      "output_regex": null,
      "num_samples": null,
      "frozen": false,
      "suite": [
        "community"
      ],
      "original_num_docs": 195,
      "effective_num_docs": 195,
      "trust_dataset": null,
      "must_remove_duplicate_docs": null,
      "version": 0
    },
    "community|acva:Arabic_Calligraphy": {
      "name": "acva:Arabic_Calligraphy",
      "prompt_function": "acva",
      "hf_repo": "OALL/ACVA",
      "hf_subset": "Arabic_Calligraphy",
      "metric": [
        "loglikelihood_acc_norm"
      ],
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "stop_sequence": null,
      "output_regex": null,
      "num_samples": null,
      "frozen": false,
      "suite": [
        "community"
      ],
      "original_num_docs": 255,
      "effective_num_docs": 255,
      "trust_dataset": null,
      "must_remove_duplicate_docs": null,
      "version": 0
    },
    "community|acva:Arabic_Ceremony": {
      "name": "acva:Arabic_Ceremony",
      "prompt_function": "acva",
      "hf_repo": "OALL/ACVA",
      "hf_subset": "Arabic_Ceremony",
      "metric": [
        "loglikelihood_acc_norm"
      ],
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "stop_sequence": null,
      "output_regex": null,
      "num_samples": null,
      "frozen": false,
      "suite": [
        "community"
      ],
      "original_num_docs": 185,
      "effective_num_docs": 185,
      "trust_dataset": null,
      "must_remove_duplicate_docs": null,
      "version": 0
    },
    "community|acva:Arabic_Clothing": {
      "name": "acva:Arabic_Clothing",
      "prompt_function": "acva",
      "hf_repo": "OALL/ACVA",
      "hf_subset": "Arabic_Clothing",
      "metric": [
        "loglikelihood_acc_norm"
      ],
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "stop_sequence": null,
      "output_regex": null,
      "num_samples": null,
      "frozen": false,
      "suite": [
        "community"
      ],
      "original_num_docs": 195,
      "effective_num_docs": 195,
      "trust_dataset": null,
      "must_remove_duplicate_docs": null,
      "version": 0
    },
    "community|acva:Arabic_Culture": {
      "name": "acva:Arabic_Culture",
      "prompt_function": "acva",
      "hf_repo": "OALL/ACVA",
      "hf_subset": "Arabic_Culture",
      "metric": [
        "loglikelihood_acc_norm"
      ],
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "stop_sequence": null,
      "output_regex": null,
      "num_samples": null,
      "frozen": false,
      "suite": [
        "community"
      ],
      "original_num_docs": 195,
      "effective_num_docs": 195,
      "trust_dataset": null,
      "must_remove_duplicate_docs": null,
      "version": 0
    },
    "community|acva:Arabic_Food": {
      "name": "acva:Arabic_Food",
      "prompt_function": "acva",
      "hf_repo": "OALL/ACVA",
      "hf_subset": "Arabic_Food",
      "metric": [
        "loglikelihood_acc_norm"
      ],
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "stop_sequence": null,
      "output_regex": null,
      "num_samples": null,
      "frozen": false,
      "suite": [
        "community"
      ],
      "original_num_docs": 195,
      "effective_num_docs": 195,
      "trust_dataset": null,
      "must_remove_duplicate_docs": null,
      "version": 0
    },
    "community|acva:Arabic_Funeral": {
      "name": "acva:Arabic_Funeral",
      "prompt_function": "acva",
      "hf_repo": "OALL/ACVA",
      "hf_subset": "Arabic_Funeral",
      "metric": [
        "loglikelihood_acc_norm"
      ],
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "stop_sequence": null,
      "output_regex": null,
      "num_samples": null,
      "frozen": false,
      "suite": [
        "community"
      ],
      "original_num_docs": 95,
      "effective_num_docs": 95,
      "trust_dataset": null,
      "must_remove_duplicate_docs": null,
      "version": 0
    },
"community|acva:Arabic_Geography": { | |
"name": "acva:Arabic_Geography", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Geography", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_History": { | |
"name": "acva:Arabic_History", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_History", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Language_Origin": { | |
"name": "acva:Arabic_Language_Origin", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Language_Origin", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 95, | |
"effective_num_docs": 95, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Literature": { | |
"name": "acva:Arabic_Literature", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Literature", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Math": { | |
"name": "acva:Arabic_Math", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Math", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Medicine": { | |
"name": "acva:Arabic_Medicine", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Medicine", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Music": { | |
"name": "acva:Arabic_Music", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Music", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 139, | |
"effective_num_docs": 139, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Ornament": { | |
"name": "acva:Arabic_Ornament", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Ornament", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Philosophy": { | |
"name": "acva:Arabic_Philosophy", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Philosophy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Physics_and_Chemistry": { | |
"name": "acva:Arabic_Physics_and_Chemistry", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Physics_and_Chemistry", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Arabic_Wedding": { | |
"name": "acva:Arabic_Wedding", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Arabic_Wedding", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Bahrain": { | |
"name": "acva:Bahrain", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Bahrain", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Comoros": { | |
"name": "acva:Comoros", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Comoros", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Egypt_modern": { | |
"name": "acva:Egypt_modern", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Egypt_modern", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 95, | |
"effective_num_docs": 95, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromAncientEgypt": { | |
"name": "acva:InfluenceFromAncientEgypt", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromAncientEgypt", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromByzantium": { | |
"name": "acva:InfluenceFromByzantium", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromByzantium", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromChina": { | |
"name": "acva:InfluenceFromChina", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromChina", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromGreece": { | |
"name": "acva:InfluenceFromGreece", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromGreece", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromIslam": { | |
"name": "acva:InfluenceFromIslam", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromIslam", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromPersia": { | |
"name": "acva:InfluenceFromPersia", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromPersia", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 175, | |
"effective_num_docs": 175, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:InfluenceFromRome": { | |
"name": "acva:InfluenceFromRome", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "InfluenceFromRome", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Iraq": { | |
"name": "acva:Iraq", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Iraq", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 85, | |
"effective_num_docs": 85, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Islam_Education": { | |
"name": "acva:Islam_Education", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Islam_Education", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Islam_branches_and_schools": { | |
"name": "acva:Islam_branches_and_schools", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Islam_branches_and_schools", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 175, | |
"effective_num_docs": 175, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Islamic_law_system": { | |
"name": "acva:Islamic_law_system", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Islamic_law_system", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Jordan": { | |
"name": "acva:Jordan", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Jordan", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Kuwait": { | |
"name": "acva:Kuwait", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Kuwait", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Lebanon": { | |
"name": "acva:Lebanon", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Lebanon", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Libya": { | |
"name": "acva:Libya", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Libya", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Mauritania": { | |
"name": "acva:Mauritania", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Mauritania", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Mesopotamia_civilization": { | |
"name": "acva:Mesopotamia_civilization", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Mesopotamia_civilization", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 155, | |
"effective_num_docs": 155, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Morocco": { | |
"name": "acva:Morocco", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Morocco", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Oman": { | |
"name": "acva:Oman", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Oman", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Palestine": { | |
"name": "acva:Palestine", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Palestine", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 85, | |
"effective_num_docs": 85, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Qatar": { | |
"name": "acva:Qatar", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Qatar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Saudi_Arabia": { | |
"name": "acva:Saudi_Arabia", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Saudi_Arabia", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 195, | |
"effective_num_docs": 195, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Somalia": { | |
"name": "acva:Somalia", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Somalia", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Sudan": { | |
"name": "acva:Sudan", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Sudan", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Syria": { | |
"name": "acva:Syria", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Syria", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Tunisia": { | |
"name": "acva:Tunisia", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Tunisia", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 45, | |
"effective_num_docs": 45, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:United_Arab_Emirates": { | |
"name": "acva:United_Arab_Emirates", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "United_Arab_Emirates", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 85, | |
"effective_num_docs": 85, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:Yemen": { | |
"name": "acva:Yemen", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "Yemen", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 10, | |
"effective_num_docs": 10, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:communication": { | |
"name": "acva:communication", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "communication", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 364, | |
"effective_num_docs": 364, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:computer_and_phone": { | |
"name": "acva:computer_and_phone", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "computer_and_phone", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 295, | |
"effective_num_docs": 295, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:daily_life": { | |
"name": "acva:daily_life", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "daily_life", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 337, | |
"effective_num_docs": 337, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|acva:entertainment": { | |
"name": "acva:entertainment", | |
"prompt_function": "acva", | |
"hf_repo": "OALL/ACVA", | |
"hf_subset": "entertainment", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 295, | |
"effective_num_docs": 295, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:mcq_exams_test_ar": { | |
"name": "alghafa:mcq_exams_test_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "mcq_exams_test_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 557, | |
"effective_num_docs": 557, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:meta_ar_dialects": { | |
"name": "alghafa:meta_ar_dialects", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "meta_ar_dialects", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 5395, | |
"effective_num_docs": 5395, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:meta_ar_msa": { | |
"name": "alghafa:meta_ar_msa", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "meta_ar_msa", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 895, | |
"effective_num_docs": 895, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": { | |
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_facts_truefalse_balanced_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 75, | |
"effective_num_docs": 75, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task": { | |
"name": "alghafa:multiple_choice_grounded_statement_soqal_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_grounded_statement_soqal_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 150, | |
"effective_num_docs": 150, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { | |
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 150, | |
"effective_num_docs": 150, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { | |
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 7995, | |
"effective_num_docs": 7995, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task": { | |
"name": "alghafa:multiple_choice_rating_sentiment_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_rating_sentiment_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 5995, | |
"effective_num_docs": 5995, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task": { | |
"name": "alghafa:multiple_choice_sentiment_task", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_sentiment_task", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1720, | |
"effective_num_docs": 1720, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_exams": { | |
"name": "arabic_exams", | |
"prompt_function": "arabic_exams", | |
"hf_repo": "OALL/Arabic_EXAMS", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 537, | |
"effective_num_docs": 537, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:abstract_algebra": { | |
"name": "arabic_mmlu:abstract_algebra", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "abstract_algebra", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:anatomy": { | |
"name": "arabic_mmlu:anatomy", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "anatomy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 135, | |
"effective_num_docs": 135, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:astronomy": { | |
"name": "arabic_mmlu:astronomy", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "astronomy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 152, | |
"effective_num_docs": 152, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:business_ethics": { | |
"name": "arabic_mmlu:business_ethics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "business_ethics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:clinical_knowledge": { | |
"name": "arabic_mmlu:clinical_knowledge", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "clinical_knowledge", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 265, | |
"effective_num_docs": 265, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_biology": { | |
"name": "arabic_mmlu:college_biology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_biology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 144, | |
"effective_num_docs": 144, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_chemistry": { | |
"name": "arabic_mmlu:college_chemistry", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_chemistry", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_computer_science": { | |
"name": "arabic_mmlu:college_computer_science", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_computer_science", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_mathematics": { | |
"name": "arabic_mmlu:college_mathematics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_mathematics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_medicine": { | |
"name": "arabic_mmlu:college_medicine", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_medicine", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 173, | |
"effective_num_docs": 173, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:college_physics": { | |
"name": "arabic_mmlu:college_physics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "college_physics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 102, | |
"effective_num_docs": 102, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:computer_security": { | |
"name": "arabic_mmlu:computer_security", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "computer_security", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:conceptual_physics": { | |
"name": "arabic_mmlu:conceptual_physics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "conceptual_physics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 235, | |
"effective_num_docs": 235, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:econometrics": { | |
"name": "arabic_mmlu:econometrics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "econometrics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 114, | |
"effective_num_docs": 114, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:electrical_engineering": { | |
"name": "arabic_mmlu:electrical_engineering", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "electrical_engineering", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:elementary_mathematics": { | |
"name": "arabic_mmlu:elementary_mathematics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "elementary_mathematics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 378, | |
"effective_num_docs": 378, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:formal_logic": { | |
"name": "arabic_mmlu:formal_logic", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "formal_logic", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 126, | |
"effective_num_docs": 126, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:global_facts": { | |
"name": "arabic_mmlu:global_facts", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "global_facts", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_biology": { | |
"name": "arabic_mmlu:high_school_biology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_biology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 310, | |
"effective_num_docs": 310, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_chemistry": { | |
"name": "arabic_mmlu:high_school_chemistry", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_chemistry", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 203, | |
"effective_num_docs": 203, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_computer_science": { | |
"name": "arabic_mmlu:high_school_computer_science", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_computer_science", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_european_history": { | |
"name": "arabic_mmlu:high_school_european_history", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_european_history", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 165, | |
"effective_num_docs": 165, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_geography": { | |
"name": "arabic_mmlu:high_school_geography", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_geography", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 198, | |
"effective_num_docs": 198, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_government_and_politics": { | |
"name": "arabic_mmlu:high_school_government_and_politics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_government_and_politics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 193, | |
"effective_num_docs": 193, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_macroeconomics": { | |
"name": "arabic_mmlu:high_school_macroeconomics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_macroeconomics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 390, | |
"effective_num_docs": 390, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_mathematics": { | |
"name": "arabic_mmlu:high_school_mathematics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_mathematics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 270, | |
"effective_num_docs": 270, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_microeconomics": { | |
"name": "arabic_mmlu:high_school_microeconomics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_microeconomics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 238, | |
"effective_num_docs": 238, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_physics": { | |
"name": "arabic_mmlu:high_school_physics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_physics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 151, | |
"effective_num_docs": 151, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_psychology": { | |
"name": "arabic_mmlu:high_school_psychology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_psychology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 545, | |
"effective_num_docs": 545, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_statistics": { | |
"name": "arabic_mmlu:high_school_statistics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_statistics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 216, | |
"effective_num_docs": 216, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_us_history": { | |
"name": "arabic_mmlu:high_school_us_history", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_us_history", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 204, | |
"effective_num_docs": 204, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:high_school_world_history": { | |
"name": "arabic_mmlu:high_school_world_history", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "high_school_world_history", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 237, | |
"effective_num_docs": 237, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:human_aging": { | |
"name": "arabic_mmlu:human_aging", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "human_aging", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 223, | |
"effective_num_docs": 223, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:human_sexuality": { | |
"name": "arabic_mmlu:human_sexuality", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "human_sexuality", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 131, | |
"effective_num_docs": 131, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:international_law": { | |
"name": "arabic_mmlu:international_law", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "international_law", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 121, | |
"effective_num_docs": 121, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:jurisprudence": { | |
"name": "arabic_mmlu:jurisprudence", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "jurisprudence", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 108, | |
"effective_num_docs": 108, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:logical_fallacies": { | |
"name": "arabic_mmlu:logical_fallacies", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "logical_fallacies", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 163, | |
"effective_num_docs": 163, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:machine_learning": { | |
"name": "arabic_mmlu:machine_learning", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "machine_learning", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 112, | |
"effective_num_docs": 112, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:management": { | |
"name": "arabic_mmlu:management", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "management", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 103, | |
"effective_num_docs": 103, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:marketing": { | |
"name": "arabic_mmlu:marketing", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "marketing", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 234, | |
"effective_num_docs": 234, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:medical_genetics": { | |
"name": "arabic_mmlu:medical_genetics", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "medical_genetics", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:miscellaneous": { | |
"name": "arabic_mmlu:miscellaneous", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "miscellaneous", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 783, | |
"effective_num_docs": 783, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:moral_disputes": { | |
"name": "arabic_mmlu:moral_disputes", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "moral_disputes", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 346, | |
"effective_num_docs": 346, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:moral_scenarios": { | |
"name": "arabic_mmlu:moral_scenarios", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "moral_scenarios", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 895, | |
"effective_num_docs": 895, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:nutrition": { | |
"name": "arabic_mmlu:nutrition", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "nutrition", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 306, | |
"effective_num_docs": 306, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:philosophy": { | |
"name": "arabic_mmlu:philosophy", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "philosophy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 311, | |
"effective_num_docs": 311, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:prehistory": { | |
"name": "arabic_mmlu:prehistory", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "prehistory", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 324, | |
"effective_num_docs": 324, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:professional_accounting": { | |
"name": "arabic_mmlu:professional_accounting", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "professional_accounting", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 282, | |
"effective_num_docs": 282, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:professional_law": { | |
"name": "arabic_mmlu:professional_law", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "professional_law", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1534, | |
"effective_num_docs": 1534, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:professional_medicine": { | |
"name": "arabic_mmlu:professional_medicine", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "professional_medicine", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 272, | |
"effective_num_docs": 272, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:professional_psychology": { | |
"name": "arabic_mmlu:professional_psychology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "professional_psychology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 612, | |
"effective_num_docs": 612, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:public_relations": { | |
"name": "arabic_mmlu:public_relations", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "public_relations", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 110, | |
"effective_num_docs": 110, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:security_studies": { | |
"name": "arabic_mmlu:security_studies", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "security_studies", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 245, | |
"effective_num_docs": 245, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:sociology": { | |
"name": "arabic_mmlu:sociology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "sociology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 201, | |
"effective_num_docs": 201, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:us_foreign_policy": { | |
"name": "arabic_mmlu:us_foreign_policy", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "us_foreign_policy", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:virology": { | |
"name": "arabic_mmlu:virology", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "virology", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 166, | |
"effective_num_docs": 166, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:world_religions": { | |
"name": "arabic_mmlu:world_religions", | |
"prompt_function": "mmlu_arabic", | |
"hf_repo": "OALL/Arabic_MMLU", | |
"hf_subset": "world_religions", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 171, | |
"effective_num_docs": 171, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arc_challenge_okapi_ar": { | |
"name": "arc_challenge_okapi_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "arc_challenge_okapi_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1160, | |
"effective_num_docs": 1160, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|arc_easy_ar": { | |
"name": "arc_easy_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "arc_easy_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 2364, | |
"effective_num_docs": 2364, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|boolq_ar": { | |
"name": "boolq_ar", | |
"prompt_function": "boolq_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "boolq_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 3260, | |
"effective_num_docs": 3260, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|copa_ext_ar": { | |
"name": "copa_ext_ar", | |
"prompt_function": "copa_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "copa_ext_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 90, | |
"effective_num_docs": 90, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|hellaswag_okapi_ar": { | |
"name": "hellaswag_okapi_ar", | |
"prompt_function": "hellaswag_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "hellaswag_okapi_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 9171, | |
"effective_num_docs": 9171, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|openbook_qa_ext_ar": { | |
"name": "openbook_qa_ext_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "openbook_qa_ext_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 495, | |
"effective_num_docs": 495, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|piqa_ar": { | |
"name": "piqa_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "piqa_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1833, | |
"effective_num_docs": 1833, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|race_ar": { | |
"name": "race_ar", | |
"prompt_function": "alghafa_prompt", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "race_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 4929, | |
"effective_num_docs": 4929, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|sciq_ar": { | |
"name": "sciq_ar", | |
"prompt_function": "sciq_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "sciq_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 995, | |
"effective_num_docs": 995, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"community|toxigen_ar": { | |
"name": "toxigen_ar", | |
"prompt_function": "toxigen_prompt_arabic", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", | |
"hf_subset": "toxigen_ar", | |
"metric": [ | |
"loglikelihood_acc_norm" | |
], | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"stop_sequence": null, | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 935, | |
"effective_num_docs": 935, | |
"trust_dataset": null, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
}, | |
"lighteval|xstory_cloze:ar": { | |
"name": "xstory_cloze:ar", | |
"prompt_function": "storycloze", | |
"hf_repo": "juletxara/xstory_cloze", | |
"hf_subset": "ar", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"training", | |
"eval" | |
], | |
"evaluation_splits": [ | |
"eval" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"num_samples": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 1511, | |
"effective_num_docs": 1511, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null, | |
"version": 0 | |
} | |
}, | |
"summary_tasks": { | |
"community|acva:Algeria|0": { | |
"hashes": { | |
"hash_examples": "da5a3003cd46f6f9", | |
"hash_full_prompts": "da5a3003cd46f6f9", | |
"hash_input_tokens": "cf34e1b6d37b7c0e", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Ancient_Egypt|0": { | |
"hashes": { | |
"hash_examples": "52d6f767fede195b", | |
"hash_full_prompts": "52d6f767fede195b", | |
"hash_input_tokens": "d8226de9658f889a", | |
"hash_cont_tokens": "c1e2b54cf8250f31" | |
}, | |
"truncated": 0, | |
"non_truncated": 315, | |
"padded": 630, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arab_Empire|0": { | |
"hashes": { | |
"hash_examples": "8dacff6a79804a75", | |
"hash_full_prompts": "8dacff6a79804a75", | |
"hash_input_tokens": "7ce1513245465395", | |
"hash_cont_tokens": "a57d793a5ea04c42" | |
}, | |
"truncated": 0, | |
"non_truncated": 265, | |
"padded": 530, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Architecture|0": { | |
"hashes": { | |
"hash_examples": "df286cd862d9f6bb", | |
"hash_full_prompts": "df286cd862d9f6bb", | |
"hash_input_tokens": "d6adf8f407240a2d", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Art|0": { | |
"hashes": { | |
"hash_examples": "112883d764118a49", | |
"hash_full_prompts": "112883d764118a49", | |
"hash_input_tokens": "46002e32a931f1e6", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Astronomy|0": { | |
"hashes": { | |
"hash_examples": "20dcdf2454bf8671", | |
"hash_full_prompts": "20dcdf2454bf8671", | |
"hash_input_tokens": "71caf6a354be7be7", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Calligraphy|0": { | |
"hashes": { | |
"hash_examples": "3a9f9d1ebe868a15", | |
"hash_full_prompts": "3a9f9d1ebe868a15", | |
"hash_input_tokens": "9ff6f9e6aaa6c4f2", | |
"hash_cont_tokens": "b6820eb4f3eef8a0" | |
}, | |
"truncated": 0, | |
"non_truncated": 255, | |
"padded": 510, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Ceremony|0": { | |
"hashes": { | |
"hash_examples": "c927630f8d2f44da", | |
"hash_full_prompts": "c927630f8d2f44da", | |
"hash_input_tokens": "306a3a67f2704ea7", | |
"hash_cont_tokens": "4bdb062f9ac7e83c" | |
}, | |
"truncated": 0, | |
"non_truncated": 185, | |
"padded": 370, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Clothing|0": { | |
"hashes": { | |
"hash_examples": "6ad0740c2ac6ac92", | |
"hash_full_prompts": "6ad0740c2ac6ac92", | |
"hash_input_tokens": "5c79224709257fd5", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Culture|0": { | |
"hashes": { | |
"hash_examples": "2177bd857ad872ae", | |
"hash_full_prompts": "2177bd857ad872ae", | |
"hash_input_tokens": "d60877153b87530a", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Food|0": { | |
"hashes": { | |
"hash_examples": "a6ada65b71d7c9c5", | |
"hash_full_prompts": "a6ada65b71d7c9c5", | |
"hash_input_tokens": "a42577fd5569bbb0", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Funeral|0": { | |
"hashes": { | |
"hash_examples": "fcee39dc29eaae91", | |
"hash_full_prompts": "fcee39dc29eaae91", | |
"hash_input_tokens": "4609e744beefc6f0", | |
"hash_cont_tokens": "d00f5e9bb7608898" | |
}, | |
"truncated": 0, | |
"non_truncated": 95, | |
"padded": 190, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Geography|0": { | |
"hashes": { | |
"hash_examples": "d36eda7c89231c02", | |
"hash_full_prompts": "d36eda7c89231c02", | |
"hash_input_tokens": "506a4e62243003fe", | |
"hash_cont_tokens": "fe3a24e435a5cdd7" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_History|0": { | |
"hashes": { | |
"hash_examples": "6354ac0d6db6a5fc", | |
"hash_full_prompts": "6354ac0d6db6a5fc", | |
"hash_input_tokens": "baf62d077286c23d", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Language_Origin|0": { | |
"hashes": { | |
"hash_examples": "ddc967c8aca34402", | |
"hash_full_prompts": "ddc967c8aca34402", | |
"hash_input_tokens": "d09aa2d7d0c65002", | |
"hash_cont_tokens": "d00f5e9bb7608898" | |
}, | |
"truncated": 0, | |
"non_truncated": 95, | |
"padded": 190, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Literature|0": { | |
"hashes": { | |
"hash_examples": "4305379fd46be5d8", | |
"hash_full_prompts": "4305379fd46be5d8", | |
"hash_input_tokens": "9bb5c06aaa887b70", | |
"hash_cont_tokens": "fe3a24e435a5cdd7" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Math|0": { | |
"hashes": { | |
"hash_examples": "dec621144f4d28be", | |
"hash_full_prompts": "dec621144f4d28be", | |
"hash_input_tokens": "a7e1347e7e1feecb", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Medicine|0": { | |
"hashes": { | |
"hash_examples": "2b344cdae9495ff2", | |
"hash_full_prompts": "2b344cdae9495ff2", | |
"hash_input_tokens": "49d2a1c07a81154f", | |
"hash_cont_tokens": "fe3a24e435a5cdd7" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Music|0": { | |
"hashes": { | |
"hash_examples": "0c54624d881944ce", | |
"hash_full_prompts": "0c54624d881944ce", | |
"hash_input_tokens": "6c1aebc27f16011d", | |
"hash_cont_tokens": "4b866375ab9b5507" | |
}, | |
"truncated": 0, | |
"non_truncated": 139, | |
"padded": 278, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Ornament|0": { | |
"hashes": { | |
"hash_examples": "251a4a84289d8bc1", | |
"hash_full_prompts": "251a4a84289d8bc1", | |
"hash_input_tokens": "513ee5fb0138ddb8", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Philosophy|0": { | |
"hashes": { | |
"hash_examples": "3f86fb9c94c13d22", | |
"hash_full_prompts": "3f86fb9c94c13d22", | |
"hash_input_tokens": "587bb56d058150aa", | |
"hash_cont_tokens": "fe3a24e435a5cdd7" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Physics_and_Chemistry|0": { | |
"hashes": { | |
"hash_examples": "8fec65af3695b62a", | |
"hash_full_prompts": "8fec65af3695b62a", | |
"hash_input_tokens": "7850774cb7eab47a", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Arabic_Wedding|0": { | |
"hashes": { | |
"hash_examples": "9cc3477184d7a4b8", | |
"hash_full_prompts": "9cc3477184d7a4b8", | |
"hash_input_tokens": "dedb00957f40e337", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Bahrain|0": { | |
"hashes": { | |
"hash_examples": "c92e803a0fa8b9e2", | |
"hash_full_prompts": "c92e803a0fa8b9e2", | |
"hash_input_tokens": "1bf9e92b0e76edcd", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Comoros|0": { | |
"hashes": { | |
"hash_examples": "06e5d4bba8e54cae", | |
"hash_full_prompts": "06e5d4bba8e54cae", | |
"hash_input_tokens": "20458e111686a065", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Egypt_modern|0": { | |
"hashes": { | |
"hash_examples": "c6ec369164f93446", | |
"hash_full_prompts": "c6ec369164f93446", | |
"hash_input_tokens": "73c27ab927da9bfa", | |
"hash_cont_tokens": "d00f5e9bb7608898" | |
}, | |
"truncated": 0, | |
"non_truncated": 95, | |
"padded": 190, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromAncientEgypt|0": { | |
"hashes": { | |
"hash_examples": "b9d56d74818b9bd4", | |
"hash_full_prompts": "b9d56d74818b9bd4", | |
"hash_input_tokens": "07698ab12d5143e6", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromByzantium|0": { | |
"hashes": { | |
"hash_examples": "5316c9624e7e59b8", | |
"hash_full_prompts": "5316c9624e7e59b8", | |
"hash_input_tokens": "199b2df634561c56", | |
"hash_cont_tokens": "fe3a24e435a5cdd7" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromChina|0": { | |
"hashes": { | |
"hash_examples": "87894bce95a56411", | |
"hash_full_prompts": "87894bce95a56411", | |
"hash_input_tokens": "662f80be3f86cee4", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromGreece|0": { | |
"hashes": { | |
"hash_examples": "0baa78a27e469312", | |
"hash_full_prompts": "0baa78a27e469312", | |
"hash_input_tokens": "6ca07feafef87986", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromIslam|0": { | |
"hashes": { | |
"hash_examples": "0c2532cde6541ff2", | |
"hash_full_prompts": "0c2532cde6541ff2", | |
"hash_input_tokens": "9f74df34a85822cf", | |
"hash_cont_tokens": "fe3a24e435a5cdd7" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 290, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromPersia|0": { | |
"hashes": { | |
"hash_examples": "efcd8112dc53c6e5", | |
"hash_full_prompts": "efcd8112dc53c6e5", | |
"hash_input_tokens": "aa5a19dba48d992e", | |
"hash_cont_tokens": "919736d4992ad983" | |
}, | |
"truncated": 0, | |
"non_truncated": 175, | |
"padded": 350, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:InfluenceFromRome|0": { | |
"hashes": { | |
"hash_examples": "9db61480e2e85fd3", | |
"hash_full_prompts": "9db61480e2e85fd3", | |
"hash_input_tokens": "d643945eeb920f37", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Iraq|0": { | |
"hashes": { | |
"hash_examples": "96dac3dfa8d2f41f", | |
"hash_full_prompts": "96dac3dfa8d2f41f", | |
"hash_input_tokens": "975f313b5f3f2bcd", | |
"hash_cont_tokens": "13c8aae5240b62db" | |
}, | |
"truncated": 0, | |
"non_truncated": 85, | |
"padded": 170, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Islam_Education|0": { | |
"hashes": { | |
"hash_examples": "0d80355f6a4cb51b", | |
"hash_full_prompts": "0d80355f6a4cb51b", | |
"hash_input_tokens": "19c9483d2c247172", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Islam_branches_and_schools|0": { | |
"hashes": { | |
"hash_examples": "5cedce1be2c3ad50", | |
"hash_full_prompts": "5cedce1be2c3ad50", | |
"hash_input_tokens": "3bc5dc3898302a2b", | |
"hash_cont_tokens": "919736d4992ad983" | |
}, | |
"truncated": 0, | |
"non_truncated": 175, | |
"padded": 350, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Islamic_law_system|0": { | |
"hashes": { | |
"hash_examples": "c0e6db8bc84e105e", | |
"hash_full_prompts": "c0e6db8bc84e105e", | |
"hash_input_tokens": "df3a0d6bb325ab10", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Jordan|0": { | |
"hashes": { | |
"hash_examples": "33deb5b4e5ddd6a1", | |
"hash_full_prompts": "33deb5b4e5ddd6a1", | |
"hash_input_tokens": "d2a44d7cbaba13ba", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Kuwait|0": { | |
"hashes": { | |
"hash_examples": "eb41773346d7c46c", | |
"hash_full_prompts": "eb41773346d7c46c", | |
"hash_input_tokens": "f63e71ac4468c114", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Lebanon|0": { | |
"hashes": { | |
"hash_examples": "25932dbf4c13d34f", | |
"hash_full_prompts": "25932dbf4c13d34f", | |
"hash_input_tokens": "fd9adf7827ad6168", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Libya|0": { | |
"hashes": { | |
"hash_examples": "f2c4db63cd402926", | |
"hash_full_prompts": "f2c4db63cd402926", | |
"hash_input_tokens": "362d4d5e8b251e05", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Mauritania|0": { | |
"hashes": { | |
"hash_examples": "8723ab5fdf286b54", | |
"hash_full_prompts": "8723ab5fdf286b54", | |
"hash_input_tokens": "e585e3661ee56928", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Mesopotamia_civilization|0": { | |
"hashes": { | |
"hash_examples": "c33f5502a6130ca9", | |
"hash_full_prompts": "c33f5502a6130ca9", | |
"hash_input_tokens": "22c84aa8bd9e0e29", | |
"hash_cont_tokens": "e00b82159a687ad7" | |
}, | |
"truncated": 0, | |
"non_truncated": 155, | |
"padded": 310, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Morocco|0": { | |
"hashes": { | |
"hash_examples": "588a5ed27904b1ae", | |
"hash_full_prompts": "588a5ed27904b1ae", | |
"hash_input_tokens": "2f8ee6e76e091c82", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Oman|0": { | |
"hashes": { | |
"hash_examples": "d447c52b94248b69", | |
"hash_full_prompts": "d447c52b94248b69", | |
"hash_input_tokens": "99f4fa805c8b00fc", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Palestine|0": { | |
"hashes": { | |
"hash_examples": "19197e076ad14ff5", | |
"hash_full_prompts": "19197e076ad14ff5", | |
"hash_input_tokens": "d4783451f2e67a9c", | |
"hash_cont_tokens": "13c8aae5240b62db" | |
}, | |
"truncated": 0, | |
"non_truncated": 85, | |
"padded": 170, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Qatar|0": { | |
"hashes": { | |
"hash_examples": "cf0736fa185b28f6", | |
"hash_full_prompts": "cf0736fa185b28f6", | |
"hash_input_tokens": "154be05ae57ec630", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Saudi_Arabia|0": { | |
"hashes": { | |
"hash_examples": "69beda6e1b85a08d", | |
"hash_full_prompts": "69beda6e1b85a08d", | |
"hash_input_tokens": "f7510b0e427465bc", | |
"hash_cont_tokens": "56dfe27ee01362a4" | |
}, | |
"truncated": 0, | |
"non_truncated": 195, | |
"padded": 390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Somalia|0": { | |
"hashes": { | |
"hash_examples": "b387940c65784fbf", | |
"hash_full_prompts": "b387940c65784fbf", | |
"hash_input_tokens": "7dbf0b0c1c33c102", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Sudan|0": { | |
"hashes": { | |
"hash_examples": "e02c32b9d2dd0c3f", | |
"hash_full_prompts": "e02c32b9d2dd0c3f", | |
"hash_input_tokens": "8ff3b32dda81c075", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Syria|0": { | |
"hashes": { | |
"hash_examples": "60a6f8fe73bda4bb", | |
"hash_full_prompts": "60a6f8fe73bda4bb", | |
"hash_input_tokens": "775fcbf3f1d05d59", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Tunisia|0": { | |
"hashes": { | |
"hash_examples": "34bb15d3830c5649", | |
"hash_full_prompts": "34bb15d3830c5649", | |
"hash_input_tokens": "a529a532a1fc2d78", | |
"hash_cont_tokens": "f2c5b8cf6c0e0976" | |
}, | |
"truncated": 0, | |
"non_truncated": 45, | |
"padded": 90, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:United_Arab_Emirates|0": { | |
"hashes": { | |
"hash_examples": "98a0ba78172718ce", | |
"hash_full_prompts": "98a0ba78172718ce", | |
"hash_input_tokens": "e6a54a883a0ea60c", | |
"hash_cont_tokens": "13c8aae5240b62db" | |
}, | |
"truncated": 0, | |
"non_truncated": 85, | |
"padded": 170, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:Yemen|0": { | |
"hashes": { | |
"hash_examples": "18e9bcccbb4ced7a", | |
"hash_full_prompts": "18e9bcccbb4ced7a", | |
"hash_input_tokens": "dc65955e0203a4b4", | |
"hash_cont_tokens": "12d6d46b075f79eb" | |
}, | |
"truncated": 0, | |
"non_truncated": 10, | |
"padded": 20, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:communication|0": { | |
"hashes": { | |
"hash_examples": "9ff28ab5eab5c97b", | |
"hash_full_prompts": "9ff28ab5eab5c97b", | |
"hash_input_tokens": "11d0ed676503e646", | |
"hash_cont_tokens": "b43f6e0ab3067882" | |
}, | |
"truncated": 0, | |
"non_truncated": 364, | |
"padded": 728, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:computer_and_phone|0": { | |
"hashes": { | |
"hash_examples": "37bac2f086aaf6c2", | |
"hash_full_prompts": "37bac2f086aaf6c2", | |
"hash_input_tokens": "23c0c09b3d0bc262", | |
"hash_cont_tokens": "b704dcf1720c5755" | |
}, | |
"truncated": 0, | |
"non_truncated": 295, | |
"padded": 590, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:daily_life|0": { | |
"hashes": { | |
"hash_examples": "bf07363c1c252e2f", | |
"hash_full_prompts": "bf07363c1c252e2f", | |
"hash_input_tokens": "fea9d47c055a4f03", | |
"hash_cont_tokens": "7308a3845f72e43e" | |
}, | |
"truncated": 0, | |
"non_truncated": 337, | |
"padded": 674, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|acva:entertainment|0": { | |
"hashes": { | |
"hash_examples": "37077bc00f0ac56a", | |
"hash_full_prompts": "37077bc00f0ac56a", | |
"hash_input_tokens": "bd9798a73c1157d8", | |
"hash_cont_tokens": "b704dcf1720c5755" | |
}, | |
"truncated": 0, | |
"non_truncated": 295, | |
"padded": 590, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:mcq_exams_test_ar|0": { | |
"hashes": { | |
"hash_examples": "c07a5e78c5c0b8fe", | |
"hash_full_prompts": "c07a5e78c5c0b8fe", | |
"hash_input_tokens": "3fc638e51e19efe9", | |
"hash_cont_tokens": "26ae3f89a0edcdfb" | |
}, | |
"truncated": 0, | |
"non_truncated": 557, | |
"padded": 2228, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:meta_ar_dialects|0": { | |
"hashes": { | |
"hash_examples": "c0b6081f83e14064", | |
"hash_full_prompts": "c0b6081f83e14064", | |
"hash_input_tokens": "431b791d068c6a9c", | |
"hash_cont_tokens": "03352aec2d5da2f5" | |
}, | |
"truncated": 0, | |
"non_truncated": 5395, | |
"padded": 21572, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:meta_ar_msa|0": { | |
"hashes": { | |
"hash_examples": "64eb78a7c5b7484b", | |
"hash_full_prompts": "64eb78a7c5b7484b", | |
"hash_input_tokens": "1cb34f5fb89dd3bd", | |
"hash_cont_tokens": "0d40ae6c7006bfbb" | |
}, | |
"truncated": 0, | |
"non_truncated": 895, | |
"padded": 3560, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { | |
"hashes": { | |
"hash_examples": "54fc3502c1c02c06", | |
"hash_full_prompts": "54fc3502c1c02c06", | |
"hash_input_tokens": "03a06ee3c7bb2722", | |
"hash_cont_tokens": "b82b619647644015" | |
}, | |
"truncated": 0, | |
"non_truncated": 75, | |
"padded": 148, | |
"non_padded": 2, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { | |
"hashes": { | |
"hash_examples": "46572d83696552ae", | |
"hash_full_prompts": "46572d83696552ae", | |
"hash_input_tokens": "68d66c15a284507a", | |
"hash_cont_tokens": "ac9a83fe9d8d99e7" | |
}, | |
"truncated": 0, | |
"non_truncated": 150, | |
"padded": 747, | |
"non_padded": 3, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { | |
"hashes": { | |
"hash_examples": "f430d97ff715bc1c", | |
"hash_full_prompts": "f430d97ff715bc1c", | |
"hash_input_tokens": "d17c09fab24697c8", | |
"hash_cont_tokens": "247c8a3ba0092d16" | |
}, | |
"truncated": 0, | |
"non_truncated": 150, | |
"padded": 749, | |
"non_padded": 1, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { | |
"hashes": { | |
"hash_examples": "6b70a7416584f98c", | |
"hash_full_prompts": "6b70a7416584f98c", | |
"hash_input_tokens": "fae2d3b37b00a29b", | |
"hash_cont_tokens": "de5b69881e081318" | |
}, | |
"truncated": 0, | |
"non_truncated": 7995, | |
"padded": 15990, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task|0": { | |
"hashes": { | |
"hash_examples": "bc2005cc9d2f436e", | |
"hash_full_prompts": "bc2005cc9d2f436e", | |
"hash_input_tokens": "e54ee9c8f839a713", | |
"hash_cont_tokens": "32a650fcf067a32b" | |
}, | |
"truncated": 0, | |
"non_truncated": 5995, | |
"padded": 17985, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task|0": { | |
"hashes": { | |
"hash_examples": "6fb0e254ea5945d8", | |
"hash_full_prompts": "6fb0e254ea5945d8", | |
"hash_input_tokens": "bf686a5fc6722c17", | |
"hash_cont_tokens": "cd4fbcc7c800da80" | |
}, | |
"truncated": 0, | |
"non_truncated": 1720, | |
"padded": 5160, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_exams|0": { | |
"hashes": { | |
"hash_examples": "6d721df351722656", | |
"hash_full_prompts": "6d721df351722656", | |
"hash_input_tokens": "8e3f4b1e4c12be4e", | |
"hash_cont_tokens": "3952fd8478cb2901" | |
}, | |
"truncated": 0, | |
"non_truncated": 537, | |
"padded": 2148, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:abstract_algebra|0": { | |
"hashes": { | |
"hash_examples": "f2ddca8f45c0a511", | |
"hash_full_prompts": "f2ddca8f45c0a511", | |
"hash_input_tokens": "28ef6d8bbc1a5869", | |
"hash_cont_tokens": "771d84ba6655ec08" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:anatomy|0": { | |
"hashes": { | |
"hash_examples": "dfdbc1b83107668d", | |
"hash_full_prompts": "dfdbc1b83107668d", | |
"hash_input_tokens": "9e0ba43b6a8c173a", | |
"hash_cont_tokens": "3b3a04ac2381cf2e" | |
}, | |
"truncated": 0, | |
"non_truncated": 135, | |
"padded": 540, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:astronomy|0": { | |
"hashes": { | |
"hash_examples": "9736a606002a848e", | |
"hash_full_prompts": "9736a606002a848e", | |
"hash_input_tokens": "32df8c5603b3c57f", | |
"hash_cont_tokens": "c4e209dd858f1eb5" | |
}, | |
"truncated": 0, | |
"non_truncated": 152, | |
"padded": 608, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:business_ethics|0": { | |
"hashes": { | |
"hash_examples": "735e452fbb6dc63d", | |
"hash_full_prompts": "735e452fbb6dc63d", | |
"hash_input_tokens": "9c2c8c6ea665e676", | |
"hash_cont_tokens": "771d84ba6655ec08" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:clinical_knowledge|0": { | |
"hashes": { | |
"hash_examples": "6ab0ca4da98aedcf", | |
"hash_full_prompts": "6ab0ca4da98aedcf", | |
"hash_input_tokens": "5fd4f2d37eb6a7a6", | |
"hash_cont_tokens": "27d080ddb72a91fb" | |
}, | |
"truncated": 0, | |
"non_truncated": 265, | |
"padded": 1060, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_biology|0": { | |
"hashes": { | |
"hash_examples": "17e4e390848018a4", | |
"hash_full_prompts": "17e4e390848018a4", | |
"hash_input_tokens": "7da9f9821d061ac1", | |
"hash_cont_tokens": "36fd225818f99fc4" | |
}, | |
"truncated": 0, | |
"non_truncated": 144, | |
"padded": 576, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_chemistry|0": { | |
"hashes": { | |
"hash_examples": "4abb169f6dfd234b", | |
"hash_full_prompts": "4abb169f6dfd234b", | |
"hash_input_tokens": "b97f25b9f7385046", | |
"hash_cont_tokens": "771d84ba6655ec08" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_computer_science|0": { | |
"hashes": { | |
"hash_examples": "a369e2e941358a1e", | |
"hash_full_prompts": "a369e2e941358a1e", | |
"hash_input_tokens": "befad991e0c748cd", | |
"hash_cont_tokens": "771d84ba6655ec08" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_mathematics|0": { | |
"hashes": { | |
"hash_examples": "d7be03b8b6020bff", | |
"hash_full_prompts": "d7be03b8b6020bff", | |
"hash_input_tokens": "952cc72316765527", | |
"hash_cont_tokens": "771d84ba6655ec08" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_medicine|0": { | |
"hashes": { | |
"hash_examples": "0518a00f097346bf", | |
"hash_full_prompts": "0518a00f097346bf", | |
"hash_input_tokens": "e0df2c6a6efb258b", | |
"hash_cont_tokens": "c2807dc27dcf6153" | |
}, | |
"truncated": 0, | |
"non_truncated": 173, | |
"padded": 692, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:college_physics|0": { | |
"hashes": { | |
"hash_examples": "5d842cd49bc70e12", | |
"hash_full_prompts": "5d842cd49bc70e12", | |
"hash_input_tokens": "87123b22c916340b", | |
"hash_cont_tokens": "7b17d820dbbaa6cb" | |
}, | |
"truncated": 0, | |
"non_truncated": 102, | |
"padded": 408, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:computer_security|0": { | |
"hashes": { | |
"hash_examples": "8e85d9f85be9b32f", | |
"hash_full_prompts": "8e85d9f85be9b32f", | |
"hash_input_tokens": "6c465013fb9cda40", | |
"hash_cont_tokens": "771d84ba6655ec08" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:conceptual_physics|0": { | |
"hashes": { | |
"hash_examples": "7964b55a0a49502b", | |
"hash_full_prompts": "7964b55a0a49502b", | |
"hash_input_tokens": "0d41a07353984246", | |
"hash_cont_tokens": "2ff86a5d10a2127a" | |
}, | |
"truncated": 0, | |
"non_truncated": 235, | |
"padded": 940, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:econometrics|0": { | |
"hashes": { | |
"hash_examples": "1e192eae38347257", | |
"hash_full_prompts": "1e192eae38347257", | |
"hash_input_tokens": "5a533a7cc9d6aa1a", | |
"hash_cont_tokens": "901aa9a4c60559f3" | |
}, | |
"truncated": 0, | |
"non_truncated": 114, | |
"padded": 456, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:electrical_engineering|0": { | |
"hashes": { | |
"hash_examples": "cf97671d5c441da1", | |
"hash_full_prompts": "cf97671d5c441da1", | |
"hash_input_tokens": "3f6d24e6dfe99578", | |
"hash_cont_tokens": "5ae653ddb5ac9494" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 580, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:elementary_mathematics|0": { | |
"hashes": { | |
"hash_examples": "6f49107ed43c40c5", | |
"hash_full_prompts": "6f49107ed43c40c5", | |
"hash_input_tokens": "38e0feea97b26cfa", | |
"hash_cont_tokens": "d00485b6b9b1a7b2" | |
}, | |
"truncated": 0, | |
"non_truncated": 378, | |
"padded": 1512, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:formal_logic|0": { | |
"hashes": { | |
"hash_examples": "7922c376008ba77b", | |
"hash_full_prompts": "7922c376008ba77b", | |
"hash_input_tokens": "e5bf445488e6945b", | |
"hash_cont_tokens": "6a74353d78fb2049" | |
}, | |
"truncated": 0, | |
"non_truncated": 126, | |
"padded": 504, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:global_facts|0": { | |
"hashes": { | |
"hash_examples": "11f9813185047d5b", | |
"hash_full_prompts": "11f9813185047d5b", | |
"hash_input_tokens": "d1b2ef6d7402dbcf", | |
"hash_cont_tokens": "771d84ba6655ec08" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_biology|0": { | |
"hashes": { | |
"hash_examples": "2a804b1d90cbe66e", | |
"hash_full_prompts": "2a804b1d90cbe66e", | |
"hash_input_tokens": "3147e97d56fccd88", | |
"hash_cont_tokens": "e1b38a431c7cfdf2" | |
}, | |
"truncated": 0, | |
"non_truncated": 310, | |
"padded": 1240, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_chemistry|0": { | |
"hashes": { | |
"hash_examples": "0032168adabc53b4", | |
"hash_full_prompts": "0032168adabc53b4", | |
"hash_input_tokens": "2a7b1a64c4ff33f4", | |
"hash_cont_tokens": "d30d155b83b8beee" | |
}, | |
"truncated": 0, | |
"non_truncated": 203, | |
"padded": 808, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_computer_science|0": { | |
"hashes": { | |
"hash_examples": "f2fb8740f9df980f", | |
"hash_full_prompts": "f2fb8740f9df980f", | |
"hash_input_tokens": "d7a5c74ea45e0945", | |
"hash_cont_tokens": "771d84ba6655ec08" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_european_history|0": { | |
"hashes": { | |
"hash_examples": "73509021e7e66435", | |
"hash_full_prompts": "73509021e7e66435", | |
"hash_input_tokens": "e1243077f4a1ac72", | |
"hash_cont_tokens": "aa387b55778f7d85" | |
}, | |
"truncated": 0, | |
"non_truncated": 165, | |
"padded": 660, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_geography|0": { | |
"hashes": { | |
"hash_examples": "9e08d1894940ff42", | |
"hash_full_prompts": "9e08d1894940ff42", | |
"hash_input_tokens": "72914e2fc6446159", | |
"hash_cont_tokens": "ea572b82c41be702" | |
}, | |
"truncated": 0, | |
"non_truncated": 198, | |
"padded": 792, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_government_and_politics|0": { | |
"hashes": { | |
"hash_examples": "64b7e97817ca6c76", | |
"hash_full_prompts": "64b7e97817ca6c76", | |
"hash_input_tokens": "ccee857c955b9112", | |
"hash_cont_tokens": "d535b26a86b9a9d3" | |
}, | |
"truncated": 0, | |
"non_truncated": 193, | |
"padded": 772, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_macroeconomics|0": { | |
"hashes": { | |
"hash_examples": "9f582da8534bd2ef", | |
"hash_full_prompts": "9f582da8534bd2ef", | |
"hash_input_tokens": "0a9008c385e3fc83", | |
"hash_cont_tokens": "606c1a2137551055" | |
}, | |
"truncated": 0, | |
"non_truncated": 390, | |
"padded": 1560, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_mathematics|0": { | |
"hashes": { | |
"hash_examples": "fd54f1c10d423c51", | |
"hash_full_prompts": "fd54f1c10d423c51", | |
"hash_input_tokens": "228c2db842b37740", | |
"hash_cont_tokens": "f18ea16235393e7a" | |
}, | |
"truncated": 0, | |
"non_truncated": 270, | |
"padded": 1080, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_microeconomics|0": { | |
"hashes": { | |
"hash_examples": "7037896925aaf42f", | |
"hash_full_prompts": "7037896925aaf42f", | |
"hash_input_tokens": "b26a7b218413e6f5", | |
"hash_cont_tokens": "05a90a8afcf3afc3" | |
}, | |
"truncated": 0, | |
"non_truncated": 238, | |
"padded": 952, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_physics|0": { | |
"hashes": { | |
"hash_examples": "60c3776215167dae", | |
"hash_full_prompts": "60c3776215167dae", | |
"hash_input_tokens": "7252f5b7dd02c812", | |
"hash_cont_tokens": "847282b0877be22e" | |
}, | |
"truncated": 0, | |
"non_truncated": 151, | |
"padded": 604, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_psychology|0": { | |
"hashes": { | |
"hash_examples": "61176bfd5da1298f", | |
"hash_full_prompts": "61176bfd5da1298f", | |
"hash_input_tokens": "4aa67f55df6a9d04", | |
"hash_cont_tokens": "30a296640c9037d1" | |
}, | |
"truncated": 0, | |
"non_truncated": 545, | |
"padded": 2180, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_statistics|0": { | |
"hashes": { | |
"hash_examples": "40dfeebd1ea10f76", | |
"hash_full_prompts": "40dfeebd1ea10f76", | |
"hash_input_tokens": "6d96cadedcfa9cd5", | |
"hash_cont_tokens": "a259777479a52fa3" | |
}, | |
"truncated": 0, | |
"non_truncated": 216, | |
"padded": 864, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_us_history|0": { | |
"hashes": { | |
"hash_examples": "03daa510ba917f4d", | |
"hash_full_prompts": "03daa510ba917f4d", | |
"hash_input_tokens": "de9a577c60478fad", | |
"hash_cont_tokens": "35915add7ad519d1" | |
}, | |
"truncated": 0, | |
"non_truncated": 204, | |
"padded": 816, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:high_school_world_history|0": { | |
"hashes": { | |
"hash_examples": "be075ffd579f43c2", | |
"hash_full_prompts": "be075ffd579f43c2", | |
"hash_input_tokens": "6b330929383210fc", | |
"hash_cont_tokens": "4766d81466995bda" | |
}, | |
"truncated": 0, | |
"non_truncated": 237, | |
"padded": 913, | |
"non_padded": 35, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:human_aging|0": { | |
"hashes": { | |
"hash_examples": "caa5b69f640bd1ef", | |
"hash_full_prompts": "caa5b69f640bd1ef", | |
"hash_input_tokens": "b07ed541d5abed48", | |
"hash_cont_tokens": "7bf358fd838eb005" | |
}, | |
"truncated": 0, | |
"non_truncated": 223, | |
"padded": 888, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:human_sexuality|0": { | |
"hashes": { | |
"hash_examples": "5ed2e38fb25a3767", | |
"hash_full_prompts": "5ed2e38fb25a3767", | |
"hash_input_tokens": "176ebd4b4e9dad9b", | |
"hash_cont_tokens": "8c45b597fb2c4a20" | |
}, | |
"truncated": 0, | |
"non_truncated": 131, | |
"padded": 520, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:international_law|0": { | |
"hashes": { | |
"hash_examples": "4e3e9e28d1b96484", | |
"hash_full_prompts": "4e3e9e28d1b96484", | |
"hash_input_tokens": "731ede4c4adba370", | |
"hash_cont_tokens": "2c8c36f61bba92e8" | |
}, | |
"truncated": 0, | |
"non_truncated": 121, | |
"padded": 480, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:jurisprudence|0": { | |
"hashes": { | |
"hash_examples": "e264b755366310b3", | |
"hash_full_prompts": "e264b755366310b3", | |
"hash_input_tokens": "0f93ab3bfd264e9f", | |
"hash_cont_tokens": "43df6122a6ce9ca7" | |
}, | |
"truncated": 0, | |
"non_truncated": 108, | |
"padded": 428, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:logical_fallacies|0": { | |
"hashes": { | |
"hash_examples": "a4ab6965a3e38071", | |
"hash_full_prompts": "a4ab6965a3e38071", | |
"hash_input_tokens": "18b1691d3fb72961", | |
"hash_cont_tokens": "636d46707e1a84a1" | |
}, | |
"truncated": 0, | |
"non_truncated": 163, | |
"padded": 648, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:machine_learning|0": { | |
"hashes": { | |
"hash_examples": "b92320efa6636b40", | |
"hash_full_prompts": "b92320efa6636b40", | |
"hash_input_tokens": "5779ee529e8378eb", | |
"hash_cont_tokens": "733df8274472cbd6" | |
}, | |
"truncated": 0, | |
"non_truncated": 112, | |
"padded": 440, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:management|0": { | |
"hashes": { | |
"hash_examples": "c9ee4872a850fe20", | |
"hash_full_prompts": "c9ee4872a850fe20", | |
"hash_input_tokens": "5215cd6223fbf745", | |
"hash_cont_tokens": "fc3cf8f15a104c82" | |
}, | |
"truncated": 0, | |
"non_truncated": 103, | |
"padded": 412, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:marketing|0": { | |
"hashes": { | |
"hash_examples": "0c151b70f6a047e3", | |
"hash_full_prompts": "0c151b70f6a047e3", | |
"hash_input_tokens": "9bf5280d207b7241", | |
"hash_cont_tokens": "b8698fec039e309c" | |
}, | |
"truncated": 0, | |
"non_truncated": 234, | |
"padded": 932, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:medical_genetics|0": { | |
"hashes": { | |
"hash_examples": "513f6cb8fca3a24e", | |
"hash_full_prompts": "513f6cb8fca3a24e", | |
"hash_input_tokens": "953c500bd7b829c1", | |
"hash_cont_tokens": "771d84ba6655ec08" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 396, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:miscellaneous|0": { | |
"hashes": { | |
"hash_examples": "259a190d635331db", | |
"hash_full_prompts": "259a190d635331db", | |
"hash_input_tokens": "7f998c85e7bfcb93", | |
"hash_cont_tokens": "87f2df51ba8a6c8c" | |
}, | |
"truncated": 0, | |
"non_truncated": 783, | |
"padded": 3071, | |
"non_padded": 61, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:moral_disputes|0": { | |
"hashes": { | |
"hash_examples": "b85052c48a0b7bc3", | |
"hash_full_prompts": "b85052c48a0b7bc3", | |
"hash_input_tokens": "081ea785aa250037", | |
"hash_cont_tokens": "2003018f8616dc35" | |
}, | |
"truncated": 0, | |
"non_truncated": 346, | |
"padded": 1364, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:moral_scenarios|0": { | |
"hashes": { | |
"hash_examples": "28d0b069ef00dd00", | |
"hash_full_prompts": "28d0b069ef00dd00", | |
"hash_input_tokens": "46777ed2d0482771", | |
"hash_cont_tokens": "1ae2c6d8baa46e2a" | |
}, | |
"truncated": 0, | |
"non_truncated": 895, | |
"padded": 3576, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:nutrition|0": { | |
"hashes": { | |
"hash_examples": "00c9bc5f1d305b2f", | |
"hash_full_prompts": "00c9bc5f1d305b2f", | |
"hash_input_tokens": "4dac156fd3ebaaf3", | |
"hash_cont_tokens": "7b7aef3aad672dcb" | |
}, | |
"truncated": 0, | |
"non_truncated": 306, | |
"padded": 1192, | |
"non_padded": 32, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:philosophy|0": { | |
"hashes": { | |
"hash_examples": "a458c08454a3fd5f", | |
"hash_full_prompts": "a458c08454a3fd5f", | |
"hash_input_tokens": "8703cdd7b9048fff", | |
"hash_cont_tokens": "ffde3f7e9cb8ce4f" | |
}, | |
"truncated": 0, | |
"non_truncated": 311, | |
"padded": 1208, | |
"non_padded": 36, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:prehistory|0": { | |
"hashes": { | |
"hash_examples": "d6a0ecbdbb670e9c", | |
"hash_full_prompts": "d6a0ecbdbb670e9c", | |
"hash_input_tokens": "e13aa1f329a53084", | |
"hash_cont_tokens": "bda8eff659818de4" | |
}, | |
"truncated": 0, | |
"non_truncated": 324, | |
"padded": 1268, | |
"non_padded": 28, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:professional_accounting|0": { | |
"hashes": { | |
"hash_examples": "b4a95fe480b6540e", | |
"hash_full_prompts": "b4a95fe480b6540e", | |
"hash_input_tokens": "4e74ad202296ba2f", | |
"hash_cont_tokens": "5f7423e268242363" | |
}, | |
"truncated": 0, | |
"non_truncated": 282, | |
"padded": 1124, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:professional_law|0": { | |
"hashes": { | |
"hash_examples": "c2be9651cdbdde3b", | |
"hash_full_prompts": "c2be9651cdbdde3b", | |
"hash_input_tokens": "bf4cf3473a77df22", | |
"hash_cont_tokens": "60c68d5e3ae45dc1" | |
}, | |
"truncated": 0, | |
"non_truncated": 1534, | |
"padded": 6084, | |
"non_padded": 52, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:professional_medicine|0": { | |
"hashes": { | |
"hash_examples": "26ce92416288f273", | |
"hash_full_prompts": "26ce92416288f273", | |
"hash_input_tokens": "5530d00e58227b41", | |
"hash_cont_tokens": "f52e4396cb5cdef2" | |
}, | |
"truncated": 0, | |
"non_truncated": 272, | |
"padded": 1068, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:professional_psychology|0": { | |
"hashes": { | |
"hash_examples": "71ea5f182ea9a641", | |
"hash_full_prompts": "71ea5f182ea9a641", | |
"hash_input_tokens": "4e1c2585289a0aa8", | |
"hash_cont_tokens": "a0ebf929cab4bd2c" | |
}, | |
"truncated": 0, | |
"non_truncated": 612, | |
"padded": 2400, | |
"non_padded": 48, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:public_relations|0": { | |
"hashes": { | |
"hash_examples": "125adc21f91f8d77", | |
"hash_full_prompts": "125adc21f91f8d77", | |
"hash_input_tokens": "2e24aa41f72783fd", | |
"hash_cont_tokens": "09ba719b073994df" | |
}, | |
"truncated": 0, | |
"non_truncated": 110, | |
"padded": 420, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:security_studies|0": { | |
"hashes": { | |
"hash_examples": "3c18b216c099fb26", | |
"hash_full_prompts": "3c18b216c099fb26", | |
"hash_input_tokens": "bfc192cebdb10f2e", | |
"hash_cont_tokens": "4854dd7ac1df8c7c" | |
}, | |
"truncated": 0, | |
"non_truncated": 245, | |
"padded": 976, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:sociology|0": { | |
"hashes": { | |
"hash_examples": "3f2a9634cef7417d", | |
"hash_full_prompts": "3f2a9634cef7417d", | |
"hash_input_tokens": "66b409f696c5c1ea", | |
"hash_cont_tokens": "ea4c0af969f2373c" | |
}, | |
"truncated": 0, | |
"non_truncated": 201, | |
"padded": 796, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:us_foreign_policy|0": { | |
"hashes": { | |
"hash_examples": "22249da54056475e", | |
"hash_full_prompts": "22249da54056475e", | |
"hash_input_tokens": "5409fbc7f9321f4d", | |
"hash_cont_tokens": "771d84ba6655ec08" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 388, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:virology|0": { | |
"hashes": { | |
"hash_examples": "9d194b9471dc624e", | |
"hash_full_prompts": "9d194b9471dc624e", | |
"hash_input_tokens": "d05cdb39f29e3bca", | |
"hash_cont_tokens": "810369902bd4e47e" | |
}, | |
"truncated": 0, | |
"non_truncated": 166, | |
"padded": 648, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:world_religions|0": { | |
"hashes": { | |
"hash_examples": "229e5fe50082b064", | |
"hash_full_prompts": "229e5fe50082b064", | |
"hash_input_tokens": "e3e5c12907179a34", | |
"hash_cont_tokens": "fd19d689989ad4e2" | |
}, | |
"truncated": 0, | |
"non_truncated": 171, | |
"padded": 664, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arc_challenge_okapi_ar|0": { | |
"hashes": { | |
"hash_examples": "ab893807673bc355", | |
"hash_full_prompts": "ab893807673bc355", | |
"hash_input_tokens": "b5aac7e0dd2d9fc3", | |
"hash_cont_tokens": "05d6059fd7f0a574" | |
}, | |
"truncated": 0, | |
"non_truncated": 1160, | |
"padded": 4547, | |
"non_padded": 93, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arc_easy_ar|0": { | |
"hashes": { | |
"hash_examples": "acb688624acc3d04", | |
"hash_full_prompts": "acb688624acc3d04", | |
"hash_input_tokens": "5bf55ab37beffe71", | |
"hash_cont_tokens": "af4f49218caa0c1b" | |
}, | |
"truncated": 0, | |
"non_truncated": 2364, | |
"padded": 9277, | |
"non_padded": 179, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|boolq_ar|0": { | |
"hashes": { | |
"hash_examples": "48355a67867e0c32", | |
"hash_full_prompts": "48355a67867e0c32", | |
"hash_input_tokens": "e8d8ce1ebd403aa8", | |
"hash_cont_tokens": "f51d666013e03070" | |
}, | |
"truncated": 0, | |
"non_truncated": 3260, | |
"padded": 6461, | |
"non_padded": 59, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|copa_ext_ar|0": { | |
"hashes": { | |
"hash_examples": "9bb83301bb72eecf", | |
"hash_full_prompts": "9bb83301bb72eecf", | |
"hash_input_tokens": "6c2e94b565ba9a59", | |
"hash_cont_tokens": "eb3992fbe17ceaa2" | |
}, | |
"truncated": 0, | |
"non_truncated": 90, | |
"padded": 180, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|hellaswag_okapi_ar|0": { | |
"hashes": { | |
"hash_examples": "6e8cf57a322dfadd", | |
"hash_full_prompts": "6e8cf57a322dfadd", | |
"hash_input_tokens": "15c9b2f7a9253f86", | |
"hash_cont_tokens": "03680d546b4eb5c5" | |
}, | |
"truncated": 0, | |
"non_truncated": 9171, | |
"padded": 36584, | |
"non_padded": 100, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|openbook_qa_ext_ar|0": { | |
"hashes": { | |
"hash_examples": "923d41eb0aca93eb", | |
"hash_full_prompts": "923d41eb0aca93eb", | |
"hash_input_tokens": "468b7a3b4cdb4f3d", | |
"hash_cont_tokens": "764164f5bfb60831" | |
}, | |
"truncated": 0, | |
"non_truncated": 495, | |
"padded": 1951, | |
"non_padded": 29, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|piqa_ar|0": { | |
"hashes": { | |
"hash_examples": "94bc205a520d3ea0", | |
"hash_full_prompts": "94bc205a520d3ea0", | |
"hash_input_tokens": "cf5b4ba39ad65645", | |
"hash_cont_tokens": "1b912774aa918718" | |
}, | |
"truncated": 0, | |
"non_truncated": 1833, | |
"padded": 3621, | |
"non_padded": 45, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|race_ar|0": { | |
"hashes": { | |
"hash_examples": "de65130bae647516", | |
"hash_full_prompts": "de65130bae647516", | |
"hash_input_tokens": "014cddb6e149d719", | |
"hash_cont_tokens": "86cc015f91d5e5da" | |
}, | |
"truncated": 0, | |
"non_truncated": 4929, | |
"padded": 19693, | |
"non_padded": 23, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|sciq_ar|0": { | |
"hashes": { | |
"hash_examples": "179d4dac7d8921be", | |
"hash_full_prompts": "179d4dac7d8921be", | |
"hash_input_tokens": "dec896c885561962", | |
"hash_cont_tokens": "646158c8a4cec322" | |
}, | |
"truncated": 0, | |
"non_truncated": 995, | |
"padded": 3959, | |
"non_padded": 21, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|toxigen_ar|0": { | |
"hashes": { | |
"hash_examples": "1e139513004a9a2e", | |
"hash_full_prompts": "1e139513004a9a2e", | |
"hash_input_tokens": "50eed599a9692288", | |
"hash_cont_tokens": "4323a1b5cd5f70c3" | |
}, | |
"truncated": 0, | |
"non_truncated": 935, | |
"padded": 1845, | |
"non_padded": 25, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|xstory_cloze:ar|0": { | |
"hashes": { | |
"hash_examples": "865426a22c787481", | |
"hash_full_prompts": "865426a22c787481", | |
"hash_input_tokens": "a48b1f741e516e76", | |
"hash_cont_tokens": "103b4ba4d5ea0ee0" | |
}, | |
"truncated": 0, | |
"non_truncated": 1511, | |
"padded": 2974, | |
"non_padded": 48, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
} | |
}, | |
"summary_general": { | |
"hashes": { | |
"hash_examples": "25f08289c783449a", | |
"hash_full_prompts": "25f08289c783449a", | |
"hash_input_tokens": "8f41f2f320137465", | |
"hash_cont_tokens": "4ed6b0c63b793715" | |
}, | |
"truncated": 0, | |
"non_truncated": 72964, | |
"padded": 234507, | |
"non_padded": 1116, | |
"num_truncated_few_shots": 0 | |
} | |
} |