|
{ |
|
"config_general": { |
|
"lighteval_sha": "?", |
|
"num_fewshot_seeds": 1, |
|
"override_batch_size": 1, |
|
"max_samples": null, |
|
"job_id": "", |
|
"start_time": 591.993522852, |
|
"end_time": 17552.919336291, |
|
"total_evaluation_time_secondes": "16960.925813439", |
|
"model_name": "MaziyarPanahi/Llama-3-8B-Instruct-v0.9", |
|
"model_sha": "ddf91fdc0a3ab5e5d76864f1c4cf44e5adacd565", |
|
"model_dtype": "torch.bfloat16", |
|
"model_size": "14.96 GB", |
|
"config": null |
|
}, |
|
"results": { |
|
"community|acva:Algeria|0": { |
|
"acc_norm": 0.5384615384615384, |
|
"acc_norm_stderr": 0.0357915435254457 |
|
}, |
|
"community|acva:Ancient_Egypt|0": { |
|
"acc_norm": 0.06031746031746032, |
|
"acc_norm_stderr": 0.013435297210747581 |
|
}, |
|
"community|acva:Arab_Empire|0": { |
|
"acc_norm": 0.3169811320754717, |
|
"acc_norm_stderr": 0.028637235639800925 |
|
}, |
|
"community|acva:Arabic_Architecture|0": { |
|
"acc_norm": 0.46153846153846156, |
|
"acc_norm_stderr": 0.0357915435254457 |
|
}, |
|
"community|acva:Arabic_Art|0": { |
|
"acc_norm": 0.38461538461538464, |
|
"acc_norm_stderr": 0.03492896993742304 |
|
}, |
|
"community|acva:Arabic_Astronomy|0": { |
|
"acc_norm": 0.4717948717948718, |
|
"acc_norm_stderr": 0.035840746749208334 |
|
}, |
|
"community|acva:Arabic_Calligraphy|0": { |
|
"acc_norm": 0.6078431372549019, |
|
"acc_norm_stderr": 0.030634359906451972 |
|
}, |
|
"community|acva:Arabic_Ceremony|0": { |
|
"acc_norm": 0.5351351351351351, |
|
"acc_norm_stderr": 0.036769369509486984 |
|
}, |
|
"community|acva:Arabic_Clothing|0": { |
|
"acc_norm": 0.5076923076923077, |
|
"acc_norm_stderr": 0.03589365940635212 |
|
}, |
|
"community|acva:Arabic_Culture|0": { |
|
"acc_norm": 0.3076923076923077, |
|
"acc_norm_stderr": 0.03313653039774173 |
|
}, |
|
"community|acva:Arabic_Food|0": { |
|
"acc_norm": 0.49230769230769234, |
|
"acc_norm_stderr": 0.03589365940635213 |
|
}, |
|
"community|acva:Arabic_Funeral|0": { |
|
"acc_norm": 0.4105263157894737, |
|
"acc_norm_stderr": 0.050738635645512106 |
|
}, |
|
"community|acva:Arabic_Geography|0": { |
|
"acc_norm": 0.6068965517241379, |
|
"acc_norm_stderr": 0.040703290137070705 |
|
}, |
|
"community|acva:Arabic_History|0": { |
|
"acc_norm": 0.3076923076923077, |
|
"acc_norm_stderr": 0.03313653039774173 |
|
}, |
|
"community|acva:Arabic_Language_Origin|0": { |
|
"acc_norm": 0.5684210526315789, |
|
"acc_norm_stderr": 0.051085926733089475 |
|
}, |
|
"community|acva:Arabic_Literature|0": { |
|
"acc_norm": 0.6275862068965518, |
|
"acc_norm_stderr": 0.04028731532947559 |
|
}, |
|
"community|acva:Arabic_Math|0": { |
|
"acc_norm": 0.3128205128205128, |
|
"acc_norm_stderr": 0.03328755065724854 |
|
}, |
|
"community|acva:Arabic_Medicine|0": { |
|
"acc_norm": 0.4896551724137931, |
|
"acc_norm_stderr": 0.04165774775728763 |
|
}, |
|
"community|acva:Arabic_Music|0": { |
|
"acc_norm": 0.23741007194244604, |
|
"acc_norm_stderr": 0.036220593237998276 |
|
}, |
|
"community|acva:Arabic_Ornament|0": { |
|
"acc_norm": 0.5435897435897435, |
|
"acc_norm_stderr": 0.03576123096991215 |
|
}, |
|
"community|acva:Arabic_Philosophy|0": { |
|
"acc_norm": 0.5862068965517241, |
|
"acc_norm_stderr": 0.04104269211806232 |
|
}, |
|
"community|acva:Arabic_Physics_and_Chemistry|0": { |
|
"acc_norm": 0.5333333333333333, |
|
"acc_norm_stderr": 0.03581804596782232 |
|
}, |
|
"community|acva:Arabic_Wedding|0": { |
|
"acc_norm": 0.4307692307692308, |
|
"acc_norm_stderr": 0.03555213252058761 |
|
}, |
|
"community|acva:Bahrain|0": { |
|
"acc_norm": 0.3333333333333333, |
|
"acc_norm_stderr": 0.07106690545187012 |
|
}, |
|
"community|acva:Comoros|0": { |
|
"acc_norm": 0.37777777777777777, |
|
"acc_norm_stderr": 0.07309112127323451 |
|
}, |
|
"community|acva:Egypt_modern|0": { |
|
"acc_norm": 0.4, |
|
"acc_norm_stderr": 0.050529115263991134 |
|
}, |
|
"community|acva:InfluenceFromAncientEgypt|0": { |
|
"acc_norm": 0.6205128205128205, |
|
"acc_norm_stderr": 0.034839592663653586 |
|
}, |
|
"community|acva:InfluenceFromByzantium|0": { |
|
"acc_norm": 0.7241379310344828, |
|
"acc_norm_stderr": 0.03724563619774632 |
|
}, |
|
"community|acva:InfluenceFromChina|0": { |
|
"acc_norm": 0.27692307692307694, |
|
"acc_norm_stderr": 0.032127058190759304 |
|
}, |
|
"community|acva:InfluenceFromGreece|0": { |
|
"acc_norm": 0.6461538461538462, |
|
"acc_norm_stderr": 0.03433004254147036 |
|
}, |
|
"community|acva:InfluenceFromIslam|0": { |
|
"acc_norm": 0.3931034482758621, |
|
"acc_norm_stderr": 0.0407032901370707 |
|
}, |
|
"community|acva:InfluenceFromPersia|0": { |
|
"acc_norm": 0.7142857142857143, |
|
"acc_norm_stderr": 0.03424737867752743 |
|
}, |
|
"community|acva:InfluenceFromRome|0": { |
|
"acc_norm": 0.5897435897435898, |
|
"acc_norm_stderr": 0.0353149371232667 |
|
}, |
|
"community|acva:Iraq|0": { |
|
"acc_norm": 0.5647058823529412, |
|
"acc_norm_stderr": 0.05409572080481032 |
|
}, |
|
"community|acva:Islam_Education|0": { |
|
"acc_norm": 0.47692307692307695, |
|
"acc_norm_stderr": 0.03585965308947409 |
|
}, |
|
"community|acva:Islam_branches_and_schools|0": { |
|
"acc_norm": 0.4342857142857143, |
|
"acc_norm_stderr": 0.037576101528126626 |
|
}, |
|
"community|acva:Islamic_law_system|0": { |
|
"acc_norm": 0.4461538461538462, |
|
"acc_norm_stderr": 0.03568913546569232 |
|
}, |
|
"community|acva:Jordan|0": { |
|
"acc_norm": 0.35555555555555557, |
|
"acc_norm_stderr": 0.07216392363431012 |
|
}, |
|
"community|acva:Kuwait|0": { |
|
"acc_norm": 0.3111111111111111, |
|
"acc_norm_stderr": 0.06979205927323111 |
|
}, |
|
"community|acva:Lebanon|0": { |
|
"acc_norm": 0.26666666666666666, |
|
"acc_norm_stderr": 0.06666666666666667 |
|
}, |
|
"community|acva:Libya|0": { |
|
"acc_norm": 0.4444444444444444, |
|
"acc_norm_stderr": 0.07491109582924914 |
|
}, |
|
"community|acva:Mauritania|0": { |
|
"acc_norm": 0.4222222222222222, |
|
"acc_norm_stderr": 0.07446027270295805 |
|
}, |
|
"community|acva:Mesopotamia_civilization|0": { |
|
"acc_norm": 0.5483870967741935, |
|
"acc_norm_stderr": 0.040102036161810406 |
|
}, |
|
"community|acva:Morocco|0": { |
|
"acc_norm": 0.3111111111111111, |
|
"acc_norm_stderr": 0.06979205927323111 |
|
}, |
|
"community|acva:Oman|0": { |
|
"acc_norm": 0.2222222222222222, |
|
"acc_norm_stderr": 0.06267511942419626 |
|
}, |
|
"community|acva:Palestine|0": { |
|
"acc_norm": 0.3176470588235294, |
|
"acc_norm_stderr": 0.05079691179733582 |
|
}, |
|
"community|acva:Qatar|0": { |
|
"acc_norm": 0.4222222222222222, |
|
"acc_norm_stderr": 0.07446027270295806 |
|
}, |
|
"community|acva:Saudi_Arabia|0": { |
|
"acc_norm": 0.38974358974358975, |
|
"acc_norm_stderr": 0.035014247762563705 |
|
}, |
|
"community|acva:Somalia|0": { |
|
"acc_norm": 0.4, |
|
"acc_norm_stderr": 0.07385489458759965 |
|
}, |
|
"community|acva:Sudan|0": { |
|
"acc_norm": 0.37777777777777777, |
|
"acc_norm_stderr": 0.07309112127323451 |
|
}, |
|
"community|acva:Syria|0": { |
|
"acc_norm": 0.4, |
|
"acc_norm_stderr": 0.07385489458759964 |
|
}, |
|
"community|acva:Tunisia|0": { |
|
"acc_norm": 0.3111111111111111, |
|
"acc_norm_stderr": 0.06979205927323111 |
|
}, |
|
"community|acva:United_Arab_Emirates|0": { |
|
"acc_norm": 0.24705882352941178, |
|
"acc_norm_stderr": 0.047058823529411785 |
|
}, |
|
"community|acva:Yemen|0": { |
|
"acc_norm": 0.2, |
|
"acc_norm_stderr": 0.13333333333333333 |
|
}, |
|
"community|acva:communication|0": { |
|
"acc_norm": 0.42857142857142855, |
|
"acc_norm_stderr": 0.025974025974025955 |
|
}, |
|
"community|acva:computer_and_phone|0": { |
|
"acc_norm": 0.45084745762711864, |
|
"acc_norm_stderr": 0.02901934773187137 |
|
}, |
|
"community|acva:daily_life|0": { |
|
"acc_norm": 0.19584569732937684, |
|
"acc_norm_stderr": 0.02164995877092107 |
|
}, |
|
"community|acva:entertainment|0": { |
|
"acc_norm": 0.23728813559322035, |
|
"acc_norm_stderr": 0.024811018803776317 |
|
}, |
|
"community|alghafa:mcq_exams_test_ar|0": { |
|
"acc_norm": 0.348294434470377, |
|
"acc_norm_stderr": 0.020205127091126894 |
|
}, |
|
"community|alghafa:meta_ar_dialects|0": { |
|
"acc_norm": 0.3562557924003707, |
|
"acc_norm_stderr": 0.006520518977697724 |
|
}, |
|
"community|alghafa:meta_ar_msa|0": { |
|
"acc_norm": 0.42793296089385474, |
|
"acc_norm_stderr": 0.01654788799741611 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { |
|
"acc_norm": 0.5466666666666666, |
|
"acc_norm_stderr": 0.05787010410349153 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { |
|
"acc_norm": 0.62, |
|
"acc_norm_stderr": 0.03976440686960229 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { |
|
"acc_norm": 0.4533333333333333, |
|
"acc_norm_stderr": 0.04078279527880808 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { |
|
"acc_norm": 0.7798624140087554, |
|
"acc_norm_stderr": 0.004634191995886568 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": { |
|
"acc_norm": 0.5172643869891577, |
|
"acc_norm_stderr": 0.006454351152965721 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task|0": { |
|
"acc_norm": 0.38488372093023254, |
|
"acc_norm_stderr": 0.01173560391457323 |
|
}, |
|
"community|arabic_exams|0": { |
|
"acc_norm": 0.4227188081936685, |
|
"acc_norm_stderr": 0.021337183637583045 |
|
}, |
|
"community|arabic_mmlu:abstract_algebra|0": { |
|
"acc_norm": 0.38, |
|
"acc_norm_stderr": 0.048783173121456316 |
|
}, |
|
"community|arabic_mmlu:anatomy|0": { |
|
"acc_norm": 0.3037037037037037, |
|
"acc_norm_stderr": 0.03972552884785137 |
|
}, |
|
"community|arabic_mmlu:astronomy|0": { |
|
"acc_norm": 0.4407894736842105, |
|
"acc_norm_stderr": 0.04040311062490437 |
|
}, |
|
"community|arabic_mmlu:business_ethics|0": { |
|
"acc_norm": 0.52, |
|
"acc_norm_stderr": 0.050211673156867795 |
|
}, |
|
"community|arabic_mmlu:clinical_knowledge|0": { |
|
"acc_norm": 0.5433962264150943, |
|
"acc_norm_stderr": 0.030656748696739435 |
|
}, |
|
"community|arabic_mmlu:college_biology|0": { |
|
"acc_norm": 0.3958333333333333, |
|
"acc_norm_stderr": 0.04089465449325582 |
|
}, |
|
"community|arabic_mmlu:college_chemistry|0": { |
|
"acc_norm": 0.35, |
|
"acc_norm_stderr": 0.0479372485441102 |
|
}, |
|
"community|arabic_mmlu:college_computer_science|0": { |
|
"acc_norm": 0.34, |
|
"acc_norm_stderr": 0.04760952285695235 |
|
}, |
|
"community|arabic_mmlu:college_mathematics|0": { |
|
"acc_norm": 0.29, |
|
"acc_norm_stderr": 0.045604802157206845 |
|
}, |
|
"community|arabic_mmlu:college_medicine|0": { |
|
"acc_norm": 0.37572254335260113, |
|
"acc_norm_stderr": 0.036928207672648664 |
|
}, |
|
"community|arabic_mmlu:college_physics|0": { |
|
"acc_norm": 0.28431372549019607, |
|
"acc_norm_stderr": 0.04488482852329017 |
|
}, |
|
"community|arabic_mmlu:computer_security|0": { |
|
"acc_norm": 0.51, |
|
"acc_norm_stderr": 0.05024183937956912 |
|
}, |
|
"community|arabic_mmlu:conceptual_physics|0": { |
|
"acc_norm": 0.41702127659574467, |
|
"acc_norm_stderr": 0.03223276266711712 |
|
}, |
|
"community|arabic_mmlu:econometrics|0": { |
|
"acc_norm": 0.2894736842105263, |
|
"acc_norm_stderr": 0.04266339443159394 |
|
}, |
|
"community|arabic_mmlu:electrical_engineering|0": { |
|
"acc_norm": 0.4482758620689655, |
|
"acc_norm_stderr": 0.04144311810878151 |
|
}, |
|
"community|arabic_mmlu:elementary_mathematics|0": { |
|
"acc_norm": 0.3968253968253968, |
|
"acc_norm_stderr": 0.0251971010742465 |
|
}, |
|
"community|arabic_mmlu:formal_logic|0": { |
|
"acc_norm": 0.31746031746031744, |
|
"acc_norm_stderr": 0.04163453031302859 |
|
}, |
|
"community|arabic_mmlu:global_facts|0": { |
|
"acc_norm": 0.34, |
|
"acc_norm_stderr": 0.04760952285695236 |
|
}, |
|
"community|arabic_mmlu:high_school_biology|0": { |
|
"acc_norm": 0.4838709677419355, |
|
"acc_norm_stderr": 0.028429203176724555 |
|
}, |
|
"community|arabic_mmlu:high_school_chemistry|0": { |
|
"acc_norm": 0.35467980295566504, |
|
"acc_norm_stderr": 0.0336612448905145 |
|
}, |
|
"community|arabic_mmlu:high_school_computer_science|0": { |
|
"acc_norm": 0.49, |
|
"acc_norm_stderr": 0.05024183937956912 |
|
}, |
|
"community|arabic_mmlu:high_school_european_history|0": { |
|
"acc_norm": 0.2, |
|
"acc_norm_stderr": 0.031234752377721175 |
|
}, |
|
"community|arabic_mmlu:high_school_geography|0": { |
|
"acc_norm": 0.51010101010101, |
|
"acc_norm_stderr": 0.035616254886737454 |
|
}, |
|
"community|arabic_mmlu:high_school_government_and_politics|0": { |
|
"acc_norm": 0.43523316062176165, |
|
"acc_norm_stderr": 0.03578038165008586 |
|
}, |
|
"community|arabic_mmlu:high_school_macroeconomics|0": { |
|
"acc_norm": 0.4282051282051282, |
|
"acc_norm_stderr": 0.025088301454694834 |
|
}, |
|
"community|arabic_mmlu:high_school_mathematics|0": { |
|
"acc_norm": 0.29259259259259257, |
|
"acc_norm_stderr": 0.02773896963217609 |
|
}, |
|
"community|arabic_mmlu:high_school_microeconomics|0": { |
|
"acc_norm": 0.39915966386554624, |
|
"acc_norm_stderr": 0.03181110032413925 |
|
}, |
|
"community|arabic_mmlu:high_school_physics|0": { |
|
"acc_norm": 0.32450331125827814, |
|
"acc_norm_stderr": 0.038227469376587525 |
|
}, |
|
"community|arabic_mmlu:high_school_psychology|0": { |
|
"acc_norm": 0.47522935779816516, |
|
"acc_norm_stderr": 0.021410999753635918 |
|
}, |
|
"community|arabic_mmlu:high_school_statistics|0": { |
|
"acc_norm": 0.3333333333333333, |
|
"acc_norm_stderr": 0.0321495214780275 |
|
}, |
|
"community|arabic_mmlu:high_school_us_history|0": { |
|
"acc_norm": 0.29901960784313725, |
|
"acc_norm_stderr": 0.03213325717373617 |
|
}, |
|
"community|arabic_mmlu:high_school_world_history|0": { |
|
"acc_norm": 0.34177215189873417, |
|
"acc_norm_stderr": 0.030874537537553617 |
|
}, |
|
"community|arabic_mmlu:human_aging|0": { |
|
"acc_norm": 0.48878923766816146, |
|
"acc_norm_stderr": 0.033549366530984746 |
|
}, |
|
"community|arabic_mmlu:human_sexuality|0": { |
|
"acc_norm": 0.5190839694656488, |
|
"acc_norm_stderr": 0.04382094705550989 |
|
}, |
|
"community|arabic_mmlu:international_law|0": { |
|
"acc_norm": 0.6942148760330579, |
|
"acc_norm_stderr": 0.04205953933884122 |
|
}, |
|
"community|arabic_mmlu:jurisprudence|0": { |
|
"acc_norm": 0.5648148148148148, |
|
"acc_norm_stderr": 0.04792898170907061 |
|
}, |
|
"community|arabic_mmlu:logical_fallacies|0": { |
|
"acc_norm": 0.4601226993865031, |
|
"acc_norm_stderr": 0.03915857291436971 |
|
}, |
|
"community|arabic_mmlu:machine_learning|0": { |
|
"acc_norm": 0.32142857142857145, |
|
"acc_norm_stderr": 0.04432804055291519 |
|
}, |
|
"community|arabic_mmlu:management|0": { |
|
"acc_norm": 0.5631067961165048, |
|
"acc_norm_stderr": 0.049111471073657764 |
|
}, |
|
"community|arabic_mmlu:marketing|0": { |
|
"acc_norm": 0.6495726495726496, |
|
"acc_norm_stderr": 0.031256108244218796 |
|
}, |
|
"community|arabic_mmlu:medical_genetics|0": { |
|
"acc_norm": 0.42, |
|
"acc_norm_stderr": 0.04960449637488584 |
|
}, |
|
"community|arabic_mmlu:miscellaneous|0": { |
|
"acc_norm": 0.5185185185185185, |
|
"acc_norm_stderr": 0.017867695938429774 |
|
}, |
|
"community|arabic_mmlu:moral_disputes|0": { |
|
"acc_norm": 0.4595375722543353, |
|
"acc_norm_stderr": 0.026830805998952236 |
|
}, |
|
"community|arabic_mmlu:moral_scenarios|0": { |
|
"acc_norm": 0.26145251396648045, |
|
"acc_norm_stderr": 0.014696599650364548 |
|
}, |
|
"community|arabic_mmlu:nutrition|0": { |
|
"acc_norm": 0.5392156862745098, |
|
"acc_norm_stderr": 0.028541722692618874 |
|
}, |
|
"community|arabic_mmlu:philosophy|0": { |
|
"acc_norm": 0.4887459807073955, |
|
"acc_norm_stderr": 0.028390897396863526 |
|
}, |
|
"community|arabic_mmlu:prehistory|0": { |
|
"acc_norm": 0.45987654320987653, |
|
"acc_norm_stderr": 0.027731022753539274 |
|
}, |
|
"community|arabic_mmlu:professional_accounting|0": { |
|
"acc_norm": 0.3191489361702128, |
|
"acc_norm_stderr": 0.027807990141320193 |
|
}, |
|
"community|arabic_mmlu:professional_law|0": { |
|
"acc_norm": 0.3109517601043025, |
|
"acc_norm_stderr": 0.011822252917799193 |
|
}, |
|
"community|arabic_mmlu:professional_medicine|0": { |
|
"acc_norm": 0.23897058823529413, |
|
"acc_norm_stderr": 0.025905280644893006 |
|
}, |
|
"community|arabic_mmlu:professional_psychology|0": { |
|
"acc_norm": 0.369281045751634, |
|
"acc_norm_stderr": 0.01952431674486635 |
|
}, |
|
"community|arabic_mmlu:public_relations|0": { |
|
"acc_norm": 0.41818181818181815, |
|
"acc_norm_stderr": 0.0472457740573157 |
|
}, |
|
"community|arabic_mmlu:security_studies|0": { |
|
"acc_norm": 0.5428571428571428, |
|
"acc_norm_stderr": 0.031891418324213966 |
|
}, |
|
"community|arabic_mmlu:sociology|0": { |
|
"acc_norm": 0.6019900497512438, |
|
"acc_norm_stderr": 0.03461199429040013 |
|
}, |
|
"community|arabic_mmlu:us_foreign_policy|0": { |
|
"acc_norm": 0.7, |
|
"acc_norm_stderr": 0.046056618647183814 |
|
}, |
|
"community|arabic_mmlu:virology|0": { |
|
"acc_norm": 0.43373493975903615, |
|
"acc_norm_stderr": 0.03858158940685517 |
|
}, |
|
"community|arabic_mmlu:world_religions|0": { |
|
"acc_norm": 0.4678362573099415, |
|
"acc_norm_stderr": 0.038268824176603704 |
|
}, |
|
"community|arc_challenge_okapi_ar|0": { |
|
"acc_norm": 0.41551724137931034, |
|
"acc_norm_stderr": 0.014475669495063619 |
|
}, |
|
"community|arc_easy_ar|0": { |
|
"acc_norm": 0.4196277495769882, |
|
"acc_norm_stderr": 0.010152045574825986 |
|
}, |
|
"community|boolq_ar|0": { |
|
"acc_norm": 0.6420245398773006, |
|
"acc_norm_stderr": 0.008397698183880534 |
|
}, |
|
"community|copa_ext_ar|0": { |
|
"acc_norm": 0.5555555555555556, |
|
"acc_norm_stderr": 0.05267171812666418 |
|
}, |
|
"community|hellaswag_okapi_ar|0": { |
|
"acc_norm": 0.29974920946461675, |
|
"acc_norm_stderr": 0.004784330349269958 |
|
}, |
|
"community|openbook_qa_ext_ar|0": { |
|
"acc_norm": 0.4505050505050505, |
|
"acc_norm_stderr": 0.022385572727654967 |
|
}, |
|
"community|piqa_ar|0": { |
|
"acc_norm": 0.6066557555919257, |
|
"acc_norm_stderr": 0.011412868842576702 |
|
}, |
|
"community|race_ar|0": { |
|
"acc_norm": 0.44816392777439645, |
|
"acc_norm_stderr": 0.007084156424271756 |
|
}, |
|
"community|sciq_ar|0": { |
|
"acc_norm": 0.628140703517588, |
|
"acc_norm_stderr": 0.015329380271198706 |
|
}, |
|
"community|toxigen_ar|0": { |
|
"acc_norm": 0.4320855614973262, |
|
"acc_norm_stderr": 0.01620887578524445 |
|
}, |
|
"lighteval|xstory_cloze:ar|0": { |
|
"acc": 0.6379880873593646, |
|
"acc_stderr": 0.012367423769456432 |
|
}, |
|
"community|acva:_average|0": { |
|
"acc_norm": 0.4240889761247024, |
|
"acc_norm_stderr": 0.04658680065842195 |
|
}, |
|
"community|alghafa:_average|0": { |
|
"acc_norm": 0.4927215232991942, |
|
"acc_norm_stderr": 0.022723887486840906 |
|
}, |
|
"community|arabic_mmlu:_average|0": { |
|
"acc_norm": 0.4231920806823339, |
|
"acc_norm_stderr": 0.03606406891570439 |
|
}, |
|
"all": { |
|
"acc_norm": 0.43314331130927597, |
|
"acc_norm_stderr": 0.038121784097655, |
|
"acc": 0.6379880873593646, |
|
"acc_stderr": 0.012367423769456432 |
|
} |
|
}, |
|
"versions": { |
|
"community|acva:Algeria|0": 0, |
|
"community|acva:Ancient_Egypt|0": 0, |
|
"community|acva:Arab_Empire|0": 0, |
|
"community|acva:Arabic_Architecture|0": 0, |
|
"community|acva:Arabic_Art|0": 0, |
|
"community|acva:Arabic_Astronomy|0": 0, |
|
"community|acva:Arabic_Calligraphy|0": 0, |
|
"community|acva:Arabic_Ceremony|0": 0, |
|
"community|acva:Arabic_Clothing|0": 0, |
|
"community|acva:Arabic_Culture|0": 0, |
|
"community|acva:Arabic_Food|0": 0, |
|
"community|acva:Arabic_Funeral|0": 0, |
|
"community|acva:Arabic_Geography|0": 0, |
|
"community|acva:Arabic_History|0": 0, |
|
"community|acva:Arabic_Language_Origin|0": 0, |
|
"community|acva:Arabic_Literature|0": 0, |
|
"community|acva:Arabic_Math|0": 0, |
|
"community|acva:Arabic_Medicine|0": 0, |
|
"community|acva:Arabic_Music|0": 0, |
|
"community|acva:Arabic_Ornament|0": 0, |
|
"community|acva:Arabic_Philosophy|0": 0, |
|
"community|acva:Arabic_Physics_and_Chemistry|0": 0, |
|
"community|acva:Arabic_Wedding|0": 0, |
|
"community|acva:Bahrain|0": 0, |
|
"community|acva:Comoros|0": 0, |
|
"community|acva:Egypt_modern|0": 0, |
|
"community|acva:InfluenceFromAncientEgypt|0": 0, |
|
"community|acva:InfluenceFromByzantium|0": 0, |
|
"community|acva:InfluenceFromChina|0": 0, |
|
"community|acva:InfluenceFromGreece|0": 0, |
|
"community|acva:InfluenceFromIslam|0": 0, |
|
"community|acva:InfluenceFromPersia|0": 0, |
|
"community|acva:InfluenceFromRome|0": 0, |
|
"community|acva:Iraq|0": 0, |
|
"community|acva:Islam_Education|0": 0, |
|
"community|acva:Islam_branches_and_schools|0": 0, |
|
"community|acva:Islamic_law_system|0": 0, |
|
"community|acva:Jordan|0": 0, |
|
"community|acva:Kuwait|0": 0, |
|
"community|acva:Lebanon|0": 0, |
|
"community|acva:Libya|0": 0, |
|
"community|acva:Mauritania|0": 0, |
|
"community|acva:Mesopotamia_civilization|0": 0, |
|
"community|acva:Morocco|0": 0, |
|
"community|acva:Oman|0": 0, |
|
"community|acva:Palestine|0": 0, |
|
"community|acva:Qatar|0": 0, |
|
"community|acva:Saudi_Arabia|0": 0, |
|
"community|acva:Somalia|0": 0, |
|
"community|acva:Sudan|0": 0, |
|
"community|acva:Syria|0": 0, |
|
"community|acva:Tunisia|0": 0, |
|
"community|acva:United_Arab_Emirates|0": 0, |
|
"community|acva:Yemen|0": 0, |
|
"community|acva:communication|0": 0, |
|
"community|acva:computer_and_phone|0": 0, |
|
"community|acva:daily_life|0": 0, |
|
"community|acva:entertainment|0": 0, |
|
"community|alghafa:mcq_exams_test_ar|0": 0, |
|
"community|alghafa:meta_ar_dialects|0": 0, |
|
"community|alghafa:meta_ar_msa|0": 0, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0, |
|
"community|alghafa:multiple_choice_sentiment_task|0": 0, |
|
"community|arabic_exams|0": 0, |
|
"community|arabic_mmlu:abstract_algebra|0": 0, |
|
"community|arabic_mmlu:anatomy|0": 0, |
|
"community|arabic_mmlu:astronomy|0": 0, |
|
"community|arabic_mmlu:business_ethics|0": 0, |
|
"community|arabic_mmlu:clinical_knowledge|0": 0, |
|
"community|arabic_mmlu:college_biology|0": 0, |
|
"community|arabic_mmlu:college_chemistry|0": 0, |
|
"community|arabic_mmlu:college_computer_science|0": 0, |
|
"community|arabic_mmlu:college_mathematics|0": 0, |
|
"community|arabic_mmlu:college_medicine|0": 0, |
|
"community|arabic_mmlu:college_physics|0": 0, |
|
"community|arabic_mmlu:computer_security|0": 0, |
|
"community|arabic_mmlu:conceptual_physics|0": 0, |
|
"community|arabic_mmlu:econometrics|0": 0, |
|
"community|arabic_mmlu:electrical_engineering|0": 0, |
|
"community|arabic_mmlu:elementary_mathematics|0": 0, |
|
"community|arabic_mmlu:formal_logic|0": 0, |
|
"community|arabic_mmlu:global_facts|0": 0, |
|
"community|arabic_mmlu:high_school_biology|0": 0, |
|
"community|arabic_mmlu:high_school_chemistry|0": 0, |
|
"community|arabic_mmlu:high_school_computer_science|0": 0, |
|
"community|arabic_mmlu:high_school_european_history|0": 0, |
|
"community|arabic_mmlu:high_school_geography|0": 0, |
|
"community|arabic_mmlu:high_school_government_and_politics|0": 0, |
|
"community|arabic_mmlu:high_school_macroeconomics|0": 0, |
|
"community|arabic_mmlu:high_school_mathematics|0": 0, |
|
"community|arabic_mmlu:high_school_microeconomics|0": 0, |
|
"community|arabic_mmlu:high_school_physics|0": 0, |
|
"community|arabic_mmlu:high_school_psychology|0": 0, |
|
"community|arabic_mmlu:high_school_statistics|0": 0, |
|
"community|arabic_mmlu:high_school_us_history|0": 0, |
|
"community|arabic_mmlu:high_school_world_history|0": 0, |
|
"community|arabic_mmlu:human_aging|0": 0, |
|
"community|arabic_mmlu:human_sexuality|0": 0, |
|
"community|arabic_mmlu:international_law|0": 0, |
|
"community|arabic_mmlu:jurisprudence|0": 0, |
|
"community|arabic_mmlu:logical_fallacies|0": 0, |
|
"community|arabic_mmlu:machine_learning|0": 0, |
|
"community|arabic_mmlu:management|0": 0, |
|
"community|arabic_mmlu:marketing|0": 0, |
|
"community|arabic_mmlu:medical_genetics|0": 0, |
|
"community|arabic_mmlu:miscellaneous|0": 0, |
|
"community|arabic_mmlu:moral_disputes|0": 0, |
|
"community|arabic_mmlu:moral_scenarios|0": 0, |
|
"community|arabic_mmlu:nutrition|0": 0, |
|
"community|arabic_mmlu:philosophy|0": 0, |
|
"community|arabic_mmlu:prehistory|0": 0, |
|
"community|arabic_mmlu:professional_accounting|0": 0, |
|
"community|arabic_mmlu:professional_law|0": 0, |
|
"community|arabic_mmlu:professional_medicine|0": 0, |
|
"community|arabic_mmlu:professional_psychology|0": 0, |
|
"community|arabic_mmlu:public_relations|0": 0, |
|
"community|arabic_mmlu:security_studies|0": 0, |
|
"community|arabic_mmlu:sociology|0": 0, |
|
"community|arabic_mmlu:us_foreign_policy|0": 0, |
|
"community|arabic_mmlu:virology|0": 0, |
|
"community|arabic_mmlu:world_religions|0": 0, |
|
"community|arc_challenge_okapi_ar|0": 0, |
|
"community|arc_easy_ar|0": 0, |
|
"community|boolq_ar|0": 0, |
|
"community|copa_ext_ar|0": 0, |
|
"community|hellaswag_okapi_ar|0": 0, |
|
"community|openbook_qa_ext_ar|0": 0, |
|
"community|piqa_ar|0": 0, |
|
"community|race_ar|0": 0, |
|
"community|sciq_ar|0": 0, |
|
"community|toxigen_ar|0": 0, |
|
"lighteval|xstory_cloze:ar|0": 0 |
|
}, |
|
"config_tasks": { |
|
"community|acva:Algeria": { |
|
"name": "acva:Algeria", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Algeria", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Ancient_Egypt": { |
|
"name": "acva:Ancient_Egypt", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Ancient_Egypt", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 315, |
|
"effective_num_docs": 315, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arab_Empire": { |
|
"name": "acva:Arab_Empire", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arab_Empire", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 265, |
|
"effective_num_docs": 265, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Architecture": { |
|
"name": "acva:Arabic_Architecture", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Architecture", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Art": { |
|
"name": "acva:Arabic_Art", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Art", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Astronomy": { |
|
"name": "acva:Arabic_Astronomy", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Astronomy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Calligraphy": { |
|
"name": "acva:Arabic_Calligraphy", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Calligraphy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 255, |
|
"effective_num_docs": 255, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Ceremony": { |
|
"name": "acva:Arabic_Ceremony", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Ceremony", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 185, |
|
"effective_num_docs": 185, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Clothing": { |
|
"name": "acva:Arabic_Clothing", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Clothing", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Culture": { |
|
"name": "acva:Arabic_Culture", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Culture", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Food": { |
|
"name": "acva:Arabic_Food", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Food", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Funeral": { |
|
"name": "acva:Arabic_Funeral", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Funeral", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 95, |
|
"effective_num_docs": 95, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Geography": { |
|
"name": "acva:Arabic_Geography", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Geography", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_History": { |
|
"name": "acva:Arabic_History", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_History", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Language_Origin": { |
|
"name": "acva:Arabic_Language_Origin", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Language_Origin", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 95, |
|
"effective_num_docs": 95, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Literature": { |
|
"name": "acva:Arabic_Literature", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Literature", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Math": { |
|
"name": "acva:Arabic_Math", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Math", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Medicine": { |
|
"name": "acva:Arabic_Medicine", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Medicine", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Music": { |
|
"name": "acva:Arabic_Music", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Music", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 139, |
|
"effective_num_docs": 139, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Ornament": { |
|
"name": "acva:Arabic_Ornament", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Ornament", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Philosophy": { |
|
"name": "acva:Arabic_Philosophy", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Philosophy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Physics_and_Chemistry": { |
|
"name": "acva:Arabic_Physics_and_Chemistry", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Physics_and_Chemistry", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Wedding": { |
|
"name": "acva:Arabic_Wedding", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Wedding", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Bahrain": { |
|
"name": "acva:Bahrain", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Bahrain", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Comoros": { |
|
"name": "acva:Comoros", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Comoros", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Egypt_modern": { |
|
"name": "acva:Egypt_modern", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Egypt_modern", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 95, |
|
"effective_num_docs": 95, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromAncientEgypt": { |
|
"name": "acva:InfluenceFromAncientEgypt", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromAncientEgypt", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromByzantium": { |
|
"name": "acva:InfluenceFromByzantium", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromByzantium", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromChina": { |
|
"name": "acva:InfluenceFromChina", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromChina", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromGreece": { |
|
"name": "acva:InfluenceFromGreece", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromGreece", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromIslam": { |
|
"name": "acva:InfluenceFromIslam", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromIslam", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromPersia": { |
|
"name": "acva:InfluenceFromPersia", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromPersia", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 175, |
|
"effective_num_docs": 175, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromRome": { |
|
"name": "acva:InfluenceFromRome", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromRome", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Iraq": { |
|
"name": "acva:Iraq", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Iraq", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 85, |
|
"effective_num_docs": 85, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Islam_Education": { |
|
"name": "acva:Islam_Education", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Islam_Education", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Islam_branches_and_schools": { |
|
"name": "acva:Islam_branches_and_schools", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Islam_branches_and_schools", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 175, |
|
"effective_num_docs": 175, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Islamic_law_system": { |
|
"name": "acva:Islamic_law_system", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Islamic_law_system", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Jordan": { |
|
"name": "acva:Jordan", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Jordan", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Kuwait": { |
|
"name": "acva:Kuwait", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Kuwait", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Lebanon": { |
|
"name": "acva:Lebanon", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Lebanon", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Libya": { |
|
"name": "acva:Libya", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Libya", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Mauritania": { |
|
"name": "acva:Mauritania", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Mauritania", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Mesopotamia_civilization": { |
|
"name": "acva:Mesopotamia_civilization", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Mesopotamia_civilization", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 155, |
|
"effective_num_docs": 155, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Morocco": { |
|
"name": "acva:Morocco", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Morocco", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Oman": { |
|
"name": "acva:Oman", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Oman", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Palestine": { |
|
"name": "acva:Palestine", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Palestine", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 85, |
|
"effective_num_docs": 85, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Qatar": { |
|
"name": "acva:Qatar", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Qatar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Saudi_Arabia": { |
|
"name": "acva:Saudi_Arabia", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Saudi_Arabia", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Somalia": { |
|
"name": "acva:Somalia", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Somalia", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Sudan": { |
|
"name": "acva:Sudan", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Sudan", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Syria": { |
|
"name": "acva:Syria", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Syria", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Tunisia": { |
|
"name": "acva:Tunisia", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Tunisia", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:United_Arab_Emirates": { |
|
"name": "acva:United_Arab_Emirates", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "United_Arab_Emirates", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 85, |
|
"effective_num_docs": 85, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Yemen": { |
|
"name": "acva:Yemen", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Yemen", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 10, |
|
"effective_num_docs": 10, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:communication": { |
|
"name": "acva:communication", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "communication", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 364, |
|
"effective_num_docs": 364, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:computer_and_phone": { |
|
"name": "acva:computer_and_phone", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "computer_and_phone", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 295, |
|
"effective_num_docs": 295, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:daily_life": { |
|
"name": "acva:daily_life", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "daily_life", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 337, |
|
"effective_num_docs": 337, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:entertainment": { |
|
"name": "acva:entertainment", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "entertainment", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 295, |
|
"effective_num_docs": 295, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:mcq_exams_test_ar": { |
|
"name": "alghafa:mcq_exams_test_ar", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "mcq_exams_test_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 557, |
|
"effective_num_docs": 557, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:meta_ar_dialects": { |
|
"name": "alghafa:meta_ar_dialects", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "meta_ar_dialects", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 5395, |
|
"effective_num_docs": 5395, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:meta_ar_msa": { |
|
"name": "alghafa:meta_ar_msa", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "meta_ar_msa", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": { |
|
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_facts_truefalse_balanced_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 75, |
|
"effective_num_docs": 75, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task": { |
|
"name": "alghafa:multiple_choice_grounded_statement_soqal_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_grounded_statement_soqal_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 150, |
|
"effective_num_docs": 150, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { |
|
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 150, |
|
"effective_num_docs": 150, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { |
|
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 7995, |
|
"effective_num_docs": 7995, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task": { |
|
"name": "alghafa:multiple_choice_rating_sentiment_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_rating_sentiment_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 5995, |
|
"effective_num_docs": 5995, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task": { |
|
"name": "alghafa:multiple_choice_sentiment_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_sentiment_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1720, |
|
"effective_num_docs": 1720, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_exams": { |
|
"name": "arabic_exams", |
|
"prompt_function": "arabic_exams", |
|
"hf_repo": "OALL/Arabic_EXAMS", |
|
"hf_subset": "default", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 537, |
|
"effective_num_docs": 537, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:abstract_algebra": { |
|
"name": "arabic_mmlu:abstract_algebra", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "abstract_algebra", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:anatomy": { |
|
"name": "arabic_mmlu:anatomy", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "anatomy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 135, |
|
"effective_num_docs": 135, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:astronomy": { |
|
"name": "arabic_mmlu:astronomy", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "astronomy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 152, |
|
"effective_num_docs": 152, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:business_ethics": { |
|
"name": "arabic_mmlu:business_ethics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "business_ethics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:clinical_knowledge": { |
|
"name": "arabic_mmlu:clinical_knowledge", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "clinical_knowledge", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 265, |
|
"effective_num_docs": 265, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_biology": { |
|
"name": "arabic_mmlu:college_biology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_biology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 144, |
|
"effective_num_docs": 144, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_chemistry": { |
|
"name": "arabic_mmlu:college_chemistry", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_chemistry", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_computer_science": { |
|
"name": "arabic_mmlu:college_computer_science", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_computer_science", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_mathematics": { |
|
"name": "arabic_mmlu:college_mathematics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_medicine": { |
|
"name": "arabic_mmlu:college_medicine", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_medicine", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 173, |
|
"effective_num_docs": 173, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_physics": { |
|
"name": "arabic_mmlu:college_physics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_physics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 102, |
|
"effective_num_docs": 102, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:computer_security": { |
|
"name": "arabic_mmlu:computer_security", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "computer_security", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:conceptual_physics": { |
|
"name": "arabic_mmlu:conceptual_physics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "conceptual_physics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 235, |
|
"effective_num_docs": 235, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:econometrics": { |
|
"name": "arabic_mmlu:econometrics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "econometrics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 114, |
|
"effective_num_docs": 114, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:electrical_engineering": { |
|
"name": "arabic_mmlu:electrical_engineering", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "electrical_engineering", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:elementary_mathematics": { |
|
"name": "arabic_mmlu:elementary_mathematics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "elementary_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 378, |
|
"effective_num_docs": 378, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:formal_logic": { |
|
"name": "arabic_mmlu:formal_logic", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "formal_logic", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 126, |
|
"effective_num_docs": 126, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:global_facts": { |
|
"name": "arabic_mmlu:global_facts", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "global_facts", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_biology": { |
|
"name": "arabic_mmlu:high_school_biology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_biology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 310, |
|
"effective_num_docs": 310, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_chemistry": { |
|
"name": "arabic_mmlu:high_school_chemistry", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_chemistry", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 203, |
|
"effective_num_docs": 203, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_computer_science": { |
|
"name": "arabic_mmlu:high_school_computer_science", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_computer_science", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_european_history": { |
|
"name": "arabic_mmlu:high_school_european_history", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_european_history", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 165, |
|
"effective_num_docs": 165, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_geography": { |
|
"name": "arabic_mmlu:high_school_geography", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_geography", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 198, |
|
"effective_num_docs": 198, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_government_and_politics": { |
|
"name": "arabic_mmlu:high_school_government_and_politics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_government_and_politics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 193, |
|
"effective_num_docs": 193, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_macroeconomics": { |
|
"name": "arabic_mmlu:high_school_macroeconomics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_macroeconomics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 390, |
|
"effective_num_docs": 390, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_mathematics": { |
|
"name": "arabic_mmlu:high_school_mathematics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 270, |
|
"effective_num_docs": 270, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_microeconomics": { |
|
"name": "arabic_mmlu:high_school_microeconomics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_microeconomics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 238, |
|
"effective_num_docs": 238, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_physics": { |
|
"name": "arabic_mmlu:high_school_physics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_physics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 151, |
|
"effective_num_docs": 151, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_psychology": { |
|
"name": "arabic_mmlu:high_school_psychology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_psychology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 545, |
|
"effective_num_docs": 545, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_statistics": { |
|
"name": "arabic_mmlu:high_school_statistics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_statistics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 216, |
|
"effective_num_docs": 216, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_us_history": { |
|
"name": "arabic_mmlu:high_school_us_history", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_us_history", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 204, |
|
"effective_num_docs": 204, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_world_history": { |
|
"name": "arabic_mmlu:high_school_world_history", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_world_history", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 237, |
|
"effective_num_docs": 237, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:human_aging": { |
|
"name": "arabic_mmlu:human_aging", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "human_aging", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 223, |
|
"effective_num_docs": 223, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:human_sexuality": { |
|
"name": "arabic_mmlu:human_sexuality", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "human_sexuality", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 131, |
|
"effective_num_docs": 131, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:international_law": { |
|
"name": "arabic_mmlu:international_law", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "international_law", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 121, |
|
"effective_num_docs": 121, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:jurisprudence": { |
|
"name": "arabic_mmlu:jurisprudence", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "jurisprudence", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 108, |
|
"effective_num_docs": 108, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:logical_fallacies": { |
|
"name": "arabic_mmlu:logical_fallacies", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "logical_fallacies", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 163, |
|
"effective_num_docs": 163, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:machine_learning": { |
|
"name": "arabic_mmlu:machine_learning", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "machine_learning", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 112, |
|
"effective_num_docs": 112, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:management": { |
|
"name": "arabic_mmlu:management", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "management", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 103, |
|
"effective_num_docs": 103, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:marketing": { |
|
"name": "arabic_mmlu:marketing", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "marketing", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 234, |
|
"effective_num_docs": 234, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:medical_genetics": { |
|
"name": "arabic_mmlu:medical_genetics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "medical_genetics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:miscellaneous": { |
|
"name": "arabic_mmlu:miscellaneous", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "miscellaneous", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 783, |
|
"effective_num_docs": 783, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:moral_disputes": { |
|
"name": "arabic_mmlu:moral_disputes", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "moral_disputes", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 346, |
|
"effective_num_docs": 346, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:moral_scenarios": { |
|
"name": "arabic_mmlu:moral_scenarios", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "moral_scenarios", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:nutrition": { |
|
"name": "arabic_mmlu:nutrition", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "nutrition", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 306, |
|
"effective_num_docs": 306, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:philosophy": { |
|
"name": "arabic_mmlu:philosophy", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "philosophy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 311, |
|
"effective_num_docs": 311, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:prehistory": { |
|
"name": "arabic_mmlu:prehistory", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "prehistory", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 324, |
|
"effective_num_docs": 324, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:professional_accounting": { |
|
"name": "arabic_mmlu:professional_accounting", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "professional_accounting", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 282, |
|
"effective_num_docs": 282, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:professional_law": { |
|
"name": "arabic_mmlu:professional_law", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "professional_law", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1534, |
|
"effective_num_docs": 1534, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:professional_medicine": { |
|
"name": "arabic_mmlu:professional_medicine", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "professional_medicine", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 272, |
|
"effective_num_docs": 272, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:professional_psychology": { |
|
"name": "arabic_mmlu:professional_psychology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "professional_psychology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 612, |
|
"effective_num_docs": 612, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:public_relations": { |
|
"name": "arabic_mmlu:public_relations", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "public_relations", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 110, |
|
"effective_num_docs": 110, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:security_studies": { |
|
"name": "arabic_mmlu:security_studies", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "security_studies", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 245, |
|
"effective_num_docs": 245, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:sociology": { |
|
"name": "arabic_mmlu:sociology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "sociology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 201, |
|
"effective_num_docs": 201, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:us_foreign_policy": { |
|
"name": "arabic_mmlu:us_foreign_policy", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "us_foreign_policy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:virology": { |
|
"name": "arabic_mmlu:virology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "virology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 166, |
|
"effective_num_docs": 166, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:world_religions": { |
|
"name": "arabic_mmlu:world_religions", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "world_religions", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 171, |
|
"effective_num_docs": 171, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arc_challenge_okapi_ar": { |
|
"name": "arc_challenge_okapi_ar", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "arc_challenge_okapi_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1160, |
|
"effective_num_docs": 1160, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arc_easy_ar": { |
|
"name": "arc_easy_ar", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "arc_easy_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2364, |
|
"effective_num_docs": 2364, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|boolq_ar": { |
|
"name": "boolq_ar", |
|
"prompt_function": "boolq_prompt_arabic", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "boolq_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 3260, |
|
"effective_num_docs": 3260, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|copa_ext_ar": { |
|
"name": "copa_ext_ar", |
|
"prompt_function": "copa_prompt_arabic", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "copa_ext_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 90, |
|
"effective_num_docs": 90, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|hellaswag_okapi_ar": { |
|
"name": "hellaswag_okapi_ar", |
|
"prompt_function": "hellaswag_prompt_arabic", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "hellaswag_okapi_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 9171, |
|
"effective_num_docs": 9171, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|openbook_qa_ext_ar": { |
|
"name": "openbook_qa_ext_ar", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "openbook_qa_ext_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 495, |
|
"effective_num_docs": 495, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|piqa_ar": { |
|
"name": "piqa_ar", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "piqa_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1833, |
|
"effective_num_docs": 1833, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|race_ar": { |
|
"name": "race_ar", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "race_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4929, |
|
"effective_num_docs": 4929, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|sciq_ar": { |
|
"name": "sciq_ar", |
|
"prompt_function": "sciq_prompt_arabic", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "sciq_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 995, |
|
"effective_num_docs": 995, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|toxigen_ar": { |
|
"name": "toxigen_ar", |
|
"prompt_function": "toxigen_prompt_arabic", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "toxigen_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 935, |
|
"effective_num_docs": 935, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"lighteval|xstory_cloze:ar": { |
|
"name": "xstory_cloze:ar", |
|
"prompt_function": "storycloze", |
|
"hf_repo": "juletxara/xstory_cloze", |
|
"hf_subset": "ar", |
|
"metric": [ |
|
"loglikelihood_acc" |
|
], |
|
"hf_avail_splits": [ |
|
"training", |
|
"eval" |
|
], |
|
"evaluation_splits": [ |
|
"eval" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": [ |
|
"\n" |
|
], |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"lighteval" |
|
], |
|
"original_num_docs": 1511, |
|
"effective_num_docs": 1511, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
} |
|
}, |
|
"summary_tasks": { |
|
"community|acva:Algeria|0": { |
|
"hashes": { |
|
"hash_examples": "da5a3003cd46f6f9", |
|
"hash_full_prompts": "da5a3003cd46f6f9", |
|
"hash_input_tokens": "7fcc6254c7e0cbbf", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Ancient_Egypt|0": { |
|
"hashes": { |
|
"hash_examples": "52d6f767fede195b", |
|
"hash_full_prompts": "52d6f767fede195b", |
|
"hash_input_tokens": "03108e199882436c", |
|
"hash_cont_tokens": "c1e2b54cf8250f31" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 315, |
|
"padded": 630, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arab_Empire|0": { |
|
"hashes": { |
|
"hash_examples": "8dacff6a79804a75", |
|
"hash_full_prompts": "8dacff6a79804a75", |
|
"hash_input_tokens": "dae63550e645f79b", |
|
"hash_cont_tokens": "a57d793a5ea04c42" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 265, |
|
"padded": 530, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Architecture|0": { |
|
"hashes": { |
|
"hash_examples": "df286cd862d9f6bb", |
|
"hash_full_prompts": "df286cd862d9f6bb", |
|
"hash_input_tokens": "e72b82b55054e570", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Art|0": { |
|
"hashes": { |
|
"hash_examples": "112883d764118a49", |
|
"hash_full_prompts": "112883d764118a49", |
|
"hash_input_tokens": "90943644470c8c02", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Astronomy|0": { |
|
"hashes": { |
|
"hash_examples": "20dcdf2454bf8671", |
|
"hash_full_prompts": "20dcdf2454bf8671", |
|
"hash_input_tokens": "233bd37783eef347", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Calligraphy|0": { |
|
"hashes": { |
|
"hash_examples": "3a9f9d1ebe868a15", |
|
"hash_full_prompts": "3a9f9d1ebe868a15", |
|
"hash_input_tokens": "e2afb66c3924b710", |
|
"hash_cont_tokens": "b6820eb4f3eef8a0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 255, |
|
"padded": 510, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Ceremony|0": { |
|
"hashes": { |
|
"hash_examples": "c927630f8d2f44da", |
|
"hash_full_prompts": "c927630f8d2f44da", |
|
"hash_input_tokens": "6cc69991c70d472e", |
|
"hash_cont_tokens": "4bdb062f9ac7e83c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 185, |
|
"padded": 370, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Clothing|0": { |
|
"hashes": { |
|
"hash_examples": "6ad0740c2ac6ac92", |
|
"hash_full_prompts": "6ad0740c2ac6ac92", |
|
"hash_input_tokens": "be0affc187a4a1e8", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Culture|0": { |
|
"hashes": { |
|
"hash_examples": "2177bd857ad872ae", |
|
"hash_full_prompts": "2177bd857ad872ae", |
|
"hash_input_tokens": "c7bddd0bbcc55e1c", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Food|0": { |
|
"hashes": { |
|
"hash_examples": "a6ada65b71d7c9c5", |
|
"hash_full_prompts": "a6ada65b71d7c9c5", |
|
"hash_input_tokens": "a9ca2fd5ce74ddfd", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Funeral|0": { |
|
"hashes": { |
|
"hash_examples": "fcee39dc29eaae91", |
|
"hash_full_prompts": "fcee39dc29eaae91", |
|
"hash_input_tokens": "13b5f66fe98f3575", |
|
"hash_cont_tokens": "d00f5e9bb7608898" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 95, |
|
"padded": 190, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Geography|0": { |
|
"hashes": { |
|
"hash_examples": "d36eda7c89231c02", |
|
"hash_full_prompts": "d36eda7c89231c02", |
|
"hash_input_tokens": "47288cdb54766454", |
|
"hash_cont_tokens": "fe3a24e435a5cdd7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 290, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_History|0": { |
|
"hashes": { |
|
"hash_examples": "6354ac0d6db6a5fc", |
|
"hash_full_prompts": "6354ac0d6db6a5fc", |
|
"hash_input_tokens": "bc1e0af52fc9c876", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Language_Origin|0": { |
|
"hashes": { |
|
"hash_examples": "ddc967c8aca34402", |
|
"hash_full_prompts": "ddc967c8aca34402", |
|
"hash_input_tokens": "cee5d4d18418b7a7", |
|
"hash_cont_tokens": "d00f5e9bb7608898" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 95, |
|
"padded": 190, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Literature|0": { |
|
"hashes": { |
|
"hash_examples": "4305379fd46be5d8", |
|
"hash_full_prompts": "4305379fd46be5d8", |
|
"hash_input_tokens": "ce96f3933ce59133", |
|
"hash_cont_tokens": "fe3a24e435a5cdd7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 290, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Math|0": { |
|
"hashes": { |
|
"hash_examples": "dec621144f4d28be", |
|
"hash_full_prompts": "dec621144f4d28be", |
|
"hash_input_tokens": "7f13e1e77b013cbc", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Medicine|0": { |
|
"hashes": { |
|
"hash_examples": "2b344cdae9495ff2", |
|
"hash_full_prompts": "2b344cdae9495ff2", |
|
"hash_input_tokens": "024969eff295a9fd", |
|
"hash_cont_tokens": "fe3a24e435a5cdd7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 290, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Music|0": { |
|
"hashes": { |
|
"hash_examples": "0c54624d881944ce", |
|
"hash_full_prompts": "0c54624d881944ce", |
|
"hash_input_tokens": "b8362e0cb4b56edb", |
|
"hash_cont_tokens": "4b866375ab9b5507" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 139, |
|
"padded": 278, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Ornament|0": { |
|
"hashes": { |
|
"hash_examples": "251a4a84289d8bc1", |
|
"hash_full_prompts": "251a4a84289d8bc1", |
|
"hash_input_tokens": "f4eff94eef35db99", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Philosophy|0": { |
|
"hashes": { |
|
"hash_examples": "3f86fb9c94c13d22", |
|
"hash_full_prompts": "3f86fb9c94c13d22", |
|
"hash_input_tokens": "1f60efcd15a439e6", |
|
"hash_cont_tokens": "fe3a24e435a5cdd7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 290, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Physics_and_Chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "8fec65af3695b62a", |
|
"hash_full_prompts": "8fec65af3695b62a", |
|
"hash_input_tokens": "512646bfbf2db61a", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Wedding|0": { |
|
"hashes": { |
|
"hash_examples": "9cc3477184d7a4b8", |
|
"hash_full_prompts": "9cc3477184d7a4b8", |
|
"hash_input_tokens": "e0b098a0e4514e6a", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Bahrain|0": { |
|
"hashes": { |
|
"hash_examples": "c92e803a0fa8b9e2", |
|
"hash_full_prompts": "c92e803a0fa8b9e2", |
|
"hash_input_tokens": "c78d7d8930391dee", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Comoros|0": { |
|
"hashes": { |
|
"hash_examples": "06e5d4bba8e54cae", |
|
"hash_full_prompts": "06e5d4bba8e54cae", |
|
"hash_input_tokens": "b22c0de56699769e", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Egypt_modern|0": { |
|
"hashes": { |
|
"hash_examples": "c6ec369164f93446", |
|
"hash_full_prompts": "c6ec369164f93446", |
|
"hash_input_tokens": "0ab15d0c92e39b4c", |
|
"hash_cont_tokens": "d00f5e9bb7608898" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 95, |
|
"padded": 190, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromAncientEgypt|0": { |
|
"hashes": { |
|
"hash_examples": "b9d56d74818b9bd4", |
|
"hash_full_prompts": "b9d56d74818b9bd4", |
|
"hash_input_tokens": "6229c4202390291e", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromByzantium|0": { |
|
"hashes": { |
|
"hash_examples": "5316c9624e7e59b8", |
|
"hash_full_prompts": "5316c9624e7e59b8", |
|
"hash_input_tokens": "46d7a1cd492b6853", |
|
"hash_cont_tokens": "fe3a24e435a5cdd7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 290, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromChina|0": { |
|
"hashes": { |
|
"hash_examples": "87894bce95a56411", |
|
"hash_full_prompts": "87894bce95a56411", |
|
"hash_input_tokens": "84773df4f094bba4", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromGreece|0": { |
|
"hashes": { |
|
"hash_examples": "0baa78a27e469312", |
|
"hash_full_prompts": "0baa78a27e469312", |
|
"hash_input_tokens": "d0fb24068dec54e4", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromIslam|0": { |
|
"hashes": { |
|
"hash_examples": "0c2532cde6541ff2", |
|
"hash_full_prompts": "0c2532cde6541ff2", |
|
"hash_input_tokens": "909d15422101a00c", |
|
"hash_cont_tokens": "fe3a24e435a5cdd7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 290, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromPersia|0": { |
|
"hashes": { |
|
"hash_examples": "efcd8112dc53c6e5", |
|
"hash_full_prompts": "efcd8112dc53c6e5", |
|
"hash_input_tokens": "b1108e7e8063f4c4", |
|
"hash_cont_tokens": "919736d4992ad983" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 175, |
|
"padded": 350, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromRome|0": { |
|
"hashes": { |
|
"hash_examples": "9db61480e2e85fd3", |
|
"hash_full_prompts": "9db61480e2e85fd3", |
|
"hash_input_tokens": "5869b404d6f50a41", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Iraq|0": { |
|
"hashes": { |
|
"hash_examples": "96dac3dfa8d2f41f", |
|
"hash_full_prompts": "96dac3dfa8d2f41f", |
|
"hash_input_tokens": "401a91f732e3fe1b", |
|
"hash_cont_tokens": "13c8aae5240b62db" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 85, |
|
"padded": 170, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Islam_Education|0": { |
|
"hashes": { |
|
"hash_examples": "0d80355f6a4cb51b", |
|
"hash_full_prompts": "0d80355f6a4cb51b", |
|
"hash_input_tokens": "2e98573a0f935911", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Islam_branches_and_schools|0": { |
|
"hashes": { |
|
"hash_examples": "5cedce1be2c3ad50", |
|
"hash_full_prompts": "5cedce1be2c3ad50", |
|
"hash_input_tokens": "cf0f135115422477", |
|
"hash_cont_tokens": "919736d4992ad983" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 175, |
|
"padded": 350, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Islamic_law_system|0": { |
|
"hashes": { |
|
"hash_examples": "c0e6db8bc84e105e", |
|
"hash_full_prompts": "c0e6db8bc84e105e", |
|
"hash_input_tokens": "aa632a3ac7629f47", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Jordan|0": { |
|
"hashes": { |
|
"hash_examples": "33deb5b4e5ddd6a1", |
|
"hash_full_prompts": "33deb5b4e5ddd6a1", |
|
"hash_input_tokens": "a24d62298cea38d6", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Kuwait|0": { |
|
"hashes": { |
|
"hash_examples": "eb41773346d7c46c", |
|
"hash_full_prompts": "eb41773346d7c46c", |
|
"hash_input_tokens": "a3ef3ee37280c6dd", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Lebanon|0": { |
|
"hashes": { |
|
"hash_examples": "25932dbf4c13d34f", |
|
"hash_full_prompts": "25932dbf4c13d34f", |
|
"hash_input_tokens": "f010434995af78b8", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Libya|0": { |
|
"hashes": { |
|
"hash_examples": "f2c4db63cd402926", |
|
"hash_full_prompts": "f2c4db63cd402926", |
|
"hash_input_tokens": "044d79c3404bc5a4", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Mauritania|0": { |
|
"hashes": { |
|
"hash_examples": "8723ab5fdf286b54", |
|
"hash_full_prompts": "8723ab5fdf286b54", |
|
"hash_input_tokens": "8388ef724cfe06d8", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Mesopotamia_civilization|0": { |
|
"hashes": { |
|
"hash_examples": "c33f5502a6130ca9", |
|
"hash_full_prompts": "c33f5502a6130ca9", |
|
"hash_input_tokens": "bbaefb964b71df11", |
|
"hash_cont_tokens": "e00b82159a687ad7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 155, |
|
"padded": 310, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Morocco|0": { |
|
"hashes": { |
|
"hash_examples": "588a5ed27904b1ae", |
|
"hash_full_prompts": "588a5ed27904b1ae", |
|
"hash_input_tokens": "22604753ffa996d7", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Oman|0": { |
|
"hashes": { |
|
"hash_examples": "d447c52b94248b69", |
|
"hash_full_prompts": "d447c52b94248b69", |
|
"hash_input_tokens": "a44be99cf0e66d89", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Palestine|0": { |
|
"hashes": { |
|
"hash_examples": "19197e076ad14ff5", |
|
"hash_full_prompts": "19197e076ad14ff5", |
|
"hash_input_tokens": "3388028624388ab1", |
|
"hash_cont_tokens": "13c8aae5240b62db" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 85, |
|
"padded": 170, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Qatar|0": { |
|
"hashes": { |
|
"hash_examples": "cf0736fa185b28f6", |
|
"hash_full_prompts": "cf0736fa185b28f6", |
|
"hash_input_tokens": "a94acec88e9a873c", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Saudi_Arabia|0": { |
|
"hashes": { |
|
"hash_examples": "69beda6e1b85a08d", |
|
"hash_full_prompts": "69beda6e1b85a08d", |
|
"hash_input_tokens": "17c0fbfc47085680", |
|
"hash_cont_tokens": "56dfe27ee01362a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Somalia|0": { |
|
"hashes": { |
|
"hash_examples": "b387940c65784fbf", |
|
"hash_full_prompts": "b387940c65784fbf", |
|
"hash_input_tokens": "f9400af2d2d430ce", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Sudan|0": { |
|
"hashes": { |
|
"hash_examples": "e02c32b9d2dd0c3f", |
|
"hash_full_prompts": "e02c32b9d2dd0c3f", |
|
"hash_input_tokens": "e7ab3be1e0eaea16", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Syria|0": { |
|
"hashes": { |
|
"hash_examples": "60a6f8fe73bda4bb", |
|
"hash_full_prompts": "60a6f8fe73bda4bb", |
|
"hash_input_tokens": "f313a08053168152", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Tunisia|0": { |
|
"hashes": { |
|
"hash_examples": "34bb15d3830c5649", |
|
"hash_full_prompts": "34bb15d3830c5649", |
|
"hash_input_tokens": "49d828ef8ec0d99c", |
|
"hash_cont_tokens": "f2c5b8cf6c0e0976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:United_Arab_Emirates|0": { |
|
"hashes": { |
|
"hash_examples": "98a0ba78172718ce", |
|
"hash_full_prompts": "98a0ba78172718ce", |
|
"hash_input_tokens": "4a4712a25491fc8e", |
|
"hash_cont_tokens": "13c8aae5240b62db" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 85, |
|
"padded": 170, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Yemen|0": { |
|
"hashes": { |
|
"hash_examples": "18e9bcccbb4ced7a", |
|
"hash_full_prompts": "18e9bcccbb4ced7a", |
|
"hash_input_tokens": "e619d0da72e18a26", |
|
"hash_cont_tokens": "12d6d46b075f79eb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 10, |
|
"padded": 20, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:communication|0": { |
|
"hashes": { |
|
"hash_examples": "9ff28ab5eab5c97b", |
|
"hash_full_prompts": "9ff28ab5eab5c97b", |
|
"hash_input_tokens": "b18e7ff2df94e8c1", |
|
"hash_cont_tokens": "b43f6e0ab3067882" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 364, |
|
"padded": 728, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:computer_and_phone|0": { |
|
"hashes": { |
|
"hash_examples": "37bac2f086aaf6c2", |
|
"hash_full_prompts": "37bac2f086aaf6c2", |
|
"hash_input_tokens": "0e867a0a24b61983", |
|
"hash_cont_tokens": "b704dcf1720c5755" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 295, |
|
"padded": 590, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:daily_life|0": { |
|
"hashes": { |
|
"hash_examples": "bf07363c1c252e2f", |
|
"hash_full_prompts": "bf07363c1c252e2f", |
|
"hash_input_tokens": "8ec19b91bc955a97", |
|
"hash_cont_tokens": "7308a3845f72e43e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 337, |
|
"padded": 674, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:entertainment|0": { |
|
"hashes": { |
|
"hash_examples": "37077bc00f0ac56a", |
|
"hash_full_prompts": "37077bc00f0ac56a", |
|
"hash_input_tokens": "f2d7cbfa92d849bc", |
|
"hash_cont_tokens": "b704dcf1720c5755" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 295, |
|
"padded": 590, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:mcq_exams_test_ar|0": { |
|
"hashes": { |
|
"hash_examples": "c07a5e78c5c0b8fe", |
|
"hash_full_prompts": "c07a5e78c5c0b8fe", |
|
"hash_input_tokens": "e770f6e9af69007e", |
|
"hash_cont_tokens": "3a646a29874382f3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 557, |
|
"padded": 2228, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:meta_ar_dialects|0": { |
|
"hashes": { |
|
"hash_examples": "c0b6081f83e14064", |
|
"hash_full_prompts": "c0b6081f83e14064", |
|
"hash_input_tokens": "9d457c76cccb6fb5", |
|
"hash_cont_tokens": "81ba60c5b355d0a3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 5395, |
|
"padded": 21580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:meta_ar_msa|0": { |
|
"hashes": { |
|
"hash_examples": "64eb78a7c5b7484b", |
|
"hash_full_prompts": "64eb78a7c5b7484b", |
|
"hash_input_tokens": "9cbac34eb3ffdc1e", |
|
"hash_cont_tokens": "8c2a1519bf6bfb42" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 895, |
|
"padded": 3580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { |
|
"hashes": { |
|
"hash_examples": "54fc3502c1c02c06", |
|
"hash_full_prompts": "54fc3502c1c02c06", |
|
"hash_input_tokens": "663ea71784e2e0f7", |
|
"hash_cont_tokens": "b82b619647644015" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 75, |
|
"padded": 150, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { |
|
"hashes": { |
|
"hash_examples": "46572d83696552ae", |
|
"hash_full_prompts": "46572d83696552ae", |
|
"hash_input_tokens": "60add19a1b34a118", |
|
"hash_cont_tokens": "60c21d49af597e5a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 150, |
|
"padded": 750, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { |
|
"hashes": { |
|
"hash_examples": "f430d97ff715bc1c", |
|
"hash_full_prompts": "f430d97ff715bc1c", |
|
"hash_input_tokens": "1ddaa075e1eb4d5f", |
|
"hash_cont_tokens": "203ee8614d91fd35" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 150, |
|
"padded": 750, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { |
|
"hashes": { |
|
"hash_examples": "6b70a7416584f98c", |
|
"hash_full_prompts": "6b70a7416584f98c", |
|
"hash_input_tokens": "581cd0d418989ed8", |
|
"hash_cont_tokens": "de5b69881e081318" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 7995, |
|
"padded": 15990, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": { |
|
"hashes": { |
|
"hash_examples": "bc2005cc9d2f436e", |
|
"hash_full_prompts": "bc2005cc9d2f436e", |
|
"hash_input_tokens": "0b9f900f650226cb", |
|
"hash_cont_tokens": "32a650fcf067a32b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 5995, |
|
"padded": 17985, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task|0": { |
|
"hashes": { |
|
"hash_examples": "6fb0e254ea5945d8", |
|
"hash_full_prompts": "6fb0e254ea5945d8", |
|
"hash_input_tokens": "c73b462ba1747899", |
|
"hash_cont_tokens": "cd4fbcc7c800da80" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1720, |
|
"padded": 5160, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_exams|0": { |
|
"hashes": { |
|
"hash_examples": "6d721df351722656", |
|
"hash_full_prompts": "6d721df351722656", |
|
"hash_input_tokens": "b43a3ee29d77e94f", |
|
"hash_cont_tokens": "3952fd8478cb2901" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 537, |
|
"padded": 2148, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:abstract_algebra|0": { |
|
"hashes": { |
|
"hash_examples": "f2ddca8f45c0a511", |
|
"hash_full_prompts": "f2ddca8f45c0a511", |
|
"hash_input_tokens": "72c50248541c75f1", |
|
"hash_cont_tokens": "771d84ba6655ec08" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:anatomy|0": { |
|
"hashes": { |
|
"hash_examples": "dfdbc1b83107668d", |
|
"hash_full_prompts": "dfdbc1b83107668d", |
|
"hash_input_tokens": "7ae9507815209099", |
|
"hash_cont_tokens": "3b3a04ac2381cf2e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 135, |
|
"padded": 540, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:astronomy|0": { |
|
"hashes": { |
|
"hash_examples": "9736a606002a848e", |
|
"hash_full_prompts": "9736a606002a848e", |
|
"hash_input_tokens": "7e58b365854d8d0b", |
|
"hash_cont_tokens": "c4e209dd858f1eb5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 152, |
|
"padded": 608, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:business_ethics|0": { |
|
"hashes": { |
|
"hash_examples": "735e452fbb6dc63d", |
|
"hash_full_prompts": "735e452fbb6dc63d", |
|
"hash_input_tokens": "2506176645d94ab6", |
|
"hash_cont_tokens": "771d84ba6655ec08" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:clinical_knowledge|0": { |
|
"hashes": { |
|
"hash_examples": "6ab0ca4da98aedcf", |
|
"hash_full_prompts": "6ab0ca4da98aedcf", |
|
"hash_input_tokens": "0844022f867ea36a", |
|
"hash_cont_tokens": "27d080ddb72a91fb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 265, |
|
"padded": 1060, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:college_biology|0": { |
|
"hashes": { |
|
"hash_examples": "17e4e390848018a4", |
|
"hash_full_prompts": "17e4e390848018a4", |
|
"hash_input_tokens": "60f835ec5aa80b14", |
|
"hash_cont_tokens": "36fd225818f99fc4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 144, |
|
"padded": 576, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:college_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "4abb169f6dfd234b", |
|
"hash_full_prompts": "4abb169f6dfd234b", |
|
"hash_input_tokens": "825e8038e6f148f5", |
|
"hash_cont_tokens": "771d84ba6655ec08" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:college_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "a369e2e941358a1e", |
|
"hash_full_prompts": "a369e2e941358a1e", |
|
"hash_input_tokens": "0e42307841a0cbaf", |
|
"hash_cont_tokens": "771d84ba6655ec08" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:college_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "d7be03b8b6020bff", |
|
"hash_full_prompts": "d7be03b8b6020bff", |
|
"hash_input_tokens": "83857a62d31ea855", |
|
"hash_cont_tokens": "771d84ba6655ec08" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:college_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "0518a00f097346bf", |
|
"hash_full_prompts": "0518a00f097346bf", |
|
"hash_input_tokens": "43dd26674091a2dc", |
|
"hash_cont_tokens": "c2807dc27dcf6153" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 173, |
|
"padded": 692, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:college_physics|0": { |
|
"hashes": { |
|
"hash_examples": "5d842cd49bc70e12", |
|
"hash_full_prompts": "5d842cd49bc70e12", |
|
"hash_input_tokens": "d2fce0dad7215df5", |
|
"hash_cont_tokens": "7b17d820dbbaa6cb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 102, |
|
"padded": 408, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:computer_security|0": { |
|
"hashes": { |
|
"hash_examples": "8e85d9f85be9b32f", |
|
"hash_full_prompts": "8e85d9f85be9b32f", |
|
"hash_input_tokens": "1711b8c9c43f3b02", |
|
"hash_cont_tokens": "771d84ba6655ec08" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:conceptual_physics|0": { |
|
"hashes": { |
|
"hash_examples": "7964b55a0a49502b", |
|
"hash_full_prompts": "7964b55a0a49502b", |
|
"hash_input_tokens": "95dfa6768b2c7daf", |
|
"hash_cont_tokens": "2ff86a5d10a2127a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 235, |
|
"padded": 940, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:econometrics|0": { |
|
"hashes": { |
|
"hash_examples": "1e192eae38347257", |
|
"hash_full_prompts": "1e192eae38347257", |
|
"hash_input_tokens": "43dca0d2277eb879", |
|
"hash_cont_tokens": "901aa9a4c60559f3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 114, |
|
"padded": 456, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:electrical_engineering|0": { |
|
"hashes": { |
|
"hash_examples": "cf97671d5c441da1", |
|
"hash_full_prompts": "cf97671d5c441da1", |
|
"hash_input_tokens": "5874efb19dfc9fd9", |
|
"hash_cont_tokens": "5ae653ddb5ac9494" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:elementary_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "6f49107ed43c40c5", |
|
"hash_full_prompts": "6f49107ed43c40c5", |
|
"hash_input_tokens": "d26e02f2e21e760d", |
|
"hash_cont_tokens": "d00485b6b9b1a7b2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 378, |
|
"padded": 1512, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:formal_logic|0": { |
|
"hashes": { |
|
"hash_examples": "7922c376008ba77b", |
|
"hash_full_prompts": "7922c376008ba77b", |
|
"hash_input_tokens": "f7d01a8a739c71d3", |
|
"hash_cont_tokens": "6a74353d78fb2049" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 126, |
|
"padded": 504, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:global_facts|0": { |
|
"hashes": { |
|
"hash_examples": "11f9813185047d5b", |
|
"hash_full_prompts": "11f9813185047d5b", |
|
"hash_input_tokens": "a38a27a3743cf620", |
|
"hash_cont_tokens": "771d84ba6655ec08" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_biology|0": { |
|
"hashes": { |
|
"hash_examples": "2a804b1d90cbe66e", |
|
"hash_full_prompts": "2a804b1d90cbe66e", |
|
"hash_input_tokens": "fda1a01da057c8d9", |
|
"hash_cont_tokens": "e1b38a431c7cfdf2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 310, |
|
"padded": 1240, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "0032168adabc53b4", |
|
"hash_full_prompts": "0032168adabc53b4", |
|
"hash_input_tokens": "9b065984bf6d6c5d", |
|
"hash_cont_tokens": "d30d155b83b8beee" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 203, |
|
"padded": 812, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "f2fb8740f9df980f", |
|
"hash_full_prompts": "f2fb8740f9df980f", |
|
"hash_input_tokens": "6b4eb526da1cfab0", |
|
"hash_cont_tokens": "771d84ba6655ec08" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_european_history|0": { |
|
"hashes": { |
|
"hash_examples": "73509021e7e66435", |
|
"hash_full_prompts": "73509021e7e66435", |
|
"hash_input_tokens": "fe5909e4022f5311", |
|
"hash_cont_tokens": "aa387b55778f7d85" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 165, |
|
"padded": 660, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_geography|0": { |
|
"hashes": { |
|
"hash_examples": "9e08d1894940ff42", |
|
"hash_full_prompts": "9e08d1894940ff42", |
|
"hash_input_tokens": "698e9d4550508356", |
|
"hash_cont_tokens": "ea572b82c41be702" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 198, |
|
"padded": 792, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_government_and_politics|0": { |
|
"hashes": { |
|
"hash_examples": "64b7e97817ca6c76", |
|
"hash_full_prompts": "64b7e97817ca6c76", |
|
"hash_input_tokens": "16fad7a4610605be", |
|
"hash_cont_tokens": "d535b26a86b9a9d3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 193, |
|
"padded": 772, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_macroeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "9f582da8534bd2ef", |
|
"hash_full_prompts": "9f582da8534bd2ef", |
|
"hash_input_tokens": "a09c497677dffc86", |
|
"hash_cont_tokens": "606c1a2137551055" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 390, |
|
"padded": 1548, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "fd54f1c10d423c51", |
|
"hash_full_prompts": "fd54f1c10d423c51", |
|
"hash_input_tokens": "06bf6644bc02f5c1", |
|
"hash_cont_tokens": "f18ea16235393e7a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 270, |
|
"padded": 1076, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_microeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "7037896925aaf42f", |
|
"hash_full_prompts": "7037896925aaf42f", |
|
"hash_input_tokens": "d1cde41948d3ab90", |
|
"hash_cont_tokens": "05a90a8afcf3afc3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 238, |
|
"padded": 944, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_physics|0": { |
|
"hashes": { |
|
"hash_examples": "60c3776215167dae", |
|
"hash_full_prompts": "60c3776215167dae", |
|
"hash_input_tokens": "4636673f567c3f94", |
|
"hash_cont_tokens": "847282b0877be22e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 151, |
|
"padded": 600, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "61176bfd5da1298f", |
|
"hash_full_prompts": "61176bfd5da1298f", |
|
"hash_input_tokens": "fe0ca8b3453ed9c7", |
|
"hash_cont_tokens": "30a296640c9037d1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 545, |
|
"padded": 2156, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_statistics|0": { |
|
"hashes": { |
|
"hash_examples": "40dfeebd1ea10f76", |
|
"hash_full_prompts": "40dfeebd1ea10f76", |
|
"hash_input_tokens": "dda8ed3f1c5aa340", |
|
"hash_cont_tokens": "a259777479a52fa3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 216, |
|
"padded": 860, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_us_history|0": { |
|
"hashes": { |
|
"hash_examples": "03daa510ba917f4d", |
|
"hash_full_prompts": "03daa510ba917f4d", |
|
"hash_input_tokens": "9edead90eb0dbe8e", |
|
"hash_cont_tokens": "35915add7ad519d1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 204, |
|
"padded": 804, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_world_history|0": { |
|
"hashes": { |
|
"hash_examples": "be075ffd579f43c2", |
|
"hash_full_prompts": "be075ffd579f43c2", |
|
"hash_input_tokens": "0b3231ed9aac7bcb", |
|
"hash_cont_tokens": "4766d81466995bda" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 237, |
|
"padded": 912, |
|
"non_padded": 36, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:human_aging|0": { |
|
"hashes": { |
|
"hash_examples": "caa5b69f640bd1ef", |
|
"hash_full_prompts": "caa5b69f640bd1ef", |
|
"hash_input_tokens": "93baf8240fdb3e27", |
|
"hash_cont_tokens": "7bf358fd838eb005" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 223, |
|
"padded": 881, |
|
"non_padded": 11, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:human_sexuality|0": { |
|
"hashes": { |
|
"hash_examples": "5ed2e38fb25a3767", |
|
"hash_full_prompts": "5ed2e38fb25a3767", |
|
"hash_input_tokens": "8749f80a99c3e491", |
|
"hash_cont_tokens": "8c45b597fb2c4a20" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 131, |
|
"padded": 508, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:international_law|0": { |
|
"hashes": { |
|
"hash_examples": "4e3e9e28d1b96484", |
|
"hash_full_prompts": "4e3e9e28d1b96484", |
|
"hash_input_tokens": "8abf958a46103079", |
|
"hash_cont_tokens": "2c8c36f61bba92e8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 121, |
|
"padded": 480, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:jurisprudence|0": { |
|
"hashes": { |
|
"hash_examples": "e264b755366310b3", |
|
"hash_full_prompts": "e264b755366310b3", |
|
"hash_input_tokens": "94de697614f8d487", |
|
"hash_cont_tokens": "43df6122a6ce9ca7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 108, |
|
"padded": 416, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:logical_fallacies|0": { |
|
"hashes": { |
|
"hash_examples": "a4ab6965a3e38071", |
|
"hash_full_prompts": "a4ab6965a3e38071", |
|
"hash_input_tokens": "65affeda4e8cec04", |
|
"hash_cont_tokens": "636d46707e1a84a1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 163, |
|
"padded": 636, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:machine_learning|0": { |
|
"hashes": { |
|
"hash_examples": "b92320efa6636b40", |
|
"hash_full_prompts": "b92320efa6636b40", |
|
"hash_input_tokens": "a9049aa5d9de462a", |
|
"hash_cont_tokens": "733df8274472cbd6" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 112, |
|
"padded": 440, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:management|0": { |
|
"hashes": { |
|
"hash_examples": "c9ee4872a850fe20", |
|
"hash_full_prompts": "c9ee4872a850fe20", |
|
"hash_input_tokens": "27f1542525a09352", |
|
"hash_cont_tokens": "fc3cf8f15a104c82" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 103, |
|
"padded": 408, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:marketing|0": { |
|
"hashes": { |
|
"hash_examples": "0c151b70f6a047e3", |
|
"hash_full_prompts": "0c151b70f6a047e3", |
|
"hash_input_tokens": "395f8360842cb800", |
|
"hash_cont_tokens": "b8698fec039e309c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 234, |
|
"padded": 924, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:medical_genetics|0": { |
|
"hashes": { |
|
"hash_examples": "513f6cb8fca3a24e", |
|
"hash_full_prompts": "513f6cb8fca3a24e", |
|
"hash_input_tokens": "685d019ae3599f92", |
|
"hash_cont_tokens": "771d84ba6655ec08" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 384, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:miscellaneous|0": { |
|
"hashes": { |
|
"hash_examples": "259a190d635331db", |
|
"hash_full_prompts": "259a190d635331db", |
|
"hash_input_tokens": "8ddda3435385c096", |
|
"hash_cont_tokens": "87f2df51ba8a6c8c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 783, |
|
"padded": 3048, |
|
"non_padded": 84, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:moral_disputes|0": { |
|
"hashes": { |
|
"hash_examples": "b85052c48a0b7bc3", |
|
"hash_full_prompts": "b85052c48a0b7bc3", |
|
"hash_input_tokens": "92e260be63808f31", |
|
"hash_cont_tokens": "2003018f8616dc35" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 346, |
|
"padded": 1364, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:moral_scenarios|0": { |
|
"hashes": { |
|
"hash_examples": "28d0b069ef00dd00", |
|
"hash_full_prompts": "28d0b069ef00dd00", |
|
"hash_input_tokens": "6f78b838bec59a5d", |
|
"hash_cont_tokens": "1ae2c6d8baa46e2a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 895, |
|
"padded": 3576, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:nutrition|0": { |
|
"hashes": { |
|
"hash_examples": "00c9bc5f1d305b2f", |
|
"hash_full_prompts": "00c9bc5f1d305b2f", |
|
"hash_input_tokens": "52d0e83724337374", |
|
"hash_cont_tokens": "7b7aef3aad672dcb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 306, |
|
"padded": 1188, |
|
"non_padded": 36, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:philosophy|0": { |
|
"hashes": { |
|
"hash_examples": "a458c08454a3fd5f", |
|
"hash_full_prompts": "a458c08454a3fd5f", |
|
"hash_input_tokens": "16c4c3b6464cfdf2", |
|
"hash_cont_tokens": "ffde3f7e9cb8ce4f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 311, |
|
"padded": 1212, |
|
"non_padded": 32, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:prehistory|0": { |
|
"hashes": { |
|
"hash_examples": "d6a0ecbdbb670e9c", |
|
"hash_full_prompts": "d6a0ecbdbb670e9c", |
|
"hash_input_tokens": "689e2d703d82df40", |
|
"hash_cont_tokens": "bda8eff659818de4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 324, |
|
"padded": 1272, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:professional_accounting|0": { |
|
"hashes": { |
|
"hash_examples": "b4a95fe480b6540e", |
|
"hash_full_prompts": "b4a95fe480b6540e", |
|
"hash_input_tokens": "e80bdd3c9f5ed97f", |
|
"hash_cont_tokens": "5f7423e268242363" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 282, |
|
"padded": 1124, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:professional_law|0": { |
|
"hashes": { |
|
"hash_examples": "c2be9651cdbdde3b", |
|
"hash_full_prompts": "c2be9651cdbdde3b", |
|
"hash_input_tokens": "3c50bea308a6fbd4", |
|
"hash_cont_tokens": "60c68d5e3ae45dc1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1534, |
|
"padded": 6100, |
|
"non_padded": 36, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:professional_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "26ce92416288f273", |
|
"hash_full_prompts": "26ce92416288f273", |
|
"hash_input_tokens": "e619b339657c7299", |
|
"hash_cont_tokens": "f52e4396cb5cdef2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 272, |
|
"padded": 1068, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:professional_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "71ea5f182ea9a641", |
|
"hash_full_prompts": "71ea5f182ea9a641", |
|
"hash_input_tokens": "96a19f428e3ba44b", |
|
"hash_cont_tokens": "a0ebf929cab4bd2c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 612, |
|
"padded": 2400, |
|
"non_padded": 48, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:public_relations|0": { |
|
"hashes": { |
|
"hash_examples": "125adc21f91f8d77", |
|
"hash_full_prompts": "125adc21f91f8d77", |
|
"hash_input_tokens": "8c1f28320074210a", |
|
"hash_cont_tokens": "09ba719b073994df" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 110, |
|
"padded": 420, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:security_studies|0": { |
|
"hashes": { |
|
"hash_examples": "3c18b216c099fb26", |
|
"hash_full_prompts": "3c18b216c099fb26", |
|
"hash_input_tokens": "2346245574222ad9", |
|
"hash_cont_tokens": "4854dd7ac1df8c7c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 245, |
|
"padded": 976, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:sociology|0": { |
|
"hashes": { |
|
"hash_examples": "3f2a9634cef7417d", |
|
"hash_full_prompts": "3f2a9634cef7417d", |
|
"hash_input_tokens": "37b0dfad7840e359", |
|
"hash_cont_tokens": "ea4c0af969f2373c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 201, |
|
"padded": 796, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:us_foreign_policy|0": { |
|
"hashes": { |
|
"hash_examples": "22249da54056475e", |
|
"hash_full_prompts": "22249da54056475e", |
|
"hash_input_tokens": "df94aad9b0290e32", |
|
"hash_cont_tokens": "771d84ba6655ec08" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 388, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:virology|0": { |
|
"hashes": { |
|
"hash_examples": "9d194b9471dc624e", |
|
"hash_full_prompts": "9d194b9471dc624e", |
|
"hash_input_tokens": "e67b796398812e02", |
|
"hash_cont_tokens": "810369902bd4e47e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 166, |
|
"padded": 648, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:world_religions|0": { |
|
"hashes": { |
|
"hash_examples": "229e5fe50082b064", |
|
"hash_full_prompts": "229e5fe50082b064", |
|
"hash_input_tokens": "b2552ff06662bdce", |
|
"hash_cont_tokens": "fd19d689989ad4e2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 171, |
|
"padded": 664, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arc_challenge_okapi_ar|0": { |
|
"hashes": { |
|
"hash_examples": "ab893807673bc355", |
|
"hash_full_prompts": "ab893807673bc355", |
|
"hash_input_tokens": "4b34174cd60f583a", |
|
"hash_cont_tokens": "280e3f4cd1260625" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1160, |
|
"padded": 4553, |
|
"non_padded": 87, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arc_easy_ar|0": { |
|
"hashes": { |
|
"hash_examples": "acb688624acc3d04", |
|
"hash_full_prompts": "acb688624acc3d04", |
|
"hash_input_tokens": "270142cd528ea38a", |
|
"hash_cont_tokens": "a211b67b7f2e582b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 2364, |
|
"padded": 9283, |
|
"non_padded": 173, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|boolq_ar|0": { |
|
"hashes": { |
|
"hash_examples": "48355a67867e0c32", |
|
"hash_full_prompts": "48355a67867e0c32", |
|
"hash_input_tokens": "4319f84f2dc8ad43", |
|
"hash_cont_tokens": "f51d666013e03070" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 3260, |
|
"padded": 6479, |
|
"non_padded": 41, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|copa_ext_ar|0": { |
|
"hashes": { |
|
"hash_examples": "9bb83301bb72eecf", |
|
"hash_full_prompts": "9bb83301bb72eecf", |
|
"hash_input_tokens": "8392677b98498d1c", |
|
"hash_cont_tokens": "cbf15c22099c110a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 90, |
|
"padded": 180, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|hellaswag_okapi_ar|0": { |
|
"hashes": { |
|
"hash_examples": "6e8cf57a322dfadd", |
|
"hash_full_prompts": "6e8cf57a322dfadd", |
|
"hash_input_tokens": "25140a17e9dc5ff0", |
|
"hash_cont_tokens": "c72eb06e7527c34e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 9171, |
|
"padded": 36613, |
|
"non_padded": 71, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|openbook_qa_ext_ar|0": { |
|
"hashes": { |
|
"hash_examples": "923d41eb0aca93eb", |
|
"hash_full_prompts": "923d41eb0aca93eb", |
|
"hash_input_tokens": "474b303d12778a12", |
|
"hash_cont_tokens": "5c95529c35f5c1ef" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 495, |
|
"padded": 1952, |
|
"non_padded": 28, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|piqa_ar|0": { |
|
"hashes": { |
|
"hash_examples": "94bc205a520d3ea0", |
|
"hash_full_prompts": "94bc205a520d3ea0", |
|
"hash_input_tokens": "4e81e591953a2d08", |
|
"hash_cont_tokens": "4814432fa313c884" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1833, |
|
"padded": 3622, |
|
"non_padded": 44, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|race_ar|0": { |
|
"hashes": { |
|
"hash_examples": "de65130bae647516", |
|
"hash_full_prompts": "de65130bae647516", |
|
"hash_input_tokens": "8c6dfc6184237348", |
|
"hash_cont_tokens": "1908b4afee48b130" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 4929, |
|
"padded": 19713, |
|
"non_padded": 3, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|sciq_ar|0": { |
|
"hashes": { |
|
"hash_examples": "f3ced6cc822cd9ef", |
|
"hash_full_prompts": "f3ced6cc822cd9ef", |
|
"hash_input_tokens": "587d1dbc09cc0d48", |
|
"hash_cont_tokens": "c9b87934ebabd127" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 995, |
|
"padded": 3962, |
|
"non_padded": 18, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|toxigen_ar|0": { |
|
"hashes": { |
|
"hash_examples": "1e139513004a9a2e", |
|
"hash_full_prompts": "1e139513004a9a2e", |
|
"hash_input_tokens": "2b34489569e1fa5c", |
|
"hash_cont_tokens": "4323a1b5cd5f70c3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 935, |
|
"padded": 1844, |
|
"non_padded": 26, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"lighteval|xstory_cloze:ar|0": { |
|
"hashes": { |
|
"hash_examples": "865426a22c787481", |
|
"hash_full_prompts": "865426a22c787481", |
|
"hash_input_tokens": "14430d8cabf5c123", |
|
"hash_cont_tokens": "f9a9355b310f533a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1511, |
|
"padded": 2972, |
|
"non_padded": 50, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
} |
|
},
"summary_general": {
"hashes": {
"hash_examples": "d6532e1665535797",
"hash_full_prompts": "d6532e1665535797",
"hash_input_tokens": "d9f664dac800fb2e",
"hash_cont_tokens": "1283e2b3bec9c170"
},
"truncated": 0,
"non_truncated": 72964,
"padded": 234487,
"non_padded": 1136,
"num_truncated_few_shots": 0
}
}