{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 504.000136627,
    "end_time": 168540.611214571,
    "total_evaluation_time_secondes": "168036.611077944",
    "model_name": "core42/jais-30b-chat-v3",
    "model_sha": "1243e551951c45160f7f42ba4df4da062eab9d93",
    "model_dtype": "torch.float32",
    "model_size": "112.91 GB",
    "config": null
  },
"results": { |
|
"community|acva:Algeria|0": { |
|
"acc_norm": 0.5230769230769231, |
|
"acc_norm_stderr": 0.0358596530894741 |
|
}, |
|
"community|acva:Ancient_Egypt|0": { |
|
"acc_norm": 0.05714285714285714, |
|
"acc_norm_stderr": 0.013099028872013177 |
|
}, |
|
"community|acva:Arab_Empire|0": { |
|
"acc_norm": 0.30943396226415093, |
|
"acc_norm_stderr": 0.028450154794118627 |
|
}, |
|
"community|acva:Arabic_Architecture|0": { |
|
"acc_norm": 0.48205128205128206, |
|
"acc_norm_stderr": 0.0358747709877383 |
|
}, |
|
"community|acva:Arabic_Art|0": { |
|
"acc_norm": 0.3641025641025641, |
|
"acc_norm_stderr": 0.03454653867786389 |
|
}, |
|
"community|acva:Arabic_Astronomy|0": { |
|
"acc_norm": 0.4666666666666667, |
|
"acc_norm_stderr": 0.03581804596782233 |
|
}, |
|
"community|acva:Arabic_Calligraphy|0": { |
|
"acc_norm": 0.49019607843137253, |
|
"acc_norm_stderr": 0.03136675876780921 |
|
}, |
|
"community|acva:Arabic_Ceremony|0": { |
|
"acc_norm": 0.5135135135135135, |
|
"acc_norm_stderr": 0.03684702401944814 |
|
}, |
|
"community|acva:Arabic_Clothing|0": { |
|
"acc_norm": 0.5282051282051282, |
|
"acc_norm_stderr": 0.03584074674920833 |
|
}, |
|
"community|acva:Arabic_Culture|0": { |
|
"acc_norm": 0.23076923076923078, |
|
"acc_norm_stderr": 0.0302493752938313 |
|
}, |
|
"community|acva:Arabic_Food|0": { |
|
"acc_norm": 0.46153846153846156, |
|
"acc_norm_stderr": 0.03579154352544572 |
|
}, |
|
"community|acva:Arabic_Funeral|0": { |
|
"acc_norm": 0.4, |
|
"acc_norm_stderr": 0.050529115263991134 |
|
}, |
|
"community|acva:Arabic_Geography|0": { |
|
"acc_norm": 0.6068965517241379, |
|
"acc_norm_stderr": 0.040703290137070705 |
|
}, |
|
"community|acva:Arabic_History|0": { |
|
"acc_norm": 0.30256410256410254, |
|
"acc_norm_stderr": 0.03298070870085619 |
|
}, |
|
"community|acva:Arabic_Language_Origin|0": { |
|
"acc_norm": 0.5684210526315789, |
|
"acc_norm_stderr": 0.051085926733089475 |
|
}, |
|
"community|acva:Arabic_Literature|0": { |
|
"acc_norm": 0.4689655172413793, |
|
"acc_norm_stderr": 0.04158632762097828 |
|
}, |
|
"community|acva:Arabic_Math|0": { |
|
"acc_norm": 0.30256410256410254, |
|
"acc_norm_stderr": 0.03298070870085618 |
|
}, |
|
"community|acva:Arabic_Medicine|0": { |
|
"acc_norm": 0.46206896551724136, |
|
"acc_norm_stderr": 0.041546596717075474 |
|
}, |
|
"community|acva:Arabic_Music|0": { |
|
"acc_norm": 0.2805755395683453, |
|
"acc_norm_stderr": 0.03824529014900686 |
|
}, |
|
"community|acva:Arabic_Ornament|0": { |
|
"acc_norm": 0.48205128205128206, |
|
"acc_norm_stderr": 0.035874770987738294 |
|
}, |
|
"community|acva:Arabic_Philosophy|0": { |
|
"acc_norm": 0.5793103448275863, |
|
"acc_norm_stderr": 0.0411391498118926 |
|
}, |
|
"community|acva:Arabic_Physics_and_Chemistry|0": { |
|
"acc_norm": 0.6102564102564103, |
|
"acc_norm_stderr": 0.035014247762563705 |
|
}, |
|
"community|acva:Arabic_Wedding|0": { |
|
"acc_norm": 0.41025641025641024, |
|
"acc_norm_stderr": 0.03531493712326671 |
|
}, |
|
"community|acva:Bahrain|0": { |
|
"acc_norm": 0.3111111111111111, |
|
"acc_norm_stderr": 0.06979205927323111 |
|
}, |
|
"community|acva:Comoros|0": { |
|
"acc_norm": 0.37777777777777777, |
|
"acc_norm_stderr": 0.07309112127323451 |
|
}, |
|
"community|acva:Egypt_modern|0": { |
|
"acc_norm": 0.3157894736842105, |
|
"acc_norm_stderr": 0.04794350420740798 |
|
}, |
|
"community|acva:InfluenceFromAncientEgypt|0": { |
|
"acc_norm": 0.6051282051282051, |
|
"acc_norm_stderr": 0.03509545602262038 |
|
}, |
|
"community|acva:InfluenceFromByzantium|0": { |
|
"acc_norm": 0.7172413793103448, |
|
"acc_norm_stderr": 0.03752833958003337 |
|
}, |
|
"community|acva:InfluenceFromChina|0": { |
|
"acc_norm": 0.26666666666666666, |
|
"acc_norm_stderr": 0.0317493043641267 |
|
}, |
|
"community|acva:InfluenceFromGreece|0": { |
|
"acc_norm": 0.6358974358974359, |
|
"acc_norm_stderr": 0.03454653867786389 |
|
}, |
|
"community|acva:InfluenceFromIslam|0": { |
|
"acc_norm": 0.296551724137931, |
|
"acc_norm_stderr": 0.03806142687309993 |
|
}, |
|
"community|acva:InfluenceFromPersia|0": { |
|
"acc_norm": 0.6971428571428572, |
|
"acc_norm_stderr": 0.03483414676585986 |
|
}, |
|
"community|acva:InfluenceFromRome|0": { |
|
"acc_norm": 0.558974358974359, |
|
"acc_norm_stderr": 0.0356473293185358 |
|
}, |
|
"community|acva:Iraq|0": { |
|
"acc_norm": 0.5058823529411764, |
|
"acc_norm_stderr": 0.05455069703232772 |
|
}, |
|
"community|acva:Islam_Education|0": { |
|
"acc_norm": 0.4512820512820513, |
|
"acc_norm_stderr": 0.03572709860318392 |
|
}, |
|
"community|acva:Islam_branches_and_schools|0": { |
|
"acc_norm": 0.44571428571428573, |
|
"acc_norm_stderr": 0.03768083305144797 |
|
}, |
|
"community|acva:Islamic_law_system|0": { |
|
"acc_norm": 0.4256410256410256, |
|
"acc_norm_stderr": 0.035498710803677086 |
|
}, |
|
"community|acva:Jordan|0": { |
|
"acc_norm": 0.3333333333333333, |
|
"acc_norm_stderr": 0.07106690545187012 |
|
}, |
|
"community|acva:Kuwait|0": { |
|
"acc_norm": 0.26666666666666666, |
|
"acc_norm_stderr": 0.06666666666666667 |
|
}, |
|
"community|acva:Lebanon|0": { |
|
"acc_norm": 0.17777777777777778, |
|
"acc_norm_stderr": 0.05763774795025094 |
|
}, |
|
"community|acva:Libya|0": { |
|
"acc_norm": 0.4444444444444444, |
|
"acc_norm_stderr": 0.07491109582924914 |
|
}, |
|
"community|acva:Mauritania|0": { |
|
"acc_norm": 0.4222222222222222, |
|
"acc_norm_stderr": 0.07446027270295805 |
|
}, |
|
"community|acva:Mesopotamia_civilization|0": { |
|
"acc_norm": 0.5225806451612903, |
|
"acc_norm_stderr": 0.0402500394824441 |
|
}, |
|
"community|acva:Morocco|0": { |
|
"acc_norm": 0.2222222222222222, |
|
"acc_norm_stderr": 0.06267511942419628 |
|
}, |
|
"community|acva:Oman|0": { |
|
"acc_norm": 0.2, |
|
"acc_norm_stderr": 0.06030226891555273 |
|
}, |
|
"community|acva:Palestine|0": { |
|
"acc_norm": 0.24705882352941178, |
|
"acc_norm_stderr": 0.047058823529411785 |
|
}, |
|
"community|acva:Qatar|0": { |
|
"acc_norm": 0.4, |
|
"acc_norm_stderr": 0.07385489458759964 |
|
}, |
|
"community|acva:Saudi_Arabia|0": { |
|
"acc_norm": 0.3282051282051282, |
|
"acc_norm_stderr": 0.03371243782413707 |
|
}, |
|
"community|acva:Somalia|0": { |
|
"acc_norm": 0.35555555555555557, |
|
"acc_norm_stderr": 0.07216392363431012 |
|
}, |
|
"community|acva:Sudan|0": { |
|
"acc_norm": 0.35555555555555557, |
|
"acc_norm_stderr": 0.07216392363431012 |
|
}, |
|
"community|acva:Syria|0": { |
|
"acc_norm": 0.3333333333333333, |
|
"acc_norm_stderr": 0.07106690545187012 |
|
}, |
|
"community|acva:Tunisia|0": { |
|
"acc_norm": 0.3111111111111111, |
|
"acc_norm_stderr": 0.06979205927323111 |
|
}, |
|
"community|acva:United_Arab_Emirates|0": { |
|
"acc_norm": 0.24705882352941178, |
|
"acc_norm_stderr": 0.047058823529411785 |
|
}, |
|
"community|acva:Yemen|0": { |
|
"acc_norm": 0.2, |
|
"acc_norm_stderr": 0.13333333333333333 |
|
}, |
|
"community|acva:communication|0": { |
|
"acc_norm": 0.47802197802197804, |
|
"acc_norm_stderr": 0.02621782911490642 |
|
}, |
|
"community|acva:computer_and_phone|0": { |
|
"acc_norm": 0.45084745762711864, |
|
"acc_norm_stderr": 0.02901934773187137 |
|
}, |
|
"community|acva:daily_life|0": { |
|
"acc_norm": 0.228486646884273, |
|
"acc_norm_stderr": 0.022905116076972147 |
|
}, |
|
"community|acva:entertainment|0": { |
|
"acc_norm": 0.23389830508474577, |
|
"acc_norm_stderr": 0.024687839412166384 |
|
}, |
|
"community|alghafa:mcq_exams_test_ar|0": { |
|
"acc_norm": 0.3985637342908438, |
|
"acc_norm_stderr": 0.02076377431343294 |
|
}, |
|
"community|alghafa:meta_ar_dialects|0": { |
|
"acc_norm": 0.4355885078776645, |
|
"acc_norm_stderr": 0.0067511951136154345 |
|
}, |
|
"community|alghafa:meta_ar_msa|0": { |
|
"acc_norm": 0.4893854748603352, |
|
"acc_norm_stderr": 0.016718732941192107 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { |
|
"acc_norm": 0.6933333333333334, |
|
"acc_norm_stderr": 0.053602922245650664 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { |
|
"acc_norm": 0.62, |
|
"acc_norm_stderr": 0.03976440686960229 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { |
|
"acc_norm": 0.5733333333333334, |
|
"acc_norm_stderr": 0.04051863621453781 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { |
|
"acc_norm": 0.7515947467166979, |
|
"acc_norm_stderr": 0.00483270387030828 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": { |
|
"acc_norm": 0.5060884070058382, |
|
"acc_norm_stderr": 0.006457723340370245 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task|0": { |
|
"acc_norm": 0.42965116279069765, |
|
"acc_norm_stderr": 0.011939615404174228 |
|
}, |
|
"community|arabic_exams|0": { |
|
"acc_norm": 0.5102420856610801, |
|
"acc_norm_stderr": 0.021592179160473072 |
|
}, |
|
"community|arabic_mmlu:abstract_algebra|0": { |
|
"acc_norm": 0.28, |
|
"acc_norm_stderr": 0.045126085985421276 |
|
}, |
|
"community|arabic_mmlu:anatomy|0": { |
|
"acc_norm": 0.4222222222222222, |
|
"acc_norm_stderr": 0.04266763404099582 |
|
}, |
|
"community|arabic_mmlu:astronomy|0": { |
|
"acc_norm": 0.5263157894736842, |
|
"acc_norm_stderr": 0.04063302731486671 |
|
}, |
|
"community|arabic_mmlu:business_ethics|0": { |
|
"acc_norm": 0.54, |
|
"acc_norm_stderr": 0.05009082659620332 |
|
}, |
|
"community|arabic_mmlu:clinical_knowledge|0": { |
|
"acc_norm": 0.5735849056603773, |
|
"acc_norm_stderr": 0.030437794342983052 |
|
}, |
|
"community|arabic_mmlu:college_biology|0": { |
|
"acc_norm": 0.4444444444444444, |
|
"acc_norm_stderr": 0.04155319955593146 |
|
}, |
|
"community|arabic_mmlu:college_chemistry|0": { |
|
"acc_norm": 0.29, |
|
"acc_norm_stderr": 0.045604802157206845 |
|
}, |
|
"community|arabic_mmlu:college_computer_science|0": { |
|
"acc_norm": 0.3, |
|
"acc_norm_stderr": 0.046056618647183814 |
|
}, |
|
"community|arabic_mmlu:college_mathematics|0": { |
|
"acc_norm": 0.28, |
|
"acc_norm_stderr": 0.04512608598542124 |
|
}, |
|
"community|arabic_mmlu:college_medicine|0": { |
|
"acc_norm": 0.3815028901734104, |
|
"acc_norm_stderr": 0.03703851193099521 |
|
}, |
|
"community|arabic_mmlu:college_physics|0": { |
|
"acc_norm": 0.23529411764705882, |
|
"acc_norm_stderr": 0.04220773659171453 |
|
}, |
|
"community|arabic_mmlu:computer_security|0": { |
|
"acc_norm": 0.62, |
|
"acc_norm_stderr": 0.048783173121456316 |
|
}, |
|
"community|arabic_mmlu:conceptual_physics|0": { |
|
"acc_norm": 0.4297872340425532, |
|
"acc_norm_stderr": 0.03236214467715563 |
|
}, |
|
"community|arabic_mmlu:econometrics|0": { |
|
"acc_norm": 0.2982456140350877, |
|
"acc_norm_stderr": 0.04303684033537315 |
|
}, |
|
"community|arabic_mmlu:electrical_engineering|0": { |
|
"acc_norm": 0.5448275862068965, |
|
"acc_norm_stderr": 0.04149886942192117 |
|
}, |
|
"community|arabic_mmlu:elementary_mathematics|0": { |
|
"acc_norm": 0.3253968253968254, |
|
"acc_norm_stderr": 0.02413015829976262 |
|
}, |
|
"community|arabic_mmlu:formal_logic|0": { |
|
"acc_norm": 0.23809523809523808, |
|
"acc_norm_stderr": 0.03809523809523811 |
|
}, |
|
"community|arabic_mmlu:global_facts|0": { |
|
"acc_norm": 0.32, |
|
"acc_norm_stderr": 0.04688261722621504 |
|
}, |
|
"community|arabic_mmlu:high_school_biology|0": { |
|
"acc_norm": 0.5129032258064516, |
|
"acc_norm_stderr": 0.028434533152681855 |
|
}, |
|
"community|arabic_mmlu:high_school_chemistry|0": { |
|
"acc_norm": 0.39408866995073893, |
|
"acc_norm_stderr": 0.03438157967036543 |
|
}, |
|
"community|arabic_mmlu:high_school_computer_science|0": { |
|
"acc_norm": 0.57, |
|
"acc_norm_stderr": 0.049756985195624284 |
|
}, |
|
"community|arabic_mmlu:high_school_european_history|0": { |
|
"acc_norm": 0.2606060606060606, |
|
"acc_norm_stderr": 0.034277431758165236 |
|
}, |
|
"community|arabic_mmlu:high_school_geography|0": { |
|
"acc_norm": 0.6414141414141414, |
|
"acc_norm_stderr": 0.03416903640391521 |
|
}, |
|
"community|arabic_mmlu:high_school_government_and_politics|0": { |
|
"acc_norm": 0.6010362694300518, |
|
"acc_norm_stderr": 0.03533999094065696 |
|
}, |
|
"community|arabic_mmlu:high_school_macroeconomics|0": { |
|
"acc_norm": 0.43846153846153846, |
|
"acc_norm_stderr": 0.025158266016868568 |
|
}, |
|
"community|arabic_mmlu:high_school_mathematics|0": { |
|
"acc_norm": 0.2740740740740741, |
|
"acc_norm_stderr": 0.027195934804085626 |
|
}, |
|
"community|arabic_mmlu:high_school_microeconomics|0": { |
|
"acc_norm": 0.4117647058823529, |
|
"acc_norm_stderr": 0.031968769891957786 |
|
}, |
|
"community|arabic_mmlu:high_school_physics|0": { |
|
"acc_norm": 0.36423841059602646, |
|
"acc_norm_stderr": 0.03929111781242741 |
|
}, |
|
"community|arabic_mmlu:high_school_psychology|0": { |
|
"acc_norm": 0.544954128440367, |
|
"acc_norm_stderr": 0.02135050309092517 |
|
}, |
|
"community|arabic_mmlu:high_school_statistics|0": { |
|
"acc_norm": 0.2962962962962963, |
|
"acc_norm_stderr": 0.03114144782353604 |
|
}, |
|
"community|arabic_mmlu:high_school_us_history|0": { |
|
"acc_norm": 0.2696078431372549, |
|
"acc_norm_stderr": 0.031145570659486782 |
|
}, |
|
"community|arabic_mmlu:high_school_world_history|0": { |
|
"acc_norm": 0.31645569620253167, |
|
"acc_norm_stderr": 0.030274974880218977 |
|
}, |
|
"community|arabic_mmlu:human_aging|0": { |
|
"acc_norm": 0.5829596412556054, |
|
"acc_norm_stderr": 0.03309266936071721 |
|
}, |
|
"community|arabic_mmlu:human_sexuality|0": { |
|
"acc_norm": 0.5725190839694656, |
|
"acc_norm_stderr": 0.04338920305792401 |
|
}, |
|
"community|arabic_mmlu:international_law|0": { |
|
"acc_norm": 0.6528925619834711, |
|
"acc_norm_stderr": 0.043457245702925335 |
|
}, |
|
"community|arabic_mmlu:jurisprudence|0": { |
|
"acc_norm": 0.5185185185185185, |
|
"acc_norm_stderr": 0.04830366024635331 |
|
}, |
|
"community|arabic_mmlu:logical_fallacies|0": { |
|
"acc_norm": 0.5766871165644172, |
|
"acc_norm_stderr": 0.03881891213334384 |
|
}, |
|
"community|arabic_mmlu:machine_learning|0": { |
|
"acc_norm": 0.3392857142857143, |
|
"acc_norm_stderr": 0.04493949068613538 |
|
}, |
|
"community|arabic_mmlu:management|0": { |
|
"acc_norm": 0.5631067961165048, |
|
"acc_norm_stderr": 0.04911147107365777 |
|
}, |
|
"community|arabic_mmlu:marketing|0": { |
|
"acc_norm": 0.6965811965811965, |
|
"acc_norm_stderr": 0.03011821010694263 |
|
}, |
|
"community|arabic_mmlu:medical_genetics|0": { |
|
"acc_norm": 0.55, |
|
"acc_norm_stderr": 0.04999999999999999 |
|
}, |
|
"community|arabic_mmlu:miscellaneous|0": { |
|
"acc_norm": 0.5913154533844189, |
|
"acc_norm_stderr": 0.017579250148153393 |
|
}, |
|
"community|arabic_mmlu:moral_disputes|0": { |
|
"acc_norm": 0.5549132947976878, |
|
"acc_norm_stderr": 0.02675625512966377 |
|
}, |
|
"community|arabic_mmlu:moral_scenarios|0": { |
|
"acc_norm": 0.2636871508379888, |
|
"acc_norm_stderr": 0.01473692638376199 |
|
}, |
|
"community|arabic_mmlu:nutrition|0": { |
|
"acc_norm": 0.5751633986928104, |
|
"acc_norm_stderr": 0.028304576673141114 |
|
}, |
|
"community|arabic_mmlu:philosophy|0": { |
|
"acc_norm": 0.594855305466238, |
|
"acc_norm_stderr": 0.027882383791325963 |
|
}, |
|
"community|arabic_mmlu:prehistory|0": { |
|
"acc_norm": 0.49691358024691357, |
|
"acc_norm_stderr": 0.027820214158594377 |
|
}, |
|
"community|arabic_mmlu:professional_accounting|0": { |
|
"acc_norm": 0.3475177304964539, |
|
"acc_norm_stderr": 0.02840662780959095 |
|
}, |
|
"community|arabic_mmlu:professional_law|0": { |
|
"acc_norm": 0.30378096479791394, |
|
"acc_norm_stderr": 0.011745787720472465 |
|
}, |
|
"community|arabic_mmlu:professional_medicine|0": { |
|
"acc_norm": 0.21691176470588236, |
|
"acc_norm_stderr": 0.025035845227711243 |
|
}, |
|
"community|arabic_mmlu:professional_psychology|0": { |
|
"acc_norm": 0.43300653594771243, |
|
"acc_norm_stderr": 0.020045442473324224 |
|
}, |
|
"community|arabic_mmlu:public_relations|0": { |
|
"acc_norm": 0.6090909090909091, |
|
"acc_norm_stderr": 0.04673752333670238 |
|
}, |
|
"community|arabic_mmlu:security_studies|0": { |
|
"acc_norm": 0.5959183673469388, |
|
"acc_norm_stderr": 0.031414708025865906 |
|
}, |
|
"community|arabic_mmlu:sociology|0": { |
|
"acc_norm": 0.582089552238806, |
|
"acc_norm_stderr": 0.034875586404620636 |
|
}, |
|
"community|arabic_mmlu:us_foreign_policy|0": { |
|
"acc_norm": 0.75, |
|
"acc_norm_stderr": 0.04351941398892446 |
|
}, |
|
"community|arabic_mmlu:virology|0": { |
|
"acc_norm": 0.463855421686747, |
|
"acc_norm_stderr": 0.03882310850890594 |
|
}, |
|
"community|arabic_mmlu:world_religions|0": { |
|
"acc_norm": 0.6374269005847953, |
|
"acc_norm_stderr": 0.0368713061556206 |
|
}, |
|
"community|arc_challenge_okapi_ar|0": { |
|
"acc_norm": 0.4862068965517241, |
|
"acc_norm_stderr": 0.01468124798967408 |
|
}, |
|
"community|arc_easy_ar|0": { |
|
"acc_norm": 0.5279187817258884, |
|
"acc_norm_stderr": 0.010269754384154013 |
|
}, |
|
"community|boolq_ar|0": { |
|
"acc_norm": 0.8322085889570552, |
|
"acc_norm_stderr": 0.0065457404344095235 |
|
}, |
|
"community|copa_ext_ar|0": { |
|
"acc_norm": 0.6111111111111112, |
|
"acc_norm_stderr": 0.051674686932038624 |
|
}, |
|
"community|hellaswag_okapi_ar|0": { |
|
"acc_norm": 0.40519027368880167, |
|
"acc_norm_stderr": 0.005126652360666745 |
|
}, |
|
"community|openbook_qa_ext_ar|0": { |
|
"acc_norm": 0.5232323232323233, |
|
"acc_norm_stderr": 0.022471766323428892 |
|
}, |
|
"community|piqa_ar|0": { |
|
"acc_norm": 0.7054009819967266, |
|
"acc_norm_stderr": 0.010650523199799249 |
|
}, |
|
"community|race_ar|0": { |
|
"acc_norm": 0.4796104686548996, |
|
"acc_norm_stderr": 0.007116611418269056 |
|
}, |
|
"community|sciq_ar|0": { |
|
"acc_norm": 0.5577889447236181, |
|
"acc_norm_stderr": 0.015752756389627733 |
|
}, |
|
"community|toxigen_ar|0": { |
|
"acc_norm": 0.5903743315508021, |
|
"acc_norm_stderr": 0.016091031197757986 |
|
}, |
|
"lighteval|xstory_cloze:ar|0": { |
|
"acc": 0.7816015883520847, |
|
"acc_stderr": 0.010632343054700507 |
|
}, |
|
"community|acva:_average|0": { |
|
"acc_norm": 0.4012035807351678, |
|
"acc_norm_stderr": 0.0459223560320091 |
|
}, |
|
"community|alghafa:_average|0": { |
|
"acc_norm": 0.5441709666898604, |
|
"acc_norm_stderr": 0.02237219003476489 |
|
}, |
|
"community|arabic_mmlu:_average|0": { |
|
"acc_norm": 0.4563967524086456, |
|
"acc_norm_stderr": 0.03573690043388315 |
|
}, |
|
"all": { |
|
"acc_norm": 0.4474907115407058, |
|
"acc_norm_stderr": 0.03765794544215593, |
|
"acc": 0.7816015883520847, |
|
"acc_stderr": 0.010632343054700507 |
|
} |
|
}, |
|
"versions": { |
|
"community|acva:Algeria|0": 0, |
|
"community|acva:Ancient_Egypt|0": 0, |
|
"community|acva:Arab_Empire|0": 0, |
|
"community|acva:Arabic_Architecture|0": 0, |
|
"community|acva:Arabic_Art|0": 0, |
|
"community|acva:Arabic_Astronomy|0": 0, |
|
"community|acva:Arabic_Calligraphy|0": 0, |
|
"community|acva:Arabic_Ceremony|0": 0, |
|
"community|acva:Arabic_Clothing|0": 0, |
|
"community|acva:Arabic_Culture|0": 0, |
|
"community|acva:Arabic_Food|0": 0, |
|
"community|acva:Arabic_Funeral|0": 0, |
|
"community|acva:Arabic_Geography|0": 0, |
|
"community|acva:Arabic_History|0": 0, |
|
"community|acva:Arabic_Language_Origin|0": 0, |
|
"community|acva:Arabic_Literature|0": 0, |
|
"community|acva:Arabic_Math|0": 0, |
|
"community|acva:Arabic_Medicine|0": 0, |
|
"community|acva:Arabic_Music|0": 0, |
|
"community|acva:Arabic_Ornament|0": 0, |
|
"community|acva:Arabic_Philosophy|0": 0, |
|
"community|acva:Arabic_Physics_and_Chemistry|0": 0, |
|
"community|acva:Arabic_Wedding|0": 0, |
|
"community|acva:Bahrain|0": 0, |
|
"community|acva:Comoros|0": 0, |
|
"community|acva:Egypt_modern|0": 0, |
|
"community|acva:InfluenceFromAncientEgypt|0": 0, |
|
"community|acva:InfluenceFromByzantium|0": 0, |
|
"community|acva:InfluenceFromChina|0": 0, |
|
"community|acva:InfluenceFromGreece|0": 0, |
|
"community|acva:InfluenceFromIslam|0": 0, |
|
"community|acva:InfluenceFromPersia|0": 0, |
|
"community|acva:InfluenceFromRome|0": 0, |
|
"community|acva:Iraq|0": 0, |
|
"community|acva:Islam_Education|0": 0, |
|
"community|acva:Islam_branches_and_schools|0": 0, |
|
"community|acva:Islamic_law_system|0": 0, |
|
"community|acva:Jordan|0": 0, |
|
"community|acva:Kuwait|0": 0, |
|
"community|acva:Lebanon|0": 0, |
|
"community|acva:Libya|0": 0, |
|
"community|acva:Mauritania|0": 0, |
|
"community|acva:Mesopotamia_civilization|0": 0, |
|
"community|acva:Morocco|0": 0, |
|
"community|acva:Oman|0": 0, |
|
"community|acva:Palestine|0": 0, |
|
"community|acva:Qatar|0": 0, |
|
"community|acva:Saudi_Arabia|0": 0, |
|
"community|acva:Somalia|0": 0, |
|
"community|acva:Sudan|0": 0, |
|
"community|acva:Syria|0": 0, |
|
"community|acva:Tunisia|0": 0, |
|
"community|acva:United_Arab_Emirates|0": 0, |
|
"community|acva:Yemen|0": 0, |
|
"community|acva:communication|0": 0, |
|
"community|acva:computer_and_phone|0": 0, |
|
"community|acva:daily_life|0": 0, |
|
"community|acva:entertainment|0": 0, |
|
"community|alghafa:mcq_exams_test_ar|0": 0, |
|
"community|alghafa:meta_ar_dialects|0": 0, |
|
"community|alghafa:meta_ar_msa|0": 0, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0, |
|
"community|alghafa:multiple_choice_sentiment_task|0": 0, |
|
"community|arabic_exams|0": 0, |
|
"community|arabic_mmlu:abstract_algebra|0": 0, |
|
"community|arabic_mmlu:anatomy|0": 0, |
|
"community|arabic_mmlu:astronomy|0": 0, |
|
"community|arabic_mmlu:business_ethics|0": 0, |
|
"community|arabic_mmlu:clinical_knowledge|0": 0, |
|
"community|arabic_mmlu:college_biology|0": 0, |
|
"community|arabic_mmlu:college_chemistry|0": 0, |
|
"community|arabic_mmlu:college_computer_science|0": 0, |
|
"community|arabic_mmlu:college_mathematics|0": 0, |
|
"community|arabic_mmlu:college_medicine|0": 0, |
|
"community|arabic_mmlu:college_physics|0": 0, |
|
"community|arabic_mmlu:computer_security|0": 0, |
|
"community|arabic_mmlu:conceptual_physics|0": 0, |
|
"community|arabic_mmlu:econometrics|0": 0, |
|
"community|arabic_mmlu:electrical_engineering|0": 0, |
|
"community|arabic_mmlu:elementary_mathematics|0": 0, |
|
"community|arabic_mmlu:formal_logic|0": 0, |
|
"community|arabic_mmlu:global_facts|0": 0, |
|
"community|arabic_mmlu:high_school_biology|0": 0, |
|
"community|arabic_mmlu:high_school_chemistry|0": 0, |
|
"community|arabic_mmlu:high_school_computer_science|0": 0, |
|
"community|arabic_mmlu:high_school_european_history|0": 0, |
|
"community|arabic_mmlu:high_school_geography|0": 0, |
|
"community|arabic_mmlu:high_school_government_and_politics|0": 0, |
|
"community|arabic_mmlu:high_school_macroeconomics|0": 0, |
|
"community|arabic_mmlu:high_school_mathematics|0": 0, |
|
"community|arabic_mmlu:high_school_microeconomics|0": 0, |
|
"community|arabic_mmlu:high_school_physics|0": 0, |
|
"community|arabic_mmlu:high_school_psychology|0": 0, |
|
"community|arabic_mmlu:high_school_statistics|0": 0, |
|
"community|arabic_mmlu:high_school_us_history|0": 0, |
|
"community|arabic_mmlu:high_school_world_history|0": 0, |
|
"community|arabic_mmlu:human_aging|0": 0, |
|
"community|arabic_mmlu:human_sexuality|0": 0, |
|
"community|arabic_mmlu:international_law|0": 0, |
|
"community|arabic_mmlu:jurisprudence|0": 0, |
|
"community|arabic_mmlu:logical_fallacies|0": 0, |
|
"community|arabic_mmlu:machine_learning|0": 0, |
|
"community|arabic_mmlu:management|0": 0, |
|
"community|arabic_mmlu:marketing|0": 0, |
|
"community|arabic_mmlu:medical_genetics|0": 0, |
|
"community|arabic_mmlu:miscellaneous|0": 0, |
|
"community|arabic_mmlu:moral_disputes|0": 0, |
|
"community|arabic_mmlu:moral_scenarios|0": 0, |
|
"community|arabic_mmlu:nutrition|0": 0, |
|
"community|arabic_mmlu:philosophy|0": 0, |
|
"community|arabic_mmlu:prehistory|0": 0, |
|
"community|arabic_mmlu:professional_accounting|0": 0, |
|
"community|arabic_mmlu:professional_law|0": 0, |
|
"community|arabic_mmlu:professional_medicine|0": 0, |
|
"community|arabic_mmlu:professional_psychology|0": 0, |
|
"community|arabic_mmlu:public_relations|0": 0, |
|
"community|arabic_mmlu:security_studies|0": 0, |
|
"community|arabic_mmlu:sociology|0": 0, |
|
"community|arabic_mmlu:us_foreign_policy|0": 0, |
|
"community|arabic_mmlu:virology|0": 0, |
|
"community|arabic_mmlu:world_religions|0": 0, |
|
"community|arc_challenge_okapi_ar|0": 0, |
|
"community|arc_easy_ar|0": 0, |
|
"community|boolq_ar|0": 0, |
|
"community|copa_ext_ar|0": 0, |
|
"community|hellaswag_okapi_ar|0": 0, |
|
"community|openbook_qa_ext_ar|0": 0, |
|
"community|piqa_ar|0": 0, |
|
"community|race_ar|0": 0, |
|
"community|sciq_ar|0": 0, |
|
"community|toxigen_ar|0": 0, |
|
"lighteval|xstory_cloze:ar|0": 0 |
|
}, |
|
"config_tasks": { |
|
"community|acva:Algeria": { |
|
"name": "acva:Algeria", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Algeria", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Ancient_Egypt": { |
|
"name": "acva:Ancient_Egypt", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Ancient_Egypt", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 315, |
|
"effective_num_docs": 315, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arab_Empire": { |
|
"name": "acva:Arab_Empire", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arab_Empire", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 265, |
|
"effective_num_docs": 265, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Architecture": { |
|
"name": "acva:Arabic_Architecture", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Architecture", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Art": { |
|
"name": "acva:Arabic_Art", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Art", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Astronomy": { |
|
"name": "acva:Arabic_Astronomy", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Astronomy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Calligraphy": { |
|
"name": "acva:Arabic_Calligraphy", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Calligraphy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 255, |
|
"effective_num_docs": 255, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Ceremony": { |
|
"name": "acva:Arabic_Ceremony", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Ceremony", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 185, |
|
"effective_num_docs": 185, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Clothing": { |
|
"name": "acva:Arabic_Clothing", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Clothing", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Culture": { |
|
"name": "acva:Arabic_Culture", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Culture", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Food": { |
|
"name": "acva:Arabic_Food", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Food", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Funeral": { |
|
"name": "acva:Arabic_Funeral", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Funeral", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 95, |
|
"effective_num_docs": 95, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Geography": { |
|
"name": "acva:Arabic_Geography", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Geography", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_History": { |
|
"name": "acva:Arabic_History", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_History", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Language_Origin": { |
|
"name": "acva:Arabic_Language_Origin", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Language_Origin", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 95, |
|
"effective_num_docs": 95, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Literature": { |
|
"name": "acva:Arabic_Literature", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Literature", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Math": { |
|
"name": "acva:Arabic_Math", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Math", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Medicine": { |
|
"name": "acva:Arabic_Medicine", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Medicine", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Music": { |
|
"name": "acva:Arabic_Music", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Music", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 139, |
|
"effective_num_docs": 139, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Ornament": { |
|
"name": "acva:Arabic_Ornament", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Ornament", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Philosophy": { |
|
"name": "acva:Arabic_Philosophy", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Philosophy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Physics_and_Chemistry": { |
|
"name": "acva:Arabic_Physics_and_Chemistry", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Physics_and_Chemistry", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Wedding": { |
|
"name": "acva:Arabic_Wedding", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Wedding", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Bahrain": { |
|
"name": "acva:Bahrain", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Bahrain", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Comoros": { |
|
"name": "acva:Comoros", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Comoros", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Egypt_modern": { |
|
"name": "acva:Egypt_modern", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Egypt_modern", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 95, |
|
"effective_num_docs": 95, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromAncientEgypt": { |
|
"name": "acva:InfluenceFromAncientEgypt", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromAncientEgypt", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromByzantium": { |
|
"name": "acva:InfluenceFromByzantium", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromByzantium", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromChina": { |
|
"name": "acva:InfluenceFromChina", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromChina", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromGreece": { |
|
"name": "acva:InfluenceFromGreece", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromGreece", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromIslam": { |
|
"name": "acva:InfluenceFromIslam", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromIslam", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromPersia": { |
|
"name": "acva:InfluenceFromPersia", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromPersia", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 175, |
|
"effective_num_docs": 175, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromRome": { |
|
"name": "acva:InfluenceFromRome", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromRome", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Iraq": { |
|
"name": "acva:Iraq", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Iraq", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 85, |
|
"effective_num_docs": 85, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Islam_Education": { |
|
"name": "acva:Islam_Education", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Islam_Education", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Islam_branches_and_schools": { |
|
"name": "acva:Islam_branches_and_schools", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Islam_branches_and_schools", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 175, |
|
"effective_num_docs": 175, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Islamic_law_system": { |
|
"name": "acva:Islamic_law_system", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Islamic_law_system", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Jordan": { |
|
"name": "acva:Jordan", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Jordan", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Kuwait": { |
|
"name": "acva:Kuwait", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Kuwait", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Lebanon": { |
|
"name": "acva:Lebanon", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Lebanon", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Libya": { |
|
"name": "acva:Libya", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Libya", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Mauritania": { |
|
"name": "acva:Mauritania", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Mauritania", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Mesopotamia_civilization": { |
|
"name": "acva:Mesopotamia_civilization", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Mesopotamia_civilization", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 155, |
|
"effective_num_docs": 155, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Morocco": { |
|
"name": "acva:Morocco", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Morocco", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Oman": { |
|
"name": "acva:Oman", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Oman", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Palestine": { |
|
"name": "acva:Palestine", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Palestine", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 85, |
|
"effective_num_docs": 85, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Qatar": { |
|
"name": "acva:Qatar", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Qatar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Saudi_Arabia": { |
|
"name": "acva:Saudi_Arabia", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Saudi_Arabia", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Somalia": { |
|
"name": "acva:Somalia", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Somalia", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Sudan": { |
|
"name": "acva:Sudan", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Sudan", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Syria": { |
|
"name": "acva:Syria", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Syria", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Tunisia": { |
|
"name": "acva:Tunisia", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Tunisia", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:United_Arab_Emirates": { |
|
"name": "acva:United_Arab_Emirates", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "United_Arab_Emirates", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 85, |
|
"effective_num_docs": 85, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Yemen": { |
|
"name": "acva:Yemen", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Yemen", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 10, |
|
"effective_num_docs": 10, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:communication": { |
|
"name": "acva:communication", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "communication", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 364, |
|
"effective_num_docs": 364, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:computer_and_phone": { |
|
"name": "acva:computer_and_phone", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "computer_and_phone", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 295, |
|
"effective_num_docs": 295, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:daily_life": { |
|
"name": "acva:daily_life", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "daily_life", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 337, |
|
"effective_num_docs": 337, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:entertainment": { |
|
"name": "acva:entertainment", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "entertainment", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 295, |
|
"effective_num_docs": 295, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:mcq_exams_test_ar": { |
|
"name": "alghafa:mcq_exams_test_ar", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "mcq_exams_test_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 557, |
|
"effective_num_docs": 557, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:meta_ar_dialects": { |
|
"name": "alghafa:meta_ar_dialects", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "meta_ar_dialects", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 5395, |
|
"effective_num_docs": 5395, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:meta_ar_msa": { |
|
"name": "alghafa:meta_ar_msa", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "meta_ar_msa", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": { |
|
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_facts_truefalse_balanced_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 75, |
|
"effective_num_docs": 75, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task": { |
|
"name": "alghafa:multiple_choice_grounded_statement_soqal_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_grounded_statement_soqal_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 150, |
|
"effective_num_docs": 150, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { |
|
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 150, |
|
"effective_num_docs": 150, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { |
|
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 7995, |
|
"effective_num_docs": 7995, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task": { |
|
"name": "alghafa:multiple_choice_rating_sentiment_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_rating_sentiment_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 5995, |
|
"effective_num_docs": 5995, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task": { |
|
"name": "alghafa:multiple_choice_sentiment_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_sentiment_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1720, |
|
"effective_num_docs": 1720, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_exams": { |
|
"name": "arabic_exams", |
|
"prompt_function": "arabic_exams", |
|
"hf_repo": "OALL/Arabic_EXAMS", |
|
"hf_subset": "default", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 537, |
|
"effective_num_docs": 537, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:abstract_algebra": { |
|
"name": "arabic_mmlu:abstract_algebra", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "abstract_algebra", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:anatomy": { |
|
"name": "arabic_mmlu:anatomy", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "anatomy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 135, |
|
"effective_num_docs": 135, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:astronomy": { |
|
"name": "arabic_mmlu:astronomy", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "astronomy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 152, |
|
"effective_num_docs": 152, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:business_ethics": { |
|
"name": "arabic_mmlu:business_ethics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "business_ethics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:clinical_knowledge": { |
|
"name": "arabic_mmlu:clinical_knowledge", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "clinical_knowledge", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 265, |
|
"effective_num_docs": 265, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_biology": { |
|
"name": "arabic_mmlu:college_biology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_biology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 144, |
|
"effective_num_docs": 144, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_chemistry": { |
|
"name": "arabic_mmlu:college_chemistry", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_chemistry", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_computer_science": { |
|
"name": "arabic_mmlu:college_computer_science", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_computer_science", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_mathematics": { |
|
"name": "arabic_mmlu:college_mathematics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_medicine": { |
|
"name": "arabic_mmlu:college_medicine", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_medicine", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 173, |
|
"effective_num_docs": 173, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_physics": { |
|
"name": "arabic_mmlu:college_physics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_physics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 102, |
|
"effective_num_docs": 102, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:computer_security": { |
|
"name": "arabic_mmlu:computer_security", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "computer_security", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:conceptual_physics": { |
|
"name": "arabic_mmlu:conceptual_physics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "conceptual_physics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 235, |
|
"effective_num_docs": 235, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:econometrics": { |
|
"name": "arabic_mmlu:econometrics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "econometrics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 114, |
|
"effective_num_docs": 114, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:electrical_engineering": { |
|
"name": "arabic_mmlu:electrical_engineering", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "electrical_engineering", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:elementary_mathematics": { |
|
"name": "arabic_mmlu:elementary_mathematics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "elementary_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 378, |
|
"effective_num_docs": 378, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:formal_logic": { |
|
"name": "arabic_mmlu:formal_logic", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "formal_logic", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 126, |
|
"effective_num_docs": 126, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:global_facts": { |
|
"name": "arabic_mmlu:global_facts", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "global_facts", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_biology": { |
|
"name": "arabic_mmlu:high_school_biology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_biology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 310, |
|
"effective_num_docs": 310, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_chemistry": { |
|
"name": "arabic_mmlu:high_school_chemistry", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_chemistry", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 203, |
|
"effective_num_docs": 203, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_computer_science": { |
|
"name": "arabic_mmlu:high_school_computer_science", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_computer_science", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_european_history": { |
|
"name": "arabic_mmlu:high_school_european_history", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_european_history", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 165, |
|
"effective_num_docs": 165, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_geography": { |
|
"name": "arabic_mmlu:high_school_geography", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_geography", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 198, |
|
"effective_num_docs": 198, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_government_and_politics": { |
|
"name": "arabic_mmlu:high_school_government_and_politics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_government_and_politics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 193, |
|
"effective_num_docs": 193, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_macroeconomics": { |
|
"name": "arabic_mmlu:high_school_macroeconomics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_macroeconomics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 390, |
|
"effective_num_docs": 390, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_mathematics": { |
|
"name": "arabic_mmlu:high_school_mathematics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 270, |
|
"effective_num_docs": 270, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_microeconomics": { |
|
"name": "arabic_mmlu:high_school_microeconomics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_microeconomics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 238, |
|
"effective_num_docs": 238, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_physics": { |
|
"name": "arabic_mmlu:high_school_physics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_physics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 151, |
|
"effective_num_docs": 151, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_psychology": { |
|
"name": "arabic_mmlu:high_school_psychology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_psychology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 545, |
|
"effective_num_docs": 545, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_statistics": { |
|
"name": "arabic_mmlu:high_school_statistics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_statistics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 216, |
|
"effective_num_docs": 216, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_us_history": { |
|
"name": "arabic_mmlu:high_school_us_history", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_us_history", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 204, |
|
"effective_num_docs": 204, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_world_history": { |
|
"name": "arabic_mmlu:high_school_world_history", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_world_history", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 237, |
|
"effective_num_docs": 237, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:human_aging": { |
|
"name": "arabic_mmlu:human_aging", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "human_aging", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 223, |
|
"effective_num_docs": 223, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:human_sexuality": { |
|
"name": "arabic_mmlu:human_sexuality", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "human_sexuality", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 131, |
|
"effective_num_docs": 131, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:international_law": { |
|
"name": "arabic_mmlu:international_law", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "international_law", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 121, |
|
"effective_num_docs": 121, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:jurisprudence": { |
|
"name": "arabic_mmlu:jurisprudence", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "jurisprudence", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 108, |
|
"effective_num_docs": 108, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:logical_fallacies": { |
|
"name": "arabic_mmlu:logical_fallacies", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "logical_fallacies", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 163, |
|
"effective_num_docs": 163, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:machine_learning": { |
|
"name": "arabic_mmlu:machine_learning", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "machine_learning", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 112, |
|
"effective_num_docs": 112, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:management": { |
|
"name": "arabic_mmlu:management", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "management", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 103, |
|
"effective_num_docs": 103, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:marketing": { |
|
"name": "arabic_mmlu:marketing", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "marketing", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 234, |
|
"effective_num_docs": 234, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:medical_genetics": { |
|
"name": "arabic_mmlu:medical_genetics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "medical_genetics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:miscellaneous": { |
|
"name": "arabic_mmlu:miscellaneous", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "miscellaneous", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 783, |
|
"effective_num_docs": 783, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:moral_disputes": { |
|
"name": "arabic_mmlu:moral_disputes", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "moral_disputes", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 346, |
|
"effective_num_docs": 346, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:moral_scenarios": { |
|
"name": "arabic_mmlu:moral_scenarios", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "moral_scenarios", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:nutrition": { |
|
"name": "arabic_mmlu:nutrition", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "nutrition", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 306, |
|
"effective_num_docs": 306, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:philosophy": { |
|
"name": "arabic_mmlu:philosophy", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "philosophy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 311, |
|
"effective_num_docs": 311, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:prehistory": { |
|
"name": "arabic_mmlu:prehistory", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "prehistory", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 324, |
|
"effective_num_docs": 324, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:professional_accounting": { |
|
"name": "arabic_mmlu:professional_accounting", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "professional_accounting", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 282, |
            "effective_num_docs": 282,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|arabic_mmlu:professional_law": {
            "name": "arabic_mmlu:professional_law",
            "prompt_function": "mmlu_arabic",
            "hf_repo": "OALL/Arabic_MMLU",
            "hf_subset": "professional_law",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "dev"],
            "evaluation_splits": ["test"],
            "few_shots_split": "dev",
            "few_shots_select": "sequential",
            "generation_size": -1,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 1534,
            "effective_num_docs": 1534,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|arabic_mmlu:professional_medicine": {
            "name": "arabic_mmlu:professional_medicine",
            "prompt_function": "mmlu_arabic",
            "hf_repo": "OALL/Arabic_MMLU",
            "hf_subset": "professional_medicine",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "dev"],
            "evaluation_splits": ["test"],
            "few_shots_split": "dev",
            "few_shots_select": "sequential",
            "generation_size": -1,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 272,
            "effective_num_docs": 272,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|arabic_mmlu:professional_psychology": {
            "name": "arabic_mmlu:professional_psychology",
            "prompt_function": "mmlu_arabic",
            "hf_repo": "OALL/Arabic_MMLU",
            "hf_subset": "professional_psychology",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "dev"],
            "evaluation_splits": ["test"],
            "few_shots_split": "dev",
            "few_shots_select": "sequential",
            "generation_size": -1,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 612,
            "effective_num_docs": 612,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|arabic_mmlu:public_relations": {
            "name": "arabic_mmlu:public_relations",
            "prompt_function": "mmlu_arabic",
            "hf_repo": "OALL/Arabic_MMLU",
            "hf_subset": "public_relations",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "dev"],
            "evaluation_splits": ["test"],
            "few_shots_split": "dev",
            "few_shots_select": "sequential",
            "generation_size": -1,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 110,
            "effective_num_docs": 110,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|arabic_mmlu:security_studies": {
            "name": "arabic_mmlu:security_studies",
            "prompt_function": "mmlu_arabic",
            "hf_repo": "OALL/Arabic_MMLU",
            "hf_subset": "security_studies",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "dev"],
            "evaluation_splits": ["test"],
            "few_shots_split": "dev",
            "few_shots_select": "sequential",
            "generation_size": -1,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 245,
            "effective_num_docs": 245,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|arabic_mmlu:sociology": {
            "name": "arabic_mmlu:sociology",
            "prompt_function": "mmlu_arabic",
            "hf_repo": "OALL/Arabic_MMLU",
            "hf_subset": "sociology",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "dev"],
            "evaluation_splits": ["test"],
            "few_shots_split": "dev",
            "few_shots_select": "sequential",
            "generation_size": -1,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 201,
            "effective_num_docs": 201,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|arabic_mmlu:us_foreign_policy": {
            "name": "arabic_mmlu:us_foreign_policy",
            "prompt_function": "mmlu_arabic",
            "hf_repo": "OALL/Arabic_MMLU",
            "hf_subset": "us_foreign_policy",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "dev"],
            "evaluation_splits": ["test"],
            "few_shots_split": "dev",
            "few_shots_select": "sequential",
            "generation_size": -1,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 100,
            "effective_num_docs": 100,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|arabic_mmlu:virology": {
            "name": "arabic_mmlu:virology",
            "prompt_function": "mmlu_arabic",
            "hf_repo": "OALL/Arabic_MMLU",
            "hf_subset": "virology",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "dev"],
            "evaluation_splits": ["test"],
            "few_shots_split": "dev",
            "few_shots_select": "sequential",
            "generation_size": -1,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 166,
            "effective_num_docs": 166,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|arabic_mmlu:world_religions": {
            "name": "arabic_mmlu:world_religions",
            "prompt_function": "mmlu_arabic",
            "hf_repo": "OALL/Arabic_MMLU",
            "hf_subset": "world_religions",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "dev"],
            "evaluation_splits": ["test"],
            "few_shots_split": "dev",
            "few_shots_select": "sequential",
            "generation_size": -1,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 171,
            "effective_num_docs": 171,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|arc_challenge_okapi_ar": {
            "name": "arc_challenge_okapi_ar",
            "prompt_function": "alghafa_prompt",
            "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
            "hf_subset": "arc_challenge_okapi_ar",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "validation"],
            "evaluation_splits": ["test"],
            "few_shots_split": "validation",
            "few_shots_select": "sequential",
            "generation_size": null,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 1160,
            "effective_num_docs": 1160,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|arc_easy_ar": {
            "name": "arc_easy_ar",
            "prompt_function": "alghafa_prompt",
            "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
            "hf_subset": "arc_easy_ar",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "validation"],
            "evaluation_splits": ["test"],
            "few_shots_split": "validation",
            "few_shots_select": "sequential",
            "generation_size": null,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 2364,
            "effective_num_docs": 2364,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|boolq_ar": {
            "name": "boolq_ar",
            "prompt_function": "boolq_prompt_arabic",
            "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
            "hf_subset": "boolq_ar",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "validation"],
            "evaluation_splits": ["test"],
            "few_shots_split": "validation",
            "few_shots_select": "sequential",
            "generation_size": null,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 3260,
            "effective_num_docs": 3260,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|copa_ext_ar": {
            "name": "copa_ext_ar",
            "prompt_function": "copa_prompt_arabic",
            "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
            "hf_subset": "copa_ext_ar",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "validation"],
            "evaluation_splits": ["test"],
            "few_shots_split": "validation",
            "few_shots_select": "sequential",
            "generation_size": null,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 90,
            "effective_num_docs": 90,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|hellaswag_okapi_ar": {
            "name": "hellaswag_okapi_ar",
            "prompt_function": "hellaswag_prompt_arabic",
            "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
            "hf_subset": "hellaswag_okapi_ar",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "validation"],
            "evaluation_splits": ["test"],
            "few_shots_split": "validation",
            "few_shots_select": "sequential",
            "generation_size": null,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 9171,
            "effective_num_docs": 9171,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|openbook_qa_ext_ar": {
            "name": "openbook_qa_ext_ar",
            "prompt_function": "alghafa_prompt",
            "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
            "hf_subset": "openbook_qa_ext_ar",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "validation"],
            "evaluation_splits": ["test"],
            "few_shots_split": "validation",
            "few_shots_select": "sequential",
            "generation_size": null,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 495,
            "effective_num_docs": 495,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|piqa_ar": {
            "name": "piqa_ar",
            "prompt_function": "alghafa_prompt",
            "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
            "hf_subset": "piqa_ar",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "validation"],
            "evaluation_splits": ["test"],
            "few_shots_split": "validation",
            "few_shots_select": "sequential",
            "generation_size": null,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 1833,
            "effective_num_docs": 1833,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|race_ar": {
            "name": "race_ar",
            "prompt_function": "alghafa_prompt",
            "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
            "hf_subset": "race_ar",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "validation"],
            "evaluation_splits": ["test"],
            "few_shots_split": "validation",
            "few_shots_select": "sequential",
            "generation_size": null,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 4929,
            "effective_num_docs": 4929,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|sciq_ar": {
            "name": "sciq_ar",
            "prompt_function": "sciq_prompt_arabic",
            "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
            "hf_subset": "sciq_ar",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "validation"],
            "evaluation_splits": ["test"],
            "few_shots_split": "validation",
            "few_shots_select": "sequential",
            "generation_size": null,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 995,
            "effective_num_docs": 995,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "community|toxigen_ar": {
            "name": "toxigen_ar",
            "prompt_function": "toxigen_prompt_arabic",
            "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
            "hf_subset": "toxigen_ar",
            "metric": ["loglikelihood_acc_norm"],
            "hf_avail_splits": ["test", "validation"],
            "evaluation_splits": ["test"],
            "few_shots_split": "validation",
            "few_shots_select": "sequential",
            "generation_size": null,
            "stop_sequence": null,
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["community"],
            "original_num_docs": 935,
            "effective_num_docs": 935,
            "trust_dataset": null,
            "must_remove_duplicate_docs": null,
            "version": 0
        },
        "lighteval|xstory_cloze:ar": {
            "name": "xstory_cloze:ar",
            "prompt_function": "storycloze",
            "hf_repo": "juletxara/xstory_cloze",
            "hf_subset": "ar",
            "metric": ["loglikelihood_acc"],
            "hf_avail_splits": ["training", "eval"],
            "evaluation_splits": ["eval"],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": -1,
            "stop_sequence": ["\n"],
            "output_regex": null,
            "num_samples": null,
            "frozen": false,
            "suite": ["lighteval"],
            "original_num_docs": 1511,
            "effective_num_docs": 1511,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null,
            "version": 0
        }
    },
    "summary_tasks": {
        "community|acva:Algeria|0": {
            "hashes": {
                "hash_examples": "da5a3003cd46f6f9",
                "hash_full_prompts": "da5a3003cd46f6f9",
                "hash_input_tokens": "8f83f11a8ad17da9",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Ancient_Egypt|0": {
            "hashes": {
                "hash_examples": "52d6f767fede195b",
                "hash_full_prompts": "52d6f767fede195b",
                "hash_input_tokens": "5f20814e9399e6c8",
                "hash_cont_tokens": "02551e3a7799fc07"
            },
            "truncated": 0,
            "non_truncated": 315,
            "padded": 630,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arab_Empire|0": {
            "hashes": {
                "hash_examples": "8dacff6a79804a75",
                "hash_full_prompts": "8dacff6a79804a75",
                "hash_input_tokens": "c60b5855fdf1018b",
                "hash_cont_tokens": "b2ac35753faf18a4"
            },
            "truncated": 0,
            "non_truncated": 265,
            "padded": 530,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Architecture|0": {
            "hashes": {
                "hash_examples": "df286cd862d9f6bb",
                "hash_full_prompts": "df286cd862d9f6bb",
                "hash_input_tokens": "68f206265a42277d",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Art|0": {
            "hashes": {
                "hash_examples": "112883d764118a49",
                "hash_full_prompts": "112883d764118a49",
                "hash_input_tokens": "791ecba2faa0e629",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Astronomy|0": {
            "hashes": {
                "hash_examples": "20dcdf2454bf8671",
                "hash_full_prompts": "20dcdf2454bf8671",
                "hash_input_tokens": "382e64f009d287af",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Calligraphy|0": {
            "hashes": {
                "hash_examples": "3a9f9d1ebe868a15",
                "hash_full_prompts": "3a9f9d1ebe868a15",
                "hash_input_tokens": "b0bd9a216f45c00b",
                "hash_cont_tokens": "df078ae831ab33af"
            },
            "truncated": 0,
            "non_truncated": 255,
            "padded": 510,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Ceremony|0": {
            "hashes": {
                "hash_examples": "c927630f8d2f44da",
                "hash_full_prompts": "c927630f8d2f44da",
                "hash_input_tokens": "7fc58aacf71b48e4",
                "hash_cont_tokens": "d141b5e1fb8ad2aa"
            },
            "truncated": 0,
            "non_truncated": 185,
            "padded": 370,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Clothing|0": {
            "hashes": {
                "hash_examples": "6ad0740c2ac6ac92",
                "hash_full_prompts": "6ad0740c2ac6ac92",
                "hash_input_tokens": "6c3b415bea66174a",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Culture|0": {
            "hashes": {
                "hash_examples": "2177bd857ad872ae",
                "hash_full_prompts": "2177bd857ad872ae",
                "hash_input_tokens": "fb7b263bbbb6a029",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Food|0": {
            "hashes": {
                "hash_examples": "a6ada65b71d7c9c5",
                "hash_full_prompts": "a6ada65b71d7c9c5",
                "hash_input_tokens": "e48b97bcbb0d0c5b",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Funeral|0": {
            "hashes": {
                "hash_examples": "fcee39dc29eaae91",
                "hash_full_prompts": "fcee39dc29eaae91",
                "hash_input_tokens": "94b0f1bde579d725",
                "hash_cont_tokens": "4fde58d0d9fc5b50"
            },
            "truncated": 0,
            "non_truncated": 95,
            "padded": 190,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Geography|0": {
            "hashes": {
                "hash_examples": "d36eda7c89231c02",
                "hash_full_prompts": "d36eda7c89231c02",
                "hash_input_tokens": "dc2d1675c69e7256",
                "hash_cont_tokens": "baa6b32f3f28c10d"
            },
            "truncated": 0,
            "non_truncated": 145,
            "padded": 290,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_History|0": {
            "hashes": {
                "hash_examples": "6354ac0d6db6a5fc",
                "hash_full_prompts": "6354ac0d6db6a5fc",
                "hash_input_tokens": "409d41ce5ba3400d",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Language_Origin|0": {
            "hashes": {
                "hash_examples": "ddc967c8aca34402",
                "hash_full_prompts": "ddc967c8aca34402",
                "hash_input_tokens": "ed535dc7fe108a9f",
                "hash_cont_tokens": "4fde58d0d9fc5b50"
            },
            "truncated": 0,
            "non_truncated": 95,
            "padded": 190,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Literature|0": {
            "hashes": {
                "hash_examples": "4305379fd46be5d8",
                "hash_full_prompts": "4305379fd46be5d8",
                "hash_input_tokens": "8e7dd873ae5075c4",
                "hash_cont_tokens": "baa6b32f3f28c10d"
            },
            "truncated": 0,
            "non_truncated": 145,
            "padded": 290,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Math|0": {
            "hashes": {
                "hash_examples": "dec621144f4d28be",
                "hash_full_prompts": "dec621144f4d28be",
                "hash_input_tokens": "306b399020d1b728",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Medicine|0": {
            "hashes": {
                "hash_examples": "2b344cdae9495ff2",
                "hash_full_prompts": "2b344cdae9495ff2",
                "hash_input_tokens": "abc9b745c31d600c",
                "hash_cont_tokens": "baa6b32f3f28c10d"
            },
            "truncated": 0,
            "non_truncated": 145,
            "padded": 290,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Music|0": {
            "hashes": {
                "hash_examples": "0c54624d881944ce",
                "hash_full_prompts": "0c54624d881944ce",
                "hash_input_tokens": "f33d8e3841dec094",
                "hash_cont_tokens": "af1670664011d93d"
            },
            "truncated": 0,
            "non_truncated": 139,
            "padded": 278,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Ornament|0": {
            "hashes": {
                "hash_examples": "251a4a84289d8bc1",
                "hash_full_prompts": "251a4a84289d8bc1",
                "hash_input_tokens": "88dc06723d0b082c",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Philosophy|0": {
            "hashes": {
                "hash_examples": "3f86fb9c94c13d22",
                "hash_full_prompts": "3f86fb9c94c13d22",
                "hash_input_tokens": "c39856a044677df1",
                "hash_cont_tokens": "baa6b32f3f28c10d"
            },
            "truncated": 0,
            "non_truncated": 145,
            "padded": 290,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Physics_and_Chemistry|0": {
            "hashes": {
                "hash_examples": "8fec65af3695b62a",
                "hash_full_prompts": "8fec65af3695b62a",
                "hash_input_tokens": "e446ed9275c00baf",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Arabic_Wedding|0": {
            "hashes": {
                "hash_examples": "9cc3477184d7a4b8",
                "hash_full_prompts": "9cc3477184d7a4b8",
                "hash_input_tokens": "8d0e73699ce527f7",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Bahrain|0": {
            "hashes": {
                "hash_examples": "c92e803a0fa8b9e2",
                "hash_full_prompts": "c92e803a0fa8b9e2",
                "hash_input_tokens": "8273ce84c6d2f820",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Comoros|0": {
            "hashes": {
                "hash_examples": "06e5d4bba8e54cae",
                "hash_full_prompts": "06e5d4bba8e54cae",
                "hash_input_tokens": "e65e32c938b933cf",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Egypt_modern|0": {
            "hashes": {
                "hash_examples": "c6ec369164f93446",
                "hash_full_prompts": "c6ec369164f93446",
                "hash_input_tokens": "a4292ffb46b84a32",
                "hash_cont_tokens": "4fde58d0d9fc5b50"
            },
            "truncated": 0,
            "non_truncated": 95,
            "padded": 190,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:InfluenceFromAncientEgypt|0": {
            "hashes": {
                "hash_examples": "b9d56d74818b9bd4",
                "hash_full_prompts": "b9d56d74818b9bd4",
                "hash_input_tokens": "74ec393ee971cd52",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:InfluenceFromByzantium|0": {
            "hashes": {
                "hash_examples": "5316c9624e7e59b8",
                "hash_full_prompts": "5316c9624e7e59b8",
                "hash_input_tokens": "8b022bacbf3afed0",
                "hash_cont_tokens": "baa6b32f3f28c10d"
            },
            "truncated": 0,
            "non_truncated": 145,
            "padded": 290,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:InfluenceFromChina|0": {
            "hashes": {
                "hash_examples": "87894bce95a56411",
                "hash_full_prompts": "87894bce95a56411",
                "hash_input_tokens": "85e2e224fed52fff",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:InfluenceFromGreece|0": {
            "hashes": {
                "hash_examples": "0baa78a27e469312",
                "hash_full_prompts": "0baa78a27e469312",
                "hash_input_tokens": "8a167be28d7e9881",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:InfluenceFromIslam|0": {
            "hashes": {
                "hash_examples": "0c2532cde6541ff2",
                "hash_full_prompts": "0c2532cde6541ff2",
                "hash_input_tokens": "68da785661d6905b",
                "hash_cont_tokens": "baa6b32f3f28c10d"
            },
            "truncated": 0,
            "non_truncated": 145,
            "padded": 290,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:InfluenceFromPersia|0": {
            "hashes": {
                "hash_examples": "efcd8112dc53c6e5",
                "hash_full_prompts": "efcd8112dc53c6e5",
                "hash_input_tokens": "c244588f75df3348",
                "hash_cont_tokens": "23acea25e70deb27"
            },
            "truncated": 0,
            "non_truncated": 175,
            "padded": 350,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:InfluenceFromRome|0": {
            "hashes": {
                "hash_examples": "9db61480e2e85fd3",
                "hash_full_prompts": "9db61480e2e85fd3",
                "hash_input_tokens": "7901dcf17d731d8c",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Iraq|0": {
            "hashes": {
                "hash_examples": "96dac3dfa8d2f41f",
                "hash_full_prompts": "96dac3dfa8d2f41f",
                "hash_input_tokens": "c68de7eeb582c9b3",
                "hash_cont_tokens": "abfa9c7e3408ad07"
            },
            "truncated": 0,
            "non_truncated": 85,
            "padded": 170,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Islam_Education|0": {
            "hashes": {
                "hash_examples": "0d80355f6a4cb51b",
                "hash_full_prompts": "0d80355f6a4cb51b",
                "hash_input_tokens": "25db6e2f1a4f8dd1",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Islam_branches_and_schools|0": {
            "hashes": {
                "hash_examples": "5cedce1be2c3ad50",
                "hash_full_prompts": "5cedce1be2c3ad50",
                "hash_input_tokens": "afe1c00de0604337",
                "hash_cont_tokens": "23acea25e70deb27"
            },
            "truncated": 0,
            "non_truncated": 175,
            "padded": 350,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Islamic_law_system|0": {
            "hashes": {
                "hash_examples": "c0e6db8bc84e105e",
                "hash_full_prompts": "c0e6db8bc84e105e",
                "hash_input_tokens": "b0403c928ef6cbe5",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Jordan|0": {
            "hashes": {
                "hash_examples": "33deb5b4e5ddd6a1",
                "hash_full_prompts": "33deb5b4e5ddd6a1",
                "hash_input_tokens": "840caf12abfd0f9e",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Kuwait|0": {
            "hashes": {
                "hash_examples": "eb41773346d7c46c",
                "hash_full_prompts": "eb41773346d7c46c",
                "hash_input_tokens": "6dc1d99b6ff9348f",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Lebanon|0": {
            "hashes": {
                "hash_examples": "25932dbf4c13d34f",
                "hash_full_prompts": "25932dbf4c13d34f",
                "hash_input_tokens": "41a7806a97979c1e",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Libya|0": {
            "hashes": {
                "hash_examples": "f2c4db63cd402926",
                "hash_full_prompts": "f2c4db63cd402926",
                "hash_input_tokens": "9faf7a06731ef98a",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Mauritania|0": {
            "hashes": {
                "hash_examples": "8723ab5fdf286b54",
                "hash_full_prompts": "8723ab5fdf286b54",
                "hash_input_tokens": "bcb20a2be7eda20f",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Mesopotamia_civilization|0": {
            "hashes": {
                "hash_examples": "c33f5502a6130ca9",
                "hash_full_prompts": "c33f5502a6130ca9",
                "hash_input_tokens": "97d7e7e46e80cd57",
                "hash_cont_tokens": "1af2ae1742adbc24"
            },
            "truncated": 0,
            "non_truncated": 155,
            "padded": 310,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Morocco|0": {
            "hashes": {
                "hash_examples": "588a5ed27904b1ae",
                "hash_full_prompts": "588a5ed27904b1ae",
                "hash_input_tokens": "a601f57cddcbc0ae",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Oman|0": {
            "hashes": {
                "hash_examples": "d447c52b94248b69",
                "hash_full_prompts": "d447c52b94248b69",
                "hash_input_tokens": "6ad194aa8a114237",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Palestine|0": {
            "hashes": {
                "hash_examples": "19197e076ad14ff5",
                "hash_full_prompts": "19197e076ad14ff5",
                "hash_input_tokens": "f84ffacc1d592e8d",
                "hash_cont_tokens": "abfa9c7e3408ad07"
            },
            "truncated": 0,
            "non_truncated": 85,
            "padded": 170,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Qatar|0": {
            "hashes": {
                "hash_examples": "cf0736fa185b28f6",
                "hash_full_prompts": "cf0736fa185b28f6",
                "hash_input_tokens": "1269956b7a1c4e70",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Saudi_Arabia|0": {
            "hashes": {
                "hash_examples": "69beda6e1b85a08d",
                "hash_full_prompts": "69beda6e1b85a08d",
                "hash_input_tokens": "d7223a3235c113fd",
                "hash_cont_tokens": "8c4ed6e8a1b9384e"
            },
            "truncated": 0,
            "non_truncated": 195,
            "padded": 390,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Somalia|0": {
            "hashes": {
                "hash_examples": "b387940c65784fbf",
                "hash_full_prompts": "b387940c65784fbf",
                "hash_input_tokens": "368defb71a1a60c9",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Sudan|0": {
            "hashes": {
                "hash_examples": "e02c32b9d2dd0c3f",
                "hash_full_prompts": "e02c32b9d2dd0c3f",
                "hash_input_tokens": "40490fb519d3793c",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Syria|0": {
            "hashes": {
                "hash_examples": "60a6f8fe73bda4bb",
                "hash_full_prompts": "60a6f8fe73bda4bb",
                "hash_input_tokens": "f22accf08dceb00b",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Tunisia|0": {
            "hashes": {
                "hash_examples": "34bb15d3830c5649",
                "hash_full_prompts": "34bb15d3830c5649",
                "hash_input_tokens": "e501a154de606b7c",
                "hash_cont_tokens": "ace4dac0482c2b97"
            },
            "truncated": 0,
            "non_truncated": 45,
            "padded": 90,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:United_Arab_Emirates|0": {
            "hashes": {
                "hash_examples": "98a0ba78172718ce",
                "hash_full_prompts": "98a0ba78172718ce",
                "hash_input_tokens": "4848c9103864f264",
                "hash_cont_tokens": "abfa9c7e3408ad07"
            },
            "truncated": 0,
            "non_truncated": 85,
            "padded": 170,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:Yemen|0": {
            "hashes": {
                "hash_examples": "18e9bcccbb4ced7a",
                "hash_full_prompts": "18e9bcccbb4ced7a",
                "hash_input_tokens": "332ac84879783610",
                "hash_cont_tokens": "2670597aba47c825"
            },
            "truncated": 0,
            "non_truncated": 10,
            "padded": 20,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:communication|0": {
            "hashes": {
                "hash_examples": "9ff28ab5eab5c97b",
                "hash_full_prompts": "9ff28ab5eab5c97b",
                "hash_input_tokens": "d33a20043b6f034e",
                "hash_cont_tokens": "697eee2f0c78642e"
            },
            "truncated": 0,
            "non_truncated": 364,
            "padded": 728,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:computer_and_phone|0": {
            "hashes": {
                "hash_examples": "37bac2f086aaf6c2",
                "hash_full_prompts": "37bac2f086aaf6c2",
                "hash_input_tokens": "7489d4f6815851bd",
                "hash_cont_tokens": "d33be83a92fe5a3c"
            },
            "truncated": 0,
            "non_truncated": 295,
            "padded": 590,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:daily_life|0": {
            "hashes": {
                "hash_examples": "bf07363c1c252e2f",
                "hash_full_prompts": "bf07363c1c252e2f",
                "hash_input_tokens": "036bd7b88c52efae",
                "hash_cont_tokens": "8d6b2ac47099f43c"
            },
            "truncated": 0,
            "non_truncated": 337,
            "padded": 674,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|acva:entertainment|0": {
            "hashes": {
                "hash_examples": "37077bc00f0ac56a",
                "hash_full_prompts": "37077bc00f0ac56a",
                "hash_input_tokens": "6e8dcc62cb9a41cb",
                "hash_cont_tokens": "d33be83a92fe5a3c"
            },
            "truncated": 0,
            "non_truncated": 295,
            "padded": 590,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|alghafa:mcq_exams_test_ar|0": {
            "hashes": {
                "hash_examples": "c07a5e78c5c0b8fe",
                "hash_full_prompts": "c07a5e78c5c0b8fe",
                "hash_input_tokens": "3fc6afdef84dba0f",
                "hash_cont_tokens": "7f907530d0fd020e"
            },
            "truncated": 0,
            "non_truncated": 557,
            "padded": 2228,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|alghafa:meta_ar_dialects|0": {
            "hashes": {
                "hash_examples": "c0b6081f83e14064",
                "hash_full_prompts": "c0b6081f83e14064",
                "hash_input_tokens": "b20ae63ba57cb3c0",
                "hash_cont_tokens": "8dc61709faf12e74"
            },
            "truncated": 0,
            "non_truncated": 5395,
            "padded": 21580,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|alghafa:meta_ar_msa|0": {
            "hashes": {
                "hash_examples": "64eb78a7c5b7484b",
                "hash_full_prompts": "64eb78a7c5b7484b",
                "hash_input_tokens": "9aefb868b68eea82",
                "hash_cont_tokens": "d3545e11ef188cd0"
            },
            "truncated": 0,
            "non_truncated": 895,
            "padded": 3580,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
            "hashes": {
                "hash_examples": "54fc3502c1c02c06",
                "hash_full_prompts": "54fc3502c1c02c06",
                "hash_input_tokens": "a82f83b63bf5624c",
                "hash_cont_tokens": "b3d037d09ace8d7c"
            },
            "truncated": 0,
            "non_truncated": 75,
            "padded": 150,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
            "hashes": {
                "hash_examples": "46572d83696552ae",
                "hash_full_prompts": "46572d83696552ae",
                "hash_input_tokens": "cc72a3f27d560dcc",
                "hash_cont_tokens": "0faf53527a163be7"
            },
            "truncated": 0,
            "non_truncated": 150,
            "padded": 749,
            "non_padded": 1,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
            "hashes": {
                "hash_examples": "f430d97ff715bc1c",
                "hash_full_prompts": "f430d97ff715bc1c",
                "hash_input_tokens": "12e7d701275ab938",
                "hash_cont_tokens": "8280078ee8a38203"
            },
            "truncated": 0,
            "non_truncated": 150,
            "padded": 749,
            "non_padded": 1,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
            "hashes": {
                "hash_examples": "6b70a7416584f98c",
                "hash_full_prompts": "6b70a7416584f98c",
                "hash_input_tokens": "dadd57098a72a861",
                "hash_cont_tokens": "a288013b4fde6e42"
            },
            "truncated": 0,
            "non_truncated": 7995,
            "padded": 15990,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|alghafa:multiple_choice_rating_sentiment_task|0": {
            "hashes": {
                "hash_examples": "bc2005cc9d2f436e",
                "hash_full_prompts": "bc2005cc9d2f436e",
                "hash_input_tokens": "130620f4358ce109",
                "hash_cont_tokens": "f1d66eb38974410c"
            },
            "truncated": 0,
            "non_truncated": 5995,
            "padded": 17716,
            "non_padded": 269,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|alghafa:multiple_choice_sentiment_task|0": {
            "hashes": {
                "hash_examples": "6fb0e254ea5945d8",
                "hash_full_prompts": "6fb0e254ea5945d8",
                "hash_input_tokens": "3e180a0caf47bcaa",
                "hash_cont_tokens": "b3419f2698061061"
            },
            "truncated": 0,
            "non_truncated": 1720,
            "padded": 5046,
            "non_padded": 114,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_exams|0": {
            "hashes": {
                "hash_examples": "6d721df351722656",
                "hash_full_prompts": "6d721df351722656",
                "hash_input_tokens": "32a4aa3a24557387",
                "hash_cont_tokens": "171fb57fd6259975"
            },
            "truncated": 0,
            "non_truncated": 537,
            "padded": 2080,
            "non_padded": 68,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:abstract_algebra|0": {
            "hashes": {
                "hash_examples": "f2ddca8f45c0a511",
                "hash_full_prompts": "f2ddca8f45c0a511",
                "hash_input_tokens": "0213155a5302b36d",
                "hash_cont_tokens": "14f152d098bdd3a0"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 392,
            "non_padded": 8,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:anatomy|0": {
            "hashes": {
                "hash_examples": "dfdbc1b83107668d",
                "hash_full_prompts": "dfdbc1b83107668d",
                "hash_input_tokens": "cfdc330931ca284d",
                "hash_cont_tokens": "2f8458eac638d093"
            },
            "truncated": 0,
            "non_truncated": 135,
            "padded": 528,
            "non_padded": 12,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:astronomy|0": {
            "hashes": {
                "hash_examples": "9736a606002a848e",
                "hash_full_prompts": "9736a606002a848e",
                "hash_input_tokens": "67631f4a25105790",
                "hash_cont_tokens": "098d2a6d06caf0c7"
            },
            "truncated": 0,
            "non_truncated": 152,
            "padded": 600,
            "non_padded": 8,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:business_ethics|0": {
            "hashes": {
                "hash_examples": "735e452fbb6dc63d",
                "hash_full_prompts": "735e452fbb6dc63d",
                "hash_input_tokens": "50388f6a99af1826",
                "hash_cont_tokens": "14f152d098bdd3a0"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 388,
            "non_padded": 12,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:clinical_knowledge|0": {
            "hashes": {
                "hash_examples": "6ab0ca4da98aedcf",
                "hash_full_prompts": "6ab0ca4da98aedcf",
                "hash_input_tokens": "d25b58a31c14049e",
                "hash_cont_tokens": "9d535d367f84244a"
            },
            "truncated": 0,
            "non_truncated": 265,
            "padded": 1044,
            "non_padded": 16,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:college_biology|0": {
            "hashes": {
                "hash_examples": "17e4e390848018a4",
                "hash_full_prompts": "17e4e390848018a4",
                "hash_input_tokens": "20dd1097daa5158e",
                "hash_cont_tokens": "6228bf41353d74fe"
            },
            "truncated": 0,
            "non_truncated": 144,
            "padded": 576,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:college_chemistry|0": {
            "hashes": {
                "hash_examples": "4abb169f6dfd234b",
                "hash_full_prompts": "4abb169f6dfd234b",
                "hash_input_tokens": "22f0b14f83ee96e1",
                "hash_cont_tokens": "14f152d098bdd3a0"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 396,
            "non_padded": 4,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:college_computer_science|0": {
            "hashes": {
                "hash_examples": "a369e2e941358a1e",
                "hash_full_prompts": "a369e2e941358a1e",
                "hash_input_tokens": "6f427018e10b6e70",
                "hash_cont_tokens": "14f152d098bdd3a0"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:college_mathematics|0": {
            "hashes": {
                "hash_examples": "d7be03b8b6020bff",
                "hash_full_prompts": "d7be03b8b6020bff",
                "hash_input_tokens": "41db9dbb4789e6f3",
                "hash_cont_tokens": "14f152d098bdd3a0"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 392,
            "non_padded": 8,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:college_medicine|0": {
            "hashes": {
                "hash_examples": "0518a00f097346bf",
                "hash_full_prompts": "0518a00f097346bf",
                "hash_input_tokens": "a85d92229c2a955e",
                "hash_cont_tokens": "7549c006d7005bc7"
            },
            "truncated": 0,
            "non_truncated": 173,
            "padded": 688,
            "non_padded": 4,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:college_physics|0": {
            "hashes": {
                "hash_examples": "5d842cd49bc70e12",
                "hash_full_prompts": "5d842cd49bc70e12",
                "hash_input_tokens": "cc583b01a57a3829",
                "hash_cont_tokens": "c677bcf45d184788"
            },
            "truncated": 0,
            "non_truncated": 102,
            "padded": 408,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:computer_security|0": {
            "hashes": {
                "hash_examples": "8e85d9f85be9b32f",
                "hash_full_prompts": "8e85d9f85be9b32f",
                "hash_input_tokens": "baa291d385458406",
                "hash_cont_tokens": "14f152d098bdd3a0"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 388,
            "non_padded": 12,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:conceptual_physics|0": {
            "hashes": {
                "hash_examples": "7964b55a0a49502b",
                "hash_full_prompts": "7964b55a0a49502b",
                "hash_input_tokens": "7e0748bf6427aeb1",
                "hash_cont_tokens": "7431f013a85ada93"
            },
            "truncated": 0,
            "non_truncated": 235,
            "padded": 888,
            "non_padded": 52,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:econometrics|0": {
            "hashes": {
                "hash_examples": "1e192eae38347257",
                "hash_full_prompts": "1e192eae38347257",
                "hash_input_tokens": "29aee13f6e80f48b",
                "hash_cont_tokens": "13220ec7589e0834"
            },
            "truncated": 0,
            "non_truncated": 114,
            "padded": 452,
            "non_padded": 4,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:electrical_engineering|0": {
            "hashes": {
                "hash_examples": "cf97671d5c441da1",
                "hash_full_prompts": "cf97671d5c441da1",
                "hash_input_tokens": "13cd96ad4fd3c890",
                "hash_cont_tokens": "5488d4fa8c4a1222"
            },
            "truncated": 0,
            "non_truncated": 145,
            "padded": 572,
            "non_padded": 8,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:elementary_mathematics|0": {
            "hashes": {
                "hash_examples": "6f49107ed43c40c5",
                "hash_full_prompts": "6f49107ed43c40c5",
                "hash_input_tokens": "6dc91a4e6c207684",
                "hash_cont_tokens": "cb8033a2d782c7b5"
            },
            "truncated": 0,
            "non_truncated": 378,
            "padded": 1492,
            "non_padded": 20,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:formal_logic|0": {
            "hashes": {
                "hash_examples": "7922c376008ba77b",
                "hash_full_prompts": "7922c376008ba77b",
                "hash_input_tokens": "26f55c1664c094bc",
                "hash_cont_tokens": "b6e3f9dbc76ce037"
            },
            "truncated": 0,
            "non_truncated": 126,
            "padded": 504,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:global_facts|0": {
            "hashes": {
                "hash_examples": "11f9813185047d5b",
                "hash_full_prompts": "11f9813185047d5b",
                "hash_input_tokens": "295d87260aeff815",
                "hash_cont_tokens": "14f152d098bdd3a0"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 384,
            "non_padded": 16,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_biology|0": {
            "hashes": {
                "hash_examples": "2a804b1d90cbe66e",
                "hash_full_prompts": "2a804b1d90cbe66e",
                "hash_input_tokens": "1bdaec1afdbf1f6e",
                "hash_cont_tokens": "ddab4e7b2a9c6a5c"
            },
            "truncated": 0,
            "non_truncated": 310,
            "padded": 1220,
            "non_padded": 20,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_chemistry|0": {
            "hashes": {
                "hash_examples": "0032168adabc53b4",
                "hash_full_prompts": "0032168adabc53b4",
                "hash_input_tokens": "b9c051bb4b27aba5",
                "hash_cont_tokens": "fd4ea6647cf145d2"
            },
            "truncated": 0,
            "non_truncated": 203,
            "padded": 808,
            "non_padded": 4,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_computer_science|0": {
            "hashes": {
                "hash_examples": "f2fb8740f9df980f",
                "hash_full_prompts": "f2fb8740f9df980f",
                "hash_input_tokens": "5e3819afac957dad",
                "hash_cont_tokens": "14f152d098bdd3a0"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 392,
            "non_padded": 8,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_european_history|0": {
            "hashes": {
                "hash_examples": "73509021e7e66435",
                "hash_full_prompts": "73509021e7e66435",
                "hash_input_tokens": "2e7dca062c7fb096",
                "hash_cont_tokens": "ea459a291992ca85"
            },
            "truncated": 0,
            "non_truncated": 165,
            "padded": 660,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_geography|0": {
            "hashes": {
                "hash_examples": "9e08d1894940ff42",
                "hash_full_prompts": "9e08d1894940ff42",
                "hash_input_tokens": "8715120661c99cde",
                "hash_cont_tokens": "8836eb46ac53b19c"
            },
            "truncated": 0,
            "non_truncated": 198,
            "padded": 768,
            "non_padded": 24,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_government_and_politics|0": {
            "hashes": {
                "hash_examples": "64b7e97817ca6c76",
                "hash_full_prompts": "64b7e97817ca6c76",
                "hash_input_tokens": "7262ad9cfaad3aa7",
                "hash_cont_tokens": "d830568da2d2f70a"
            },
            "truncated": 0,
            "non_truncated": 193,
            "padded": 768,
            "non_padded": 4,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_macroeconomics|0": {
            "hashes": {
                "hash_examples": "9f582da8534bd2ef",
                "hash_full_prompts": "9f582da8534bd2ef",
                "hash_input_tokens": "acbb2fbfd816fadd",
                "hash_cont_tokens": "2e550681d0e0aef9"
            },
            "truncated": 0,
            "non_truncated": 390,
            "padded": 1556,
            "non_padded": 4,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_mathematics|0": {
            "hashes": {
                "hash_examples": "fd54f1c10d423c51",
                "hash_full_prompts": "fd54f1c10d423c51",
                "hash_input_tokens": "01a69a48b5a32d00",
                "hash_cont_tokens": "40b6c49157bcee1e"
            },
            "truncated": 0,
            "non_truncated": 270,
            "padded": 1072,
            "non_padded": 8,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_microeconomics|0": {
            "hashes": {
                "hash_examples": "7037896925aaf42f",
                "hash_full_prompts": "7037896925aaf42f",
                "hash_input_tokens": "275fc2ab32f66f54",
                "hash_cont_tokens": "e281db6be7bdad11"
            },
            "truncated": 0,
            "non_truncated": 238,
            "padded": 952,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_physics|0": {
            "hashes": {
                "hash_examples": "60c3776215167dae",
                "hash_full_prompts": "60c3776215167dae",
                "hash_input_tokens": "26767d38381e0577",
                "hash_cont_tokens": "22f8f4e87ca69ce4"
            },
            "truncated": 0,
            "non_truncated": 151,
            "padded": 604,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_psychology|0": {
            "hashes": {
                "hash_examples": "61176bfd5da1298f",
                "hash_full_prompts": "61176bfd5da1298f",
                "hash_input_tokens": "f2077a32f465623d",
                "hash_cont_tokens": "63c435700adc4992"
            },
            "truncated": 0,
            "non_truncated": 545,
            "padded": 2148,
            "non_padded": 32,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_statistics|0": {
            "hashes": {
                "hash_examples": "40dfeebd1ea10f76",
                "hash_full_prompts": "40dfeebd1ea10f76",
                "hash_input_tokens": "c56e4491f1f9b4f6",
                "hash_cont_tokens": "62c8c96999f26175"
            },
            "truncated": 0,
            "non_truncated": 216,
            "padded": 856,
            "non_padded": 8,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_us_history|0": {
            "hashes": {
                "hash_examples": "03daa510ba917f4d",
                "hash_full_prompts": "03daa510ba917f4d",
                "hash_input_tokens": "6516f65c5506da39",
                "hash_cont_tokens": "bb685a5b79cc4bcf"
            },
            "truncated": 0,
            "non_truncated": 204,
            "padded": 816,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:high_school_world_history|0": {
            "hashes": {
                "hash_examples": "be075ffd579f43c2",
                "hash_full_prompts": "be075ffd579f43c2",
                "hash_input_tokens": "d004b652daf13693",
                "hash_cont_tokens": "bda258dc2ee9ae84"
            },
            "truncated": 0,
            "non_truncated": 237,
            "padded": 948,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:human_aging|0": {
            "hashes": {
                "hash_examples": "caa5b69f640bd1ef",
                "hash_full_prompts": "caa5b69f640bd1ef",
                "hash_input_tokens": "23d55f006aede681",
                "hash_cont_tokens": "6e6e8bc3e6da2961"
            },
            "truncated": 0,
            "non_truncated": 223,
            "padded": 864,
            "non_padded": 28,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:human_sexuality|0": {
            "hashes": {
                "hash_examples": "5ed2e38fb25a3767",
                "hash_full_prompts": "5ed2e38fb25a3767",
                "hash_input_tokens": "bfd035fe3441fbb5",
                "hash_cont_tokens": "c1ed6db7a04a7612"
            },
            "truncated": 0,
            "non_truncated": 131,
            "padded": 504,
            "non_padded": 20,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:international_law|0": {
            "hashes": {
                "hash_examples": "4e3e9e28d1b96484",
                "hash_full_prompts": "4e3e9e28d1b96484",
                "hash_input_tokens": "75d3630b17a02b02",
                "hash_cont_tokens": "92daaf64d9d9da2a"
            },
            "truncated": 0,
            "non_truncated": 121,
            "padded": 484,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:jurisprudence|0": {
            "hashes": {
                "hash_examples": "e264b755366310b3",
                "hash_full_prompts": "e264b755366310b3",
                "hash_input_tokens": "25f7fe5b32a29a69",
                "hash_cont_tokens": "7fcf8d23df5ccc91"
            },
            "truncated": 0,
            "non_truncated": 108,
            "padded": 424,
            "non_padded": 8,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:logical_fallacies|0": {
            "hashes": {
                "hash_examples": "a4ab6965a3e38071",
                "hash_full_prompts": "a4ab6965a3e38071",
                "hash_input_tokens": "51151ac823b61c83",
                "hash_cont_tokens": "72948da08a500157"
            },
            "truncated": 0,
            "non_truncated": 163,
            "padded": 648,
            "non_padded": 4,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:machine_learning|0": {
            "hashes": {
                "hash_examples": "b92320efa6636b40",
                "hash_full_prompts": "b92320efa6636b40",
                "hash_input_tokens": "44db37c536b2e853",
                "hash_cont_tokens": "405e73c102222fba"
            },
            "truncated": 0,
            "non_truncated": 112,
            "padded": 436,
            "non_padded": 12,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:management|0": {
            "hashes": {
                "hash_examples": "c9ee4872a850fe20",
                "hash_full_prompts": "c9ee4872a850fe20",
                "hash_input_tokens": "8dba7826fac430b9",
                "hash_cont_tokens": "4726a545073d0c81"
            },
            "truncated": 0,
            "non_truncated": 103,
            "padded": 404,
            "non_padded": 8,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:marketing|0": {
            "hashes": {
                "hash_examples": "0c151b70f6a047e3",
                "hash_full_prompts": "0c151b70f6a047e3",
                "hash_input_tokens": "b420f4dfedb5d067",
                "hash_cont_tokens": "259e902474dbb8d2"
            },
            "truncated": 0,
            "non_truncated": 234,
            "padded": 920,
            "non_padded": 16,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:medical_genetics|0": {
            "hashes": {
                "hash_examples": "513f6cb8fca3a24e",
                "hash_full_prompts": "513f6cb8fca3a24e",
                "hash_input_tokens": "329645fb41ab609d",
                "hash_cont_tokens": "14f152d098bdd3a0"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 396,
            "non_padded": 4,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:miscellaneous|0": {
            "hashes": {
                "hash_examples": "259a190d635331db",
                "hash_full_prompts": "259a190d635331db",
                "hash_input_tokens": "1d1f4339c5089b8b",
                "hash_cont_tokens": "d13fcbf27d589c0b"
            },
            "truncated": 0,
            "non_truncated": 783,
            "padded": 3012,
            "non_padded": 120,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:moral_disputes|0": {
            "hashes": {
                "hash_examples": "b85052c48a0b7bc3",
                "hash_full_prompts": "b85052c48a0b7bc3",
                "hash_input_tokens": "6649b351489ffd90",
                "hash_cont_tokens": "0c9d0a6a2c7e15ce"
            },
            "truncated": 0,
            "non_truncated": 346,
            "padded": 1364,
            "non_padded": 20,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:moral_scenarios|0": {
            "hashes": {
                "hash_examples": "28d0b069ef00dd00",
                "hash_full_prompts": "28d0b069ef00dd00",
                "hash_input_tokens": "ac68a1dc021e5709",
                "hash_cont_tokens": "30e44790ff428f22"
            },
            "truncated": 0,
            "non_truncated": 895,
            "padded": 3580,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:nutrition|0": {
            "hashes": {
                "hash_examples": "00c9bc5f1d305b2f",
                "hash_full_prompts": "00c9bc5f1d305b2f",
                "hash_input_tokens": "35608f434f7e88f9",
                "hash_cont_tokens": "7a4bdd0cd253cd01"
            },
            "truncated": 0,
            "non_truncated": 306,
            "padded": 1208,
            "non_padded": 16,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:philosophy|0": {
            "hashes": {
                "hash_examples": "a458c08454a3fd5f",
                "hash_full_prompts": "a458c08454a3fd5f",
                "hash_input_tokens": "5f190454ff7953c7",
                "hash_cont_tokens": "367fb2f82c92f770"
            },
            "truncated": 0,
            "non_truncated": 311,
            "padded": 1220,
            "non_padded": 24,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:prehistory|0": {
            "hashes": {
                "hash_examples": "d6a0ecbdbb670e9c",
                "hash_full_prompts": "d6a0ecbdbb670e9c",
                "hash_input_tokens": "ffba03dfdc55fcca",
                "hash_cont_tokens": "648a075d49cc9f7d"
            },
            "truncated": 0,
            "non_truncated": 324,
            "padded": 1269,
            "non_padded": 27,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:professional_accounting|0": {
            "hashes": {
                "hash_examples": "b4a95fe480b6540e",
                "hash_full_prompts": "b4a95fe480b6540e",
                "hash_input_tokens": "56c98cc560a0db62",
                "hash_cont_tokens": "485cab302a3f12b4"
            },
            "truncated": 0,
            "non_truncated": 282,
            "padded": 1108,
            "non_padded": 20,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:professional_law|0": {
            "hashes": {
                "hash_examples": "c2be9651cdbdde3b",
                "hash_full_prompts": "c2be9651cdbdde3b",
                "hash_input_tokens": "d01d4ad8c104e5ad",
                "hash_cont_tokens": "6fac7226deec9bd7"
            },
            "truncated": 0,
            "non_truncated": 1534,
            "padded": 6080,
            "non_padded": 56,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:professional_medicine|0": {
            "hashes": {
                "hash_examples": "26ce92416288f273",
                "hash_full_prompts": "26ce92416288f273",
                "hash_input_tokens": "eeda962cf17da6a1",
                "hash_cont_tokens": "2ee2dae0fd3373a7"
            },
            "truncated": 0,
            "non_truncated": 272,
            "padded": 1084,
            "non_padded": 4,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:professional_psychology|0": {
            "hashes": {
                "hash_examples": "71ea5f182ea9a641",
                "hash_full_prompts": "71ea5f182ea9a641",
                "hash_input_tokens": "82ca754cdb4e0f6f",
                "hash_cont_tokens": "833292c391c68c54"
            },
            "truncated": 0,
            "non_truncated": 612,
            "padded": 2360,
            "non_padded": 88,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:public_relations|0": {
            "hashes": {
                "hash_examples": "125adc21f91f8d77",
                "hash_full_prompts": "125adc21f91f8d77",
                "hash_input_tokens": "1b58cd75f6291da7",
                "hash_cont_tokens": "96d5f037f3fe3f79"
            },
            "truncated": 0,
            "non_truncated": 110,
            "padded": 424,
            "non_padded": 16,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:security_studies|0": {
            "hashes": {
                "hash_examples": "3c18b216c099fb26",
                "hash_full_prompts": "3c18b216c099fb26",
                "hash_input_tokens": "34f85457e26a8e0d",
                "hash_cont_tokens": "d2b03acafc7b6fc8"
            },
            "truncated": 0,
            "non_truncated": 245,
            "padded": 972,
            "non_padded": 8,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "community|arabic_mmlu:sociology|0": {
            "hashes": {
                "hash_examples": "3f2a9634cef7417d",
                "hash_full_prompts": "3f2a9634cef7417d",
                "hash_input_tokens": "685a81c1a2319a96",
                "hash_cont_tokens": "31b5e3ff3801aa77"
            },
            "truncated": 0,
            "non_truncated": 201,
            "padded": 792,
            "non_padded": 12,
            "effective_few_shots": 0.0,
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:us_foreign_policy|0": { |
|
"hashes": { |
|
"hash_examples": "22249da54056475e", |
|
"hash_full_prompts": "22249da54056475e", |
|
"hash_input_tokens": "b95a6004c71aee8d", |
|
"hash_cont_tokens": "14f152d098bdd3a0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:virology|0": { |
|
"hashes": { |
|
"hash_examples": "9d194b9471dc624e", |
|
"hash_full_prompts": "9d194b9471dc624e", |
|
"hash_input_tokens": "5b7df209375363f1", |
|
"hash_cont_tokens": "83a09eb09208b55d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 166, |
|
"padded": 640, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:world_religions|0": { |
|
"hashes": { |
|
"hash_examples": "229e5fe50082b064", |
|
"hash_full_prompts": "229e5fe50082b064", |
|
"hash_input_tokens": "5d1a49dcd0f838af", |
|
"hash_cont_tokens": "793c7a744f84bd0d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 171, |
|
"padded": 644, |
|
"non_padded": 40, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arc_challenge_okapi_ar|0": { |
|
"hashes": { |
|
"hash_examples": "ab893807673bc355", |
|
"hash_full_prompts": "ab893807673bc355", |
|
"hash_input_tokens": "6ac1c8defcea34a8", |
|
"hash_cont_tokens": "cbab293b9e45551a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1160, |
|
"padded": 4527, |
|
"non_padded": 113, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arc_easy_ar|0": { |
|
"hashes": { |
|
"hash_examples": "acb688624acc3d04", |
|
"hash_full_prompts": "acb688624acc3d04", |
|
"hash_input_tokens": "e989dc9a99f9b1a2", |
|
"hash_cont_tokens": "b97546d2067efc92" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 2364, |
|
"padded": 9105, |
|
"non_padded": 351, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|boolq_ar|0": { |
|
"hashes": { |
|
"hash_examples": "48355a67867e0c32", |
|
"hash_full_prompts": "48355a67867e0c32", |
|
"hash_input_tokens": "397efd6798a8e8c4", |
|
"hash_cont_tokens": "3651b7bd5e7b4715" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 3260, |
|
"padded": 6440, |
|
"non_padded": 80, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|copa_ext_ar|0": { |
|
"hashes": { |
|
"hash_examples": "9bb83301bb72eecf", |
|
"hash_full_prompts": "9bb83301bb72eecf", |
|
"hash_input_tokens": "3d2eeb3bcaef8f48", |
|
"hash_cont_tokens": "3df4385784831cbc" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 90, |
|
"padded": 179, |
|
"non_padded": 1, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|hellaswag_okapi_ar|0": { |
|
"hashes": { |
|
"hash_examples": "6e8cf57a322dfadd", |
|
"hash_full_prompts": "6e8cf57a322dfadd", |
|
"hash_input_tokens": "87174659bee3d852", |
|
"hash_cont_tokens": "f4d29e77f6a4876c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 9171, |
|
"padded": 36502, |
|
"non_padded": 182, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|openbook_qa_ext_ar|0": { |
|
"hashes": { |
|
"hash_examples": "923d41eb0aca93eb", |
|
"hash_full_prompts": "923d41eb0aca93eb", |
|
"hash_input_tokens": "2a1291ebaa2f7b8d", |
|
"hash_cont_tokens": "f85ab30b9ed808ae" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 495, |
|
"padded": 1931, |
|
"non_padded": 49, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|piqa_ar|0": { |
|
"hashes": { |
|
"hash_examples": "94bc205a520d3ea0", |
|
"hash_full_prompts": "94bc205a520d3ea0", |
|
"hash_input_tokens": "4e9e3af3a5fd85a5", |
|
"hash_cont_tokens": "d11b83ad3fec67ae" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1833, |
|
"padded": 3582, |
|
"non_padded": 84, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|race_ar|0": { |
|
"hashes": { |
|
"hash_examples": "de65130bae647516", |
|
"hash_full_prompts": "de65130bae647516", |
|
"hash_input_tokens": "eb64693e28176c83", |
|
"hash_cont_tokens": "dd2ebfc1dcb115ae" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 4929, |
|
"padded": 19699, |
|
"non_padded": 17, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|sciq_ar|0": { |
|
"hashes": { |
|
"hash_examples": "8778de08a6dfb050", |
|
"hash_full_prompts": "8778de08a6dfb050", |
|
"hash_input_tokens": "f919eb2a0d387ffe", |
|
"hash_cont_tokens": "d2d8ffb94fc5595d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 995, |
|
"padded": 3925, |
|
"non_padded": 55, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|toxigen_ar|0": { |
|
"hashes": { |
|
"hash_examples": "1e139513004a9a2e", |
|
"hash_full_prompts": "1e139513004a9a2e", |
|
"hash_input_tokens": "0e78bd5dad04c7f0", |
|
"hash_cont_tokens": "60a1fc71dabf9c79" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 935, |
|
"padded": 1830, |
|
"non_padded": 40, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"lighteval|xstory_cloze:ar|0": { |
|
"hashes": { |
|
"hash_examples": "865426a22c787481", |
|
"hash_full_prompts": "865426a22c787481", |
|
"hash_input_tokens": "9d9ffa09031f385d", |
|
"hash_cont_tokens": "9fefe4a997904abd" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1511, |
|
"padded": 2959, |
|
"non_padded": 63, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
} |
|
}, |
|
"summary_general": { |
|
"hashes": { |
|
"hash_examples": "896fa8128ce0a35c", |
|
"hash_full_prompts": "896fa8128ce0a35c", |
|
"hash_input_tokens": "fc75857ee270a62c", |
|
"hash_cont_tokens": "1e2235e87ac4ff90" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 72964, |
|
"padded": 233260, |
|
"non_padded": 2363, |
|
"num_truncated_few_shots": 0 |
|
} |
|
} |