results/tiiuae/falcon-7b-instruct/results_2024-06-06T22-11-20.791093.json
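A minimal sketch of how one might load and inspect this lighteval results file with the Python standard library (the local filename "results.json" and the recomputed ACVA macro-average are illustrative assumptions, not part of the original output):

import json

# Load the results file (assumed to be saved locally as "results.json").
with open("results.json", encoding="utf-8") as f:
    data = json.load(f)

# Basic run metadata from "config_general".
print(data["config_general"]["model_name"])   # tiiuae/falcon-7b-instruct
print(data["config_general"]["model_dtype"])  # torch.bfloat16

# Recompute the ACVA macro-average reported under "community|acva:_average|0".
acva_scores = [
    v["acc_norm"]
    for k, v in data["results"].items()
    if k.startswith("community|acva:") and "_average" not in k
]
print(sum(acva_scores) / len(acva_scores))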
{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 519.267631364,
"end_time": 27192.395715721,
"total_evaluation_time_secondes": "26673.128084357",
"model_name": "tiiuae/falcon-7b-instruct",
"model_sha": "cf4b3c42ce2fdfe24f753f0f0d179202fea59c99",
"model_dtype": "torch.bfloat16",
"model_size": "12.89 GB",
"config": null
},
"results": {
"community|acva:Algeria|0": {
"acc_norm": 0.517948717948718,
"acc_norm_stderr": 0.03587477098773826
},
"community|acva:Ancient_Egypt|0": {
"acc_norm": 0.9396825396825397,
"acc_norm_stderr": 0.013435297210747533
},
"community|acva:Arab_Empire|0": {
"acc_norm": 0.6716981132075471,
"acc_norm_stderr": 0.02890159361241178
},
"community|acva:Arabic_Architecture|0": {
"acc_norm": 0.5230769230769231,
"acc_norm_stderr": 0.0358596530894741
},
"community|acva:Arabic_Art|0": {
"acc_norm": 0.6564102564102564,
"acc_norm_stderr": 0.03409627301409856
},
"community|acva:Arabic_Astronomy|0": {
"acc_norm": 0.5282051282051282,
"acc_norm_stderr": 0.035840746749208334
},
"community|acva:Arabic_Calligraphy|0": {
"acc_norm": 0.596078431372549,
"acc_norm_stderr": 0.03078813396443134
},
"community|acva:Arabic_Ceremony|0": {
"acc_norm": 0.4918918918918919,
"acc_norm_stderr": 0.036855642198496893
},
"community|acva:Arabic_Clothing|0": {
"acc_norm": 0.49230769230769234,
"acc_norm_stderr": 0.03589365940635212
},
"community|acva:Arabic_Culture|0": {
"acc_norm": 0.7589743589743589,
"acc_norm_stderr": 0.0307074893811242
},
"community|acva:Arabic_Food|0": {
"acc_norm": 0.5794871794871795,
"acc_norm_stderr": 0.03544138389303482
},
"community|acva:Arabic_Funeral|0": {
"acc_norm": 0.5578947368421052,
"acc_norm_stderr": 0.05122418389181814
},
"community|acva:Arabic_Geography|0": {
"acc_norm": 0.4689655172413793,
"acc_norm_stderr": 0.04158632762097828
},
"community|acva:Arabic_History|0": {
"acc_norm": 0.6871794871794872,
"acc_norm_stderr": 0.033287550657248546
},
"community|acva:Arabic_Language_Origin|0": {
"acc_norm": 0.4842105263157895,
"acc_norm_stderr": 0.05154534179593067
},
"community|acva:Arabic_Literature|0": {
"acc_norm": 0.4068965517241379,
"acc_norm_stderr": 0.040937939812662374
},
"community|acva:Arabic_Math|0": {
"acc_norm": 0.6871794871794872,
"acc_norm_stderr": 0.03328755065724854
},
"community|acva:Arabic_Medicine|0": {
"acc_norm": 0.5241379310344828,
"acc_norm_stderr": 0.041618085035015295
},
"community|acva:Arabic_Music|0": {
"acc_norm": 0.762589928057554,
"acc_norm_stderr": 0.036220593237998276
},
"community|acva:Arabic_Ornament|0": {
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.03581804596782233
},
"community|acva:Arabic_Philosophy|0": {
"acc_norm": 0.5241379310344828,
"acc_norm_stderr": 0.041618085035015295
},
"community|acva:Arabic_Physics_and_Chemistry|0": {
"acc_norm": 0.5538461538461539,
"acc_norm_stderr": 0.03568913546569233
},
"community|acva:Arabic_Wedding|0": {
"acc_norm": 0.5846153846153846,
"acc_norm_stderr": 0.03538013280575029
},
"community|acva:Bahrain|0": {
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.07106690545187012
},
"community|acva:Comoros|0": {
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.07106690545187012
},
"community|acva:Egypt_modern|0": {
"acc_norm": 0.6736842105263158,
"acc_norm_stderr": 0.04835966701461423
},
"community|acva:InfluenceFromAncientEgypt|0": {
"acc_norm": 0.37435897435897436,
"acc_norm_stderr": 0.03474608430626236
},
"community|acva:InfluenceFromByzantium|0": {
"acc_norm": 0.3103448275862069,
"acc_norm_stderr": 0.03855289616378949
},
"community|acva:InfluenceFromChina|0": {
"acc_norm": 0.28717948717948716,
"acc_norm_stderr": 0.032483733385398866
},
"community|acva:InfluenceFromGreece|0": {
"acc_norm": 0.3641025641025641,
"acc_norm_stderr": 0.03454653867786389
},
"community|acva:InfluenceFromIslam|0": {
"acc_norm": 0.7172413793103448,
"acc_norm_stderr": 0.03752833958003336
},
"community|acva:InfluenceFromPersia|0": {
"acc_norm": 0.29714285714285715,
"acc_norm_stderr": 0.03464507889884372
},
"community|acva:InfluenceFromRome|0": {
"acc_norm": 0.4256410256410256,
"acc_norm_stderr": 0.03549871080367708
},
"community|acva:Iraq|0": {
"acc_norm": 0.5411764705882353,
"acc_norm_stderr": 0.0543691634273002
},
"community|acva:Islam_Education|0": {
"acc_norm": 0.5435897435897435,
"acc_norm_stderr": 0.03576123096991215
},
"community|acva:Islam_branches_and_schools|0": {
"acc_norm": 0.56,
"acc_norm_stderr": 0.037630997249913346
},
"community|acva:Islamic_law_system|0": {
"acc_norm": 0.6,
"acc_norm_stderr": 0.0351726229056329
},
"community|acva:Jordan|0": {
"acc_norm": 0.6222222222222222,
"acc_norm_stderr": 0.07309112127323451
},
"community|acva:Kuwait|0": {
"acc_norm": 0.7111111111111111,
"acc_norm_stderr": 0.06832943242540508
},
"community|acva:Lebanon|0": {
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.07106690545187012
},
"community|acva:Libya|0": {
"acc_norm": 0.5777777777777777,
"acc_norm_stderr": 0.07446027270295806
},
"community|acva:Mauritania|0": {
"acc_norm": 0.5111111111111111,
"acc_norm_stderr": 0.07535922203472523
},
"community|acva:Mesopotamia_civilization|0": {
"acc_norm": 0.5032258064516129,
"acc_norm_stderr": 0.04029030966708646
},
"community|acva:Morocco|0": {
"acc_norm": 0.7555555555555555,
"acc_norm_stderr": 0.06478835438717
},
"community|acva:Oman|0": {
"acc_norm": 0.7333333333333333,
"acc_norm_stderr": 0.06666666666666668
},
"community|acva:Palestine|0": {
"acc_norm": 0.7411764705882353,
"acc_norm_stderr": 0.04778846120374094
},
"community|acva:Qatar|0": {
"acc_norm": 0.5777777777777777,
"acc_norm_stderr": 0.07446027270295806
},
"community|acva:Saudi_Arabia|0": {
"acc_norm": 0.7230769230769231,
"acc_norm_stderr": 0.032127058190759304
},
"community|acva:Somalia|0": {
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.0752101433090355
},
"community|acva:Sudan|0": {
"acc_norm": 0.6222222222222222,
"acc_norm_stderr": 0.07309112127323451
},
"community|acva:Syria|0": {
"acc_norm": 0.6444444444444445,
"acc_norm_stderr": 0.07216392363431012
},
"community|acva:Tunisia|0": {
"acc_norm": 0.6222222222222222,
"acc_norm_stderr": 0.0730911212732345
},
"community|acva:United_Arab_Emirates|0": {
"acc_norm": 0.7529411764705882,
"acc_norm_stderr": 0.047058823529411785
},
"community|acva:Yemen|0": {
"acc_norm": 0.6,
"acc_norm_stderr": 0.16329931618554522
},
"community|acva:communication|0": {
"acc_norm": 0.5769230769230769,
"acc_norm_stderr": 0.025930763940893855
},
"community|acva:computer_and_phone|0": {
"acc_norm": 0.535593220338983,
"acc_norm_stderr": 0.02908661254728461
},
"community|acva:daily_life|0": {
"acc_norm": 0.6913946587537092,
"acc_norm_stderr": 0.025199683754189534
},
"community|acva:entertainment|0": {
"acc_norm": 0.7694915254237288,
"acc_norm_stderr": 0.024562451137185734
},
"community|alghafa:mcq_exams_test_ar|0": {
"acc_norm": 0.2621184919210054,
"acc_norm_stderr": 0.018651112765714396
},
"community|alghafa:meta_ar_dialects|0": {
"acc_norm": 0.24133456904541242,
"acc_norm_stderr": 0.005826120252987784
},
"community|alghafa:meta_ar_msa|0": {
"acc_norm": 0.23687150837988827,
"acc_norm_stderr": 0.014219570788103987
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"acc_norm": 0.52,
"acc_norm_stderr": 0.05807730170189531
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"acc_norm": 0.2866666666666667,
"acc_norm_stderr": 0.03704603420423023
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"acc_norm": 0.22666666666666666,
"acc_norm_stderr": 0.03429920547036886
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"acc_norm": 0.5018136335209505,
"acc_norm_stderr": 0.005592230648355162
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"acc_norm": 0.34078398665554627,
"acc_norm_stderr": 0.006122024940476835
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"acc_norm": 0.3569767441860465,
"acc_norm_stderr": 0.0115556761937906
},
"community|arabic_exams|0": {
"acc_norm": 0.2346368715083799,
"acc_norm_stderr": 0.018304147337988314
},
"community|arabic_mmlu:abstract_algebra|0": {
"acc_norm": 0.22,
"acc_norm_stderr": 0.041633319989322674
},
"community|arabic_mmlu:anatomy|0": {
"acc_norm": 0.2074074074074074,
"acc_norm_stderr": 0.03502553170678318
},
"community|arabic_mmlu:astronomy|0": {
"acc_norm": 0.21052631578947367,
"acc_norm_stderr": 0.03317672787533158
},
"community|arabic_mmlu:business_ethics|0": {
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720683
},
"community|arabic_mmlu:clinical_knowledge|0": {
"acc_norm": 0.21509433962264152,
"acc_norm_stderr": 0.02528839450289137
},
"community|arabic_mmlu:college_biology|0": {
"acc_norm": 0.22916666666666666,
"acc_norm_stderr": 0.03514697467862388
},
"community|arabic_mmlu:college_chemistry|0": {
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932269
},
"community|arabic_mmlu:college_computer_science|0": {
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"community|arabic_mmlu:college_mathematics|0": {
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"community|arabic_mmlu:college_medicine|0": {
"acc_norm": 0.21965317919075145,
"acc_norm_stderr": 0.031568093627031744
},
"community|arabic_mmlu:college_physics|0": {
"acc_norm": 0.20588235294117646,
"acc_norm_stderr": 0.04023382273617747
},
"community|arabic_mmlu:computer_security|0": {
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768079
},
"community|arabic_mmlu:conceptual_physics|0": {
"acc_norm": 0.2765957446808511,
"acc_norm_stderr": 0.029241883869628827
},
"community|arabic_mmlu:econometrics|0": {
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.03999423879281337
},
"community|arabic_mmlu:electrical_engineering|0": {
"acc_norm": 0.23448275862068965,
"acc_norm_stderr": 0.035306258743465914
},
"community|arabic_mmlu:elementary_mathematics|0": {
"acc_norm": 0.2037037037037037,
"acc_norm_stderr": 0.020742740560122656
},
"community|arabic_mmlu:formal_logic|0": {
"acc_norm": 0.23809523809523808,
"acc_norm_stderr": 0.03809523809523809
},
"community|arabic_mmlu:global_facts|0": {
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036845
},
"community|arabic_mmlu:high_school_biology|0": {
"acc_norm": 0.17096774193548386,
"acc_norm_stderr": 0.02141724293632157
},
"community|arabic_mmlu:high_school_chemistry|0": {
"acc_norm": 0.1625615763546798,
"acc_norm_stderr": 0.025960300064605576
},
"community|arabic_mmlu:high_school_computer_science|0": {
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"community|arabic_mmlu:high_school_european_history|0": {
"acc_norm": 0.21212121212121213,
"acc_norm_stderr": 0.03192271569548299
},
"community|arabic_mmlu:high_school_geography|0": {
"acc_norm": 0.18181818181818182,
"acc_norm_stderr": 0.027479603010538797
},
"community|arabic_mmlu:high_school_government_and_politics|0": {
"acc_norm": 0.19689119170984457,
"acc_norm_stderr": 0.028697873971860664
},
"community|arabic_mmlu:high_school_macroeconomics|0": {
"acc_norm": 0.20512820512820512,
"acc_norm_stderr": 0.020473233173551982
},
"community|arabic_mmlu:high_school_mathematics|0": {
"acc_norm": 0.2037037037037037,
"acc_norm_stderr": 0.024556172219141276
},
"community|arabic_mmlu:high_school_microeconomics|0": {
"acc_norm": 0.2184873949579832,
"acc_norm_stderr": 0.02684151432295893
},
"community|arabic_mmlu:high_school_physics|0": {
"acc_norm": 0.19205298013245034,
"acc_norm_stderr": 0.03216298420593614
},
"community|arabic_mmlu:high_school_psychology|0": {
"acc_norm": 0.181651376146789,
"acc_norm_stderr": 0.01653061740926687
},
"community|arabic_mmlu:high_school_statistics|0": {
"acc_norm": 0.16666666666666666,
"acc_norm_stderr": 0.025416428388767478
},
"community|arabic_mmlu:high_school_us_history|0": {
"acc_norm": 0.24019607843137256,
"acc_norm_stderr": 0.02998373305591361
},
"community|arabic_mmlu:high_school_world_history|0": {
"acc_norm": 0.270042194092827,
"acc_norm_stderr": 0.028900721906293426
},
"community|arabic_mmlu:human_aging|0": {
"acc_norm": 0.31390134529147984,
"acc_norm_stderr": 0.031146796482972465
},
"community|arabic_mmlu:human_sexuality|0": {
"acc_norm": 0.25190839694656486,
"acc_norm_stderr": 0.03807387116306086
},
"community|arabic_mmlu:international_law|0": {
"acc_norm": 0.2396694214876033,
"acc_norm_stderr": 0.03896878985070417
},
"community|arabic_mmlu:jurisprudence|0": {
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.042365112580946336
},
"community|arabic_mmlu:logical_fallacies|0": {
"acc_norm": 0.2147239263803681,
"acc_norm_stderr": 0.03226219377286774
},
"community|arabic_mmlu:machine_learning|0": {
"acc_norm": 0.3125,
"acc_norm_stderr": 0.043994650575715215
},
"community|arabic_mmlu:management|0": {
"acc_norm": 0.1553398058252427,
"acc_norm_stderr": 0.035865947385739734
},
"community|arabic_mmlu:marketing|0": {
"acc_norm": 0.2863247863247863,
"acc_norm_stderr": 0.029614323690456645
},
"community|arabic_mmlu:medical_genetics|0": {
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"community|arabic_mmlu:miscellaneous|0": {
"acc_norm": 0.24010217113665389,
"acc_norm_stderr": 0.015274685213734193
},
"community|arabic_mmlu:moral_disputes|0": {
"acc_norm": 0.2514450867052023,
"acc_norm_stderr": 0.02335736578587404
},
"community|arabic_mmlu:moral_scenarios|0": {
"acc_norm": 0.24022346368715083,
"acc_norm_stderr": 0.014288343803925293
},
"community|arabic_mmlu:nutrition|0": {
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.02392915551735128
},
"community|arabic_mmlu:philosophy|0": {
"acc_norm": 0.19292604501607716,
"acc_norm_stderr": 0.022411516780911366
},
"community|arabic_mmlu:prehistory|0": {
"acc_norm": 0.21604938271604937,
"acc_norm_stderr": 0.022899162918445806
},
"community|arabic_mmlu:professional_accounting|0": {
"acc_norm": 0.2375886524822695,
"acc_norm_stderr": 0.025389512552729903
},
"community|arabic_mmlu:professional_law|0": {
"acc_norm": 0.24445893089960888,
"acc_norm_stderr": 0.010976425013113906
},
"community|arabic_mmlu:professional_medicine|0": {
"acc_norm": 0.1801470588235294,
"acc_norm_stderr": 0.02334516361654485
},
"community|arabic_mmlu:professional_psychology|0": {
"acc_norm": 0.24183006535947713,
"acc_norm_stderr": 0.017322789207784326
},
"community|arabic_mmlu:public_relations|0": {
"acc_norm": 0.20909090909090908,
"acc_norm_stderr": 0.038950910157241364
},
"community|arabic_mmlu:security_studies|0": {
"acc_norm": 0.17551020408163265,
"acc_norm_stderr": 0.02435280072297001
},
"community|arabic_mmlu:sociology|0": {
"acc_norm": 0.24378109452736318,
"acc_norm_stderr": 0.030360490154014652
},
"community|arabic_mmlu:us_foreign_policy|0": {
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"community|arabic_mmlu:virology|0": {
"acc_norm": 0.2891566265060241,
"acc_norm_stderr": 0.03529486801511115
},
"community|arabic_mmlu:world_religions|0": {
"acc_norm": 0.3216374269005848,
"acc_norm_stderr": 0.03582529442573122
},
"community|arc_challenge_okapi_ar|0": {
"acc_norm": 0.2543103448275862,
"acc_norm_stderr": 0.012791437539377021
},
"community|arc_easy_ar|0": {
"acc_norm": 0.2516920473773266,
"acc_norm_stderr": 0.008927771500374845
},
"community|boolq_ar|0": {
"acc_norm": 0.37944785276073617,
"acc_norm_stderr": 0.00850007995551102
},
"community|copa_ext_ar|0": {
"acc_norm": 0.4777777777777778,
"acc_norm_stderr": 0.05294752255076824
},
"community|hellaswag_okapi_ar|0": {
"acc_norm": 0.2383600479773198,
"acc_norm_stderr": 0.004449456064505461
},
"community|openbook_qa_ext_ar|0": {
"acc_norm": 0.3373737373737374,
"acc_norm_stderr": 0.02127288949194415
},
"community|piqa_ar|0": {
"acc_norm": 0.5144571740316422,
"acc_norm_stderr": 0.011676848621656596
},
"community|race_ar|0": {
"acc_norm": 0.2627307770338811,
"acc_norm_stderr": 0.006269503915720708
},
"community|sciq_ar|0": {
"acc_norm": 0.3065326633165829,
"acc_norm_stderr": 0.014623731080022977
},
"community|toxigen_ar|0": {
"acc_norm": 0.5679144385026738,
"acc_norm_stderr": 0.01620887578524445
},
"lighteval|xstory_cloze:ar|0": {
"acc": 0.4692256783587028,
"acc_stderr": 0.012842730340585789
},
"community|acva:_average|0": {
"acc_norm": 0.5867266156919362,
"acc_norm_stderr": 0.04707687115634792
},
"community|alghafa:_average|0": {
"acc_norm": 0.3303591407824647,
"acc_norm_stderr": 0.021265475218435904
},
"community|arabic_mmlu:_average|0": {
"acc_norm": 0.2307509222931127,
"acc_norm_stderr": 0.031510436541868116
},
"all": {
"acc_norm": 0.3998623131879227,
"acc_norm_stderr": 0.036251221857508883,
"acc": 0.4692256783587028,
"acc_stderr": 0.012842730340585789
}
},
"versions": {
"community|acva:Algeria|0": 0,
"community|acva:Ancient_Egypt|0": 0,
"community|acva:Arab_Empire|0": 0,
"community|acva:Arabic_Architecture|0": 0,
"community|acva:Arabic_Art|0": 0,
"community|acva:Arabic_Astronomy|0": 0,
"community|acva:Arabic_Calligraphy|0": 0,
"community|acva:Arabic_Ceremony|0": 0,
"community|acva:Arabic_Clothing|0": 0,
"community|acva:Arabic_Culture|0": 0,
"community|acva:Arabic_Food|0": 0,
"community|acva:Arabic_Funeral|0": 0,
"community|acva:Arabic_Geography|0": 0,
"community|acva:Arabic_History|0": 0,
"community|acva:Arabic_Language_Origin|0": 0,
"community|acva:Arabic_Literature|0": 0,
"community|acva:Arabic_Math|0": 0,
"community|acva:Arabic_Medicine|0": 0,
"community|acva:Arabic_Music|0": 0,
"community|acva:Arabic_Ornament|0": 0,
"community|acva:Arabic_Philosophy|0": 0,
"community|acva:Arabic_Physics_and_Chemistry|0": 0,
"community|acva:Arabic_Wedding|0": 0,
"community|acva:Bahrain|0": 0,
"community|acva:Comoros|0": 0,
"community|acva:Egypt_modern|0": 0,
"community|acva:InfluenceFromAncientEgypt|0": 0,
"community|acva:InfluenceFromByzantium|0": 0,
"community|acva:InfluenceFromChina|0": 0,
"community|acva:InfluenceFromGreece|0": 0,
"community|acva:InfluenceFromIslam|0": 0,
"community|acva:InfluenceFromPersia|0": 0,
"community|acva:InfluenceFromRome|0": 0,
"community|acva:Iraq|0": 0,
"community|acva:Islam_Education|0": 0,
"community|acva:Islam_branches_and_schools|0": 0,
"community|acva:Islamic_law_system|0": 0,
"community|acva:Jordan|0": 0,
"community|acva:Kuwait|0": 0,
"community|acva:Lebanon|0": 0,
"community|acva:Libya|0": 0,
"community|acva:Mauritania|0": 0,
"community|acva:Mesopotamia_civilization|0": 0,
"community|acva:Morocco|0": 0,
"community|acva:Oman|0": 0,
"community|acva:Palestine|0": 0,
"community|acva:Qatar|0": 0,
"community|acva:Saudi_Arabia|0": 0,
"community|acva:Somalia|0": 0,
"community|acva:Sudan|0": 0,
"community|acva:Syria|0": 0,
"community|acva:Tunisia|0": 0,
"community|acva:United_Arab_Emirates|0": 0,
"community|acva:Yemen|0": 0,
"community|acva:communication|0": 0,
"community|acva:computer_and_phone|0": 0,
"community|acva:daily_life|0": 0,
"community|acva:entertainment|0": 0,
"community|alghafa:mcq_exams_test_ar|0": 0,
"community|alghafa:meta_ar_dialects|0": 0,
"community|alghafa:meta_ar_msa|0": 0,
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0,
"community|alghafa:multiple_choice_sentiment_task|0": 0,
"community|arabic_exams|0": 0,
"community|arabic_mmlu:abstract_algebra|0": 0,
"community|arabic_mmlu:anatomy|0": 0,
"community|arabic_mmlu:astronomy|0": 0,
"community|arabic_mmlu:business_ethics|0": 0,
"community|arabic_mmlu:clinical_knowledge|0": 0,
"community|arabic_mmlu:college_biology|0": 0,
"community|arabic_mmlu:college_chemistry|0": 0,
"community|arabic_mmlu:college_computer_science|0": 0,
"community|arabic_mmlu:college_mathematics|0": 0,
"community|arabic_mmlu:college_medicine|0": 0,
"community|arabic_mmlu:college_physics|0": 0,
"community|arabic_mmlu:computer_security|0": 0,
"community|arabic_mmlu:conceptual_physics|0": 0,
"community|arabic_mmlu:econometrics|0": 0,
"community|arabic_mmlu:electrical_engineering|0": 0,
"community|arabic_mmlu:elementary_mathematics|0": 0,
"community|arabic_mmlu:formal_logic|0": 0,
"community|arabic_mmlu:global_facts|0": 0,
"community|arabic_mmlu:high_school_biology|0": 0,
"community|arabic_mmlu:high_school_chemistry|0": 0,
"community|arabic_mmlu:high_school_computer_science|0": 0,
"community|arabic_mmlu:high_school_european_history|0": 0,
"community|arabic_mmlu:high_school_geography|0": 0,
"community|arabic_mmlu:high_school_government_and_politics|0": 0,
"community|arabic_mmlu:high_school_macroeconomics|0": 0,
"community|arabic_mmlu:high_school_mathematics|0": 0,
"community|arabic_mmlu:high_school_microeconomics|0": 0,
"community|arabic_mmlu:high_school_physics|0": 0,
"community|arabic_mmlu:high_school_psychology|0": 0,
"community|arabic_mmlu:high_school_statistics|0": 0,
"community|arabic_mmlu:high_school_us_history|0": 0,
"community|arabic_mmlu:high_school_world_history|0": 0,
"community|arabic_mmlu:human_aging|0": 0,
"community|arabic_mmlu:human_sexuality|0": 0,
"community|arabic_mmlu:international_law|0": 0,
"community|arabic_mmlu:jurisprudence|0": 0,
"community|arabic_mmlu:logical_fallacies|0": 0,
"community|arabic_mmlu:machine_learning|0": 0,
"community|arabic_mmlu:management|0": 0,
"community|arabic_mmlu:marketing|0": 0,
"community|arabic_mmlu:medical_genetics|0": 0,
"community|arabic_mmlu:miscellaneous|0": 0,
"community|arabic_mmlu:moral_disputes|0": 0,
"community|arabic_mmlu:moral_scenarios|0": 0,
"community|arabic_mmlu:nutrition|0": 0,
"community|arabic_mmlu:philosophy|0": 0,
"community|arabic_mmlu:prehistory|0": 0,
"community|arabic_mmlu:professional_accounting|0": 0,
"community|arabic_mmlu:professional_law|0": 0,
"community|arabic_mmlu:professional_medicine|0": 0,
"community|arabic_mmlu:professional_psychology|0": 0,
"community|arabic_mmlu:public_relations|0": 0,
"community|arabic_mmlu:security_studies|0": 0,
"community|arabic_mmlu:sociology|0": 0,
"community|arabic_mmlu:us_foreign_policy|0": 0,
"community|arabic_mmlu:virology|0": 0,
"community|arabic_mmlu:world_religions|0": 0,
"community|arc_challenge_okapi_ar|0": 0,
"community|arc_easy_ar|0": 0,
"community|boolq_ar|0": 0,
"community|copa_ext_ar|0": 0,
"community|hellaswag_okapi_ar|0": 0,
"community|openbook_qa_ext_ar|0": 0,
"community|piqa_ar|0": 0,
"community|race_ar|0": 0,
"community|sciq_ar|0": 0,
"community|toxigen_ar|0": 0,
"lighteval|xstory_cloze:ar|0": 0
},
"config_tasks": {
"community|acva:Algeria": {
"name": "acva:Algeria",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Algeria",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Ancient_Egypt": {
"name": "acva:Ancient_Egypt",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Ancient_Egypt",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 315,
"effective_num_docs": 315,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arab_Empire": {
"name": "acva:Arab_Empire",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arab_Empire",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Architecture": {
"name": "acva:Arabic_Architecture",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Architecture",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Art": {
"name": "acva:Arabic_Art",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Art",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Astronomy": {
"name": "acva:Arabic_Astronomy",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Astronomy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Calligraphy": {
"name": "acva:Arabic_Calligraphy",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Calligraphy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 255,
"effective_num_docs": 255,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Ceremony": {
"name": "acva:Arabic_Ceremony",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Ceremony",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 185,
"effective_num_docs": 185,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Clothing": {
"name": "acva:Arabic_Clothing",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Clothing",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Culture": {
"name": "acva:Arabic_Culture",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Culture",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Food": {
"name": "acva:Arabic_Food",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Food",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Funeral": {
"name": "acva:Arabic_Funeral",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Funeral",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 95,
"effective_num_docs": 95,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Geography": {
"name": "acva:Arabic_Geography",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Geography",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_History": {
"name": "acva:Arabic_History",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_History",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Language_Origin": {
"name": "acva:Arabic_Language_Origin",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Language_Origin",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 95,
"effective_num_docs": 95,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Literature": {
"name": "acva:Arabic_Literature",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Literature",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Math": {
"name": "acva:Arabic_Math",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Math",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Medicine": {
"name": "acva:Arabic_Medicine",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Medicine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Music": {
"name": "acva:Arabic_Music",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Music",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 139,
"effective_num_docs": 139,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Ornament": {
"name": "acva:Arabic_Ornament",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Ornament",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Philosophy": {
"name": "acva:Arabic_Philosophy",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Philosophy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Physics_and_Chemistry": {
"name": "acva:Arabic_Physics_and_Chemistry",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Physics_and_Chemistry",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Wedding": {
"name": "acva:Arabic_Wedding",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Wedding",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Bahrain": {
"name": "acva:Bahrain",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Bahrain",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Comoros": {
"name": "acva:Comoros",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Comoros",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Egypt_modern": {
"name": "acva:Egypt_modern",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Egypt_modern",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 95,
"effective_num_docs": 95,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromAncientEgypt": {
"name": "acva:InfluenceFromAncientEgypt",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromAncientEgypt",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromByzantium": {
"name": "acva:InfluenceFromByzantium",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromByzantium",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromChina": {
"name": "acva:InfluenceFromChina",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromChina",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromGreece": {
"name": "acva:InfluenceFromGreece",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromGreece",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromIslam": {
"name": "acva:InfluenceFromIslam",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromIslam",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromPersia": {
"name": "acva:InfluenceFromPersia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromPersia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 175,
"effective_num_docs": 175,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromRome": {
"name": "acva:InfluenceFromRome",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromRome",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Iraq": {
"name": "acva:Iraq",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Iraq",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 85,
"effective_num_docs": 85,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Islam_Education": {
"name": "acva:Islam_Education",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Islam_Education",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Islam_branches_and_schools": {
"name": "acva:Islam_branches_and_schools",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Islam_branches_and_schools",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 175,
"effective_num_docs": 175,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Islamic_law_system": {
"name": "acva:Islamic_law_system",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Islamic_law_system",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Jordan": {
"name": "acva:Jordan",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Jordan",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Kuwait": {
"name": "acva:Kuwait",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Kuwait",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Lebanon": {
"name": "acva:Lebanon",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Lebanon",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Libya": {
"name": "acva:Libya",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Libya",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Mauritania": {
"name": "acva:Mauritania",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Mauritania",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Mesopotamia_civilization": {
"name": "acva:Mesopotamia_civilization",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Mesopotamia_civilization",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 155,
"effective_num_docs": 155,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Morocco": {
"name": "acva:Morocco",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Morocco",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Oman": {
"name": "acva:Oman",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Oman",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Palestine": {
"name": "acva:Palestine",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Palestine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 85,
"effective_num_docs": 85,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Qatar": {
"name": "acva:Qatar",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Qatar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Saudi_Arabia": {
"name": "acva:Saudi_Arabia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Saudi_Arabia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Somalia": {
"name": "acva:Somalia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Somalia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Sudan": {
"name": "acva:Sudan",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Sudan",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Syria": {
"name": "acva:Syria",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Syria",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Tunisia": {
"name": "acva:Tunisia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Tunisia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:United_Arab_Emirates": {
"name": "acva:United_Arab_Emirates",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "United_Arab_Emirates",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 85,
"effective_num_docs": 85,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Yemen": {
"name": "acva:Yemen",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Yemen",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 10,
"effective_num_docs": 10,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:communication": {
"name": "acva:communication",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "communication",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 364,
"effective_num_docs": 364,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:computer_and_phone": {
"name": "acva:computer_and_phone",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "computer_and_phone",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 295,
"effective_num_docs": 295,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:daily_life": {
"name": "acva:daily_life",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "daily_life",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 337,
"effective_num_docs": 337,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:entertainment": {
"name": "acva:entertainment",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "entertainment",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 295,
"effective_num_docs": 295,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:mcq_exams_test_ar": {
"name": "alghafa:mcq_exams_test_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "mcq_exams_test_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 557,
"effective_num_docs": 557,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:meta_ar_dialects": {
"name": "alghafa:meta_ar_dialects",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_dialects",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 5395,
"effective_num_docs": 5395,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:meta_ar_msa": {
"name": "alghafa:meta_ar_msa",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_msa",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": {
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_facts_truefalse_balanced_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 75,
"effective_num_docs": 75,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task": {
"name": "alghafa:multiple_choice_grounded_statement_soqal_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_soqal_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": {
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": {
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 7995,
"effective_num_docs": 7995,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task": {
"name": "alghafa:multiple_choice_rating_sentiment_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 5995,
"effective_num_docs": 5995,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_sentiment_task": {
"name": "alghafa:multiple_choice_sentiment_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_sentiment_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1720,
"effective_num_docs": 1720,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_exams": {
"name": "arabic_exams",
"prompt_function": "arabic_exams",
"hf_repo": "OALL/Arabic_EXAMS",
"hf_subset": "default",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 537,
"effective_num_docs": 537,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:abstract_algebra": {
"name": "arabic_mmlu:abstract_algebra",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "abstract_algebra",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:anatomy": {
"name": "arabic_mmlu:anatomy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "anatomy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 135,
"effective_num_docs": 135,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:astronomy": {
"name": "arabic_mmlu:astronomy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "astronomy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 152,
"effective_num_docs": 152,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:business_ethics": {
"name": "arabic_mmlu:business_ethics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "business_ethics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:clinical_knowledge": {
"name": "arabic_mmlu:clinical_knowledge",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "clinical_knowledge",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_biology": {
"name": "arabic_mmlu:college_biology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_biology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 144,
"effective_num_docs": 144,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_chemistry": {
"name": "arabic_mmlu:college_chemistry",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_chemistry",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_computer_science": {
"name": "arabic_mmlu:college_computer_science",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_computer_science",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_mathematics": {
"name": "arabic_mmlu:college_mathematics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_mathematics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_medicine": {
"name": "arabic_mmlu:college_medicine",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_medicine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 173,
"effective_num_docs": 173,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_physics": {
"name": "arabic_mmlu:college_physics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_physics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:computer_security": {
"name": "arabic_mmlu:computer_security",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "computer_security",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:conceptual_physics": {
"name": "arabic_mmlu:conceptual_physics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "conceptual_physics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 235,
"effective_num_docs": 235,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:econometrics": {
"name": "arabic_mmlu:econometrics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "econometrics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 114,
"effective_num_docs": 114,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:electrical_engineering": {
"name": "arabic_mmlu:electrical_engineering",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "electrical_engineering",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:elementary_mathematics": {
"name": "arabic_mmlu:elementary_mathematics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "elementary_mathematics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 378,
"effective_num_docs": 378,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:formal_logic": {
"name": "arabic_mmlu:formal_logic",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "formal_logic",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 126,
"effective_num_docs": 126,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:global_facts": {
"name": "arabic_mmlu:global_facts",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "global_facts",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_biology": {
"name": "arabic_mmlu:high_school_biology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_biology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 310,
"effective_num_docs": 310,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_chemistry": {
"name": "arabic_mmlu:high_school_chemistry",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_chemistry",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 203,
"effective_num_docs": 203,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_computer_science": {
"name": "arabic_mmlu:high_school_computer_science",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_computer_science",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_european_history": {
"name": "arabic_mmlu:high_school_european_history",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_european_history",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 165,
"effective_num_docs": 165,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_geography": {
"name": "arabic_mmlu:high_school_geography",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_geography",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 198,
"effective_num_docs": 198,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_government_and_politics": {
"name": "arabic_mmlu:high_school_government_and_politics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_government_and_politics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 193,
"effective_num_docs": 193,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_macroeconomics": {
"name": "arabic_mmlu:high_school_macroeconomics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_macroeconomics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 390,
"effective_num_docs": 390,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_mathematics": {
"name": "arabic_mmlu:high_school_mathematics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_mathematics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 270,
"effective_num_docs": 270,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_microeconomics": {
"name": "arabic_mmlu:high_school_microeconomics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_microeconomics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_physics": {
"name": "arabic_mmlu:high_school_physics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_physics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 151,
"effective_num_docs": 151,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_psychology": {
"name": "arabic_mmlu:high_school_psychology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_psychology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 545,
"effective_num_docs": 545,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_statistics": {
"name": "arabic_mmlu:high_school_statistics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_statistics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 216,
"effective_num_docs": 216,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_us_history": {
"name": "arabic_mmlu:high_school_us_history",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_us_history",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 204,
"effective_num_docs": 204,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_world_history": {
"name": "arabic_mmlu:high_school_world_history",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_world_history",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 237,
"effective_num_docs": 237,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:human_aging": {
"name": "arabic_mmlu:human_aging",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "human_aging",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 223,
"effective_num_docs": 223,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:human_sexuality": {
"name": "arabic_mmlu:human_sexuality",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "human_sexuality",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 131,
"effective_num_docs": 131,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:international_law": {
"name": "arabic_mmlu:international_law",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "international_law",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 121,
"effective_num_docs": 121,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:jurisprudence": {
"name": "arabic_mmlu:jurisprudence",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "jurisprudence",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 108,
"effective_num_docs": 108,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:logical_fallacies": {
"name": "arabic_mmlu:logical_fallacies",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "logical_fallacies",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 163,
"effective_num_docs": 163,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:machine_learning": {
"name": "arabic_mmlu:machine_learning",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "machine_learning",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 112,
"effective_num_docs": 112,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:management": {
"name": "arabic_mmlu:management",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "management",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 103,
"effective_num_docs": 103,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:marketing": {
"name": "arabic_mmlu:marketing",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "marketing",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 234,
"effective_num_docs": 234,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:medical_genetics": {
"name": "arabic_mmlu:medical_genetics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "medical_genetics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:miscellaneous": {
"name": "arabic_mmlu:miscellaneous",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "miscellaneous",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 783,
"effective_num_docs": 783,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:moral_disputes": {
"name": "arabic_mmlu:moral_disputes",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "moral_disputes",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 346,
"effective_num_docs": 346,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:moral_scenarios": {
"name": "arabic_mmlu:moral_scenarios",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "moral_scenarios",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:nutrition": {
"name": "arabic_mmlu:nutrition",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "nutrition",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 306,
"effective_num_docs": 306,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:philosophy": {
"name": "arabic_mmlu:philosophy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "philosophy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 311,
"effective_num_docs": 311,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:prehistory": {
"name": "arabic_mmlu:prehistory",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "prehistory",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 324,
"effective_num_docs": 324,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_accounting": {
"name": "arabic_mmlu:professional_accounting",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_accounting",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 282,
"effective_num_docs": 282,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_law": {
"name": "arabic_mmlu:professional_law",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_law",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1534,
"effective_num_docs": 1534,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_medicine": {
"name": "arabic_mmlu:professional_medicine",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_medicine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 272,
"effective_num_docs": 272,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_psychology": {
"name": "arabic_mmlu:professional_psychology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_psychology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:public_relations": {
"name": "arabic_mmlu:public_relations",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "public_relations",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 110,
"effective_num_docs": 110,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:security_studies": {
"name": "arabic_mmlu:security_studies",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "security_studies",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 245,
"effective_num_docs": 245,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:sociology": {
"name": "arabic_mmlu:sociology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "sociology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 201,
"effective_num_docs": 201,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:us_foreign_policy": {
"name": "arabic_mmlu:us_foreign_policy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "us_foreign_policy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:virology": {
"name": "arabic_mmlu:virology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "virology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 166,
"effective_num_docs": 166,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:world_religions": {
"name": "arabic_mmlu:world_religions",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "world_religions",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 171,
"effective_num_docs": 171,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arc_challenge_okapi_ar": {
"name": "arc_challenge_okapi_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "arc_challenge_okapi_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1160,
"effective_num_docs": 1160,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arc_easy_ar": {
"name": "arc_easy_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "arc_easy_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 2364,
"effective_num_docs": 2364,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|boolq_ar": {
"name": "boolq_ar",
"prompt_function": "boolq_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "boolq_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 3260,
"effective_num_docs": 3260,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|copa_ext_ar": {
"name": "copa_ext_ar",
"prompt_function": "copa_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "copa_ext_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 90,
"effective_num_docs": 90,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|hellaswag_okapi_ar": {
"name": "hellaswag_okapi_ar",
"prompt_function": "hellaswag_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "hellaswag_okapi_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 9171,
"effective_num_docs": 9171,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|openbook_qa_ext_ar": {
"name": "openbook_qa_ext_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "openbook_qa_ext_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 495,
"effective_num_docs": 495,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|piqa_ar": {
"name": "piqa_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "piqa_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1833,
"effective_num_docs": 1833,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|race_ar": {
"name": "race_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "race_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 4929,
"effective_num_docs": 4929,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|sciq_ar": {
"name": "sciq_ar",
"prompt_function": "sciq_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "sciq_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 995,
"effective_num_docs": 995,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|toxigen_ar": {
"name": "toxigen_ar",
"prompt_function": "toxigen_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "toxigen_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 935,
"effective_num_docs": 935,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"lighteval|xstory_cloze:ar": {
"name": "xstory_cloze:ar",
"prompt_function": "storycloze",
"hf_repo": "juletxara/xstory_cloze",
"hf_subset": "ar",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"training",
"eval"
],
"evaluation_splits": [
"eval"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 1511,
"effective_num_docs": 1511,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
}
},
"summary_tasks": {
"community|acva:Algeria|0": {
"hashes": {
"hash_examples": "da5a3003cd46f6f9",
"hash_full_prompts": "da5a3003cd46f6f9",
"hash_input_tokens": "2763bed7a133576d",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Ancient_Egypt|0": {
"hashes": {
"hash_examples": "52d6f767fede195b",
"hash_full_prompts": "52d6f767fede195b",
"hash_input_tokens": "7776996f09595180",
"hash_cont_tokens": "a8b8c8cb6e28a061"
},
"truncated": 0,
"non_truncated": 315,
"padded": 630,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arab_Empire|0": {
"hashes": {
"hash_examples": "8dacff6a79804a75",
"hash_full_prompts": "8dacff6a79804a75",
"hash_input_tokens": "1b1f10d7069cb3da",
"hash_cont_tokens": "707cdba3306b240e"
},
"truncated": 0,
"non_truncated": 265,
"padded": 530,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Architecture|0": {
"hashes": {
"hash_examples": "df286cd862d9f6bb",
"hash_full_prompts": "df286cd862d9f6bb",
"hash_input_tokens": "c872edfd95f22baf",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Art|0": {
"hashes": {
"hash_examples": "112883d764118a49",
"hash_full_prompts": "112883d764118a49",
"hash_input_tokens": "cc7c783b2a10d502",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Astronomy|0": {
"hashes": {
"hash_examples": "20dcdf2454bf8671",
"hash_full_prompts": "20dcdf2454bf8671",
"hash_input_tokens": "423e61497e1a791b",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Calligraphy|0": {
"hashes": {
"hash_examples": "3a9f9d1ebe868a15",
"hash_full_prompts": "3a9f9d1ebe868a15",
"hash_input_tokens": "6c1a51115a545b08",
"hash_cont_tokens": "09700d4249d1629c"
},
"truncated": 0,
"non_truncated": 255,
"padded": 510,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Ceremony|0": {
"hashes": {
"hash_examples": "c927630f8d2f44da",
"hash_full_prompts": "c927630f8d2f44da",
"hash_input_tokens": "f30901261faf85f3",
"hash_cont_tokens": "3edcbf5fdb04e4b9"
},
"truncated": 0,
"non_truncated": 185,
"padded": 370,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Clothing|0": {
"hashes": {
"hash_examples": "6ad0740c2ac6ac92",
"hash_full_prompts": "6ad0740c2ac6ac92",
"hash_input_tokens": "85d724e52fa35ddc",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Culture|0": {
"hashes": {
"hash_examples": "2177bd857ad872ae",
"hash_full_prompts": "2177bd857ad872ae",
"hash_input_tokens": "4d784a4006cd0960",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Food|0": {
"hashes": {
"hash_examples": "a6ada65b71d7c9c5",
"hash_full_prompts": "a6ada65b71d7c9c5",
"hash_input_tokens": "5fe4898378ed8be2",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Funeral|0": {
"hashes": {
"hash_examples": "fcee39dc29eaae91",
"hash_full_prompts": "fcee39dc29eaae91",
"hash_input_tokens": "b545afc57c780410",
"hash_cont_tokens": "bc36a6bcf989f8cd"
},
"truncated": 0,
"non_truncated": 95,
"padded": 190,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Geography|0": {
"hashes": {
"hash_examples": "d36eda7c89231c02",
"hash_full_prompts": "d36eda7c89231c02",
"hash_input_tokens": "20bdedcb31f82bb4",
"hash_cont_tokens": "f54b7404de5ccff4"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_History|0": {
"hashes": {
"hash_examples": "6354ac0d6db6a5fc",
"hash_full_prompts": "6354ac0d6db6a5fc",
"hash_input_tokens": "7a73511d7f5b9816",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Language_Origin|0": {
"hashes": {
"hash_examples": "ddc967c8aca34402",
"hash_full_prompts": "ddc967c8aca34402",
"hash_input_tokens": "ebafa7d9de66860b",
"hash_cont_tokens": "bc36a6bcf989f8cd"
},
"truncated": 0,
"non_truncated": 95,
"padded": 190,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Literature|0": {
"hashes": {
"hash_examples": "4305379fd46be5d8",
"hash_full_prompts": "4305379fd46be5d8",
"hash_input_tokens": "feb65a6ee5e3b599",
"hash_cont_tokens": "f54b7404de5ccff4"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Math|0": {
"hashes": {
"hash_examples": "dec621144f4d28be",
"hash_full_prompts": "dec621144f4d28be",
"hash_input_tokens": "665aa4b0ad586e1b",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Medicine|0": {
"hashes": {
"hash_examples": "2b344cdae9495ff2",
"hash_full_prompts": "2b344cdae9495ff2",
"hash_input_tokens": "a9ba48cc3a1159ec",
"hash_cont_tokens": "f54b7404de5ccff4"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Music|0": {
"hashes": {
"hash_examples": "0c54624d881944ce",
"hash_full_prompts": "0c54624d881944ce",
"hash_input_tokens": "07a02228d16e8336",
"hash_cont_tokens": "a0293e4a7b3944a5"
},
"truncated": 0,
"non_truncated": 139,
"padded": 278,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Ornament|0": {
"hashes": {
"hash_examples": "251a4a84289d8bc1",
"hash_full_prompts": "251a4a84289d8bc1",
"hash_input_tokens": "b1bfb9af328571ab",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Philosophy|0": {
"hashes": {
"hash_examples": "3f86fb9c94c13d22",
"hash_full_prompts": "3f86fb9c94c13d22",
"hash_input_tokens": "0c05d86a8dd90f25",
"hash_cont_tokens": "f54b7404de5ccff4"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Physics_and_Chemistry|0": {
"hashes": {
"hash_examples": "8fec65af3695b62a",
"hash_full_prompts": "8fec65af3695b62a",
"hash_input_tokens": "5e03284aacebd27e",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Wedding|0": {
"hashes": {
"hash_examples": "9cc3477184d7a4b8",
"hash_full_prompts": "9cc3477184d7a4b8",
"hash_input_tokens": "e14a24a6e82bb1c2",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Bahrain|0": {
"hashes": {
"hash_examples": "c92e803a0fa8b9e2",
"hash_full_prompts": "c92e803a0fa8b9e2",
"hash_input_tokens": "ad5965c9c2459fdc",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Comoros|0": {
"hashes": {
"hash_examples": "06e5d4bba8e54cae",
"hash_full_prompts": "06e5d4bba8e54cae",
"hash_input_tokens": "630bfd7d62fb4775",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Egypt_modern|0": {
"hashes": {
"hash_examples": "c6ec369164f93446",
"hash_full_prompts": "c6ec369164f93446",
"hash_input_tokens": "5b6db0c4dff2cebd",
"hash_cont_tokens": "bc36a6bcf989f8cd"
},
"truncated": 0,
"non_truncated": 95,
"padded": 190,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromAncientEgypt|0": {
"hashes": {
"hash_examples": "b9d56d74818b9bd4",
"hash_full_prompts": "b9d56d74818b9bd4",
"hash_input_tokens": "6ad418f4c6d3c554",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromByzantium|0": {
"hashes": {
"hash_examples": "5316c9624e7e59b8",
"hash_full_prompts": "5316c9624e7e59b8",
"hash_input_tokens": "14978c7bff849df1",
"hash_cont_tokens": "f54b7404de5ccff4"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromChina|0": {
"hashes": {
"hash_examples": "87894bce95a56411",
"hash_full_prompts": "87894bce95a56411",
"hash_input_tokens": "c50b55ec2a76a25f",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromGreece|0": {
"hashes": {
"hash_examples": "0baa78a27e469312",
"hash_full_prompts": "0baa78a27e469312",
"hash_input_tokens": "666aa326b9b8407b",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromIslam|0": {
"hashes": {
"hash_examples": "0c2532cde6541ff2",
"hash_full_prompts": "0c2532cde6541ff2",
"hash_input_tokens": "cb8e6ffe71994cc8",
"hash_cont_tokens": "f54b7404de5ccff4"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromPersia|0": {
"hashes": {
"hash_examples": "efcd8112dc53c6e5",
"hash_full_prompts": "efcd8112dc53c6e5",
"hash_input_tokens": "90f5e3f1d6b09627",
"hash_cont_tokens": "ea26f11d082efddd"
},
"truncated": 0,
"non_truncated": 175,
"padded": 350,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromRome|0": {
"hashes": {
"hash_examples": "9db61480e2e85fd3",
"hash_full_prompts": "9db61480e2e85fd3",
"hash_input_tokens": "a66fb057cff27915",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Iraq|0": {
"hashes": {
"hash_examples": "96dac3dfa8d2f41f",
"hash_full_prompts": "96dac3dfa8d2f41f",
"hash_input_tokens": "9c4a20c96f6e95d2",
"hash_cont_tokens": "9531e0a89b3048dc"
},
"truncated": 0,
"non_truncated": 85,
"padded": 170,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Islam_Education|0": {
"hashes": {
"hash_examples": "0d80355f6a4cb51b",
"hash_full_prompts": "0d80355f6a4cb51b",
"hash_input_tokens": "7292c3e4ce6a2d1f",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Islam_branches_and_schools|0": {
"hashes": {
"hash_examples": "5cedce1be2c3ad50",
"hash_full_prompts": "5cedce1be2c3ad50",
"hash_input_tokens": "12f0b2ab0c3efcb6",
"hash_cont_tokens": "ea26f11d082efddd"
},
"truncated": 0,
"non_truncated": 175,
"padded": 350,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Islamic_law_system|0": {
"hashes": {
"hash_examples": "c0e6db8bc84e105e",
"hash_full_prompts": "c0e6db8bc84e105e",
"hash_input_tokens": "07a775b0056f51ff",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Jordan|0": {
"hashes": {
"hash_examples": "33deb5b4e5ddd6a1",
"hash_full_prompts": "33deb5b4e5ddd6a1",
"hash_input_tokens": "dbd79dae306b7c40",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Kuwait|0": {
"hashes": {
"hash_examples": "eb41773346d7c46c",
"hash_full_prompts": "eb41773346d7c46c",
"hash_input_tokens": "dbe67e6e406085a7",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Lebanon|0": {
"hashes": {
"hash_examples": "25932dbf4c13d34f",
"hash_full_prompts": "25932dbf4c13d34f",
"hash_input_tokens": "c438c02435d66402",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Libya|0": {
"hashes": {
"hash_examples": "f2c4db63cd402926",
"hash_full_prompts": "f2c4db63cd402926",
"hash_input_tokens": "48a1a383bbe562da",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Mauritania|0": {
"hashes": {
"hash_examples": "8723ab5fdf286b54",
"hash_full_prompts": "8723ab5fdf286b54",
"hash_input_tokens": "c686e77109790e38",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Mesopotamia_civilization|0": {
"hashes": {
"hash_examples": "c33f5502a6130ca9",
"hash_full_prompts": "c33f5502a6130ca9",
"hash_input_tokens": "fc27c03186728c74",
"hash_cont_tokens": "e80b6b48b632be1f"
},
"truncated": 0,
"non_truncated": 155,
"padded": 310,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Morocco|0": {
"hashes": {
"hash_examples": "588a5ed27904b1ae",
"hash_full_prompts": "588a5ed27904b1ae",
"hash_input_tokens": "4f96175e08465393",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Oman|0": {
"hashes": {
"hash_examples": "d447c52b94248b69",
"hash_full_prompts": "d447c52b94248b69",
"hash_input_tokens": "70374a88239ec02b",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Palestine|0": {
"hashes": {
"hash_examples": "19197e076ad14ff5",
"hash_full_prompts": "19197e076ad14ff5",
"hash_input_tokens": "ab316f6cd5017022",
"hash_cont_tokens": "9531e0a89b3048dc"
},
"truncated": 0,
"non_truncated": 85,
"padded": 170,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Qatar|0": {
"hashes": {
"hash_examples": "cf0736fa185b28f6",
"hash_full_prompts": "cf0736fa185b28f6",
"hash_input_tokens": "43d7803873003951",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Saudi_Arabia|0": {
"hashes": {
"hash_examples": "69beda6e1b85a08d",
"hash_full_prompts": "69beda6e1b85a08d",
"hash_input_tokens": "6b22da9ac7dc94a1",
"hash_cont_tokens": "a28f989813341fc1"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Somalia|0": {
"hashes": {
"hash_examples": "b387940c65784fbf",
"hash_full_prompts": "b387940c65784fbf",
"hash_input_tokens": "05598f9791c34623",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Sudan|0": {
"hashes": {
"hash_examples": "e02c32b9d2dd0c3f",
"hash_full_prompts": "e02c32b9d2dd0c3f",
"hash_input_tokens": "c19ba2a74976c47b",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Syria|0": {
"hashes": {
"hash_examples": "60a6f8fe73bda4bb",
"hash_full_prompts": "60a6f8fe73bda4bb",
"hash_input_tokens": "27ee40a7027aa345",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Tunisia|0": {
"hashes": {
"hash_examples": "34bb15d3830c5649",
"hash_full_prompts": "34bb15d3830c5649",
"hash_input_tokens": "f5da56313052f546",
"hash_cont_tokens": "2c7ca4cc20784924"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:United_Arab_Emirates|0": {
"hashes": {
"hash_examples": "98a0ba78172718ce",
"hash_full_prompts": "98a0ba78172718ce",
"hash_input_tokens": "5d958713d3a37587",
"hash_cont_tokens": "9531e0a89b3048dc"
},
"truncated": 0,
"non_truncated": 85,
"padded": 170,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Yemen|0": {
"hashes": {
"hash_examples": "18e9bcccbb4ced7a",
"hash_full_prompts": "18e9bcccbb4ced7a",
"hash_input_tokens": "0ef0e405b140850d",
"hash_cont_tokens": "c55906ac700b1ccb"
},
"truncated": 0,
"non_truncated": 10,
"padded": 20,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:communication|0": {
"hashes": {
"hash_examples": "9ff28ab5eab5c97b",
"hash_full_prompts": "9ff28ab5eab5c97b",
"hash_input_tokens": "3123d09235ce2e02",
"hash_cont_tokens": "697f72cbc32c5f64"
},
"truncated": 0,
"non_truncated": 364,
"padded": 728,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:computer_and_phone|0": {
"hashes": {
"hash_examples": "37bac2f086aaf6c2",
"hash_full_prompts": "37bac2f086aaf6c2",
"hash_input_tokens": "4c90330883fff8ff",
"hash_cont_tokens": "6bbcd0e015481614"
},
"truncated": 0,
"non_truncated": 295,
"padded": 590,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:daily_life|0": {
"hashes": {
"hash_examples": "bf07363c1c252e2f",
"hash_full_prompts": "bf07363c1c252e2f",
"hash_input_tokens": "483842df3c708258",
"hash_cont_tokens": "f2c5c561fb3baba9"
},
"truncated": 0,
"non_truncated": 337,
"padded": 674,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:entertainment|0": {
"hashes": {
"hash_examples": "37077bc00f0ac56a",
"hash_full_prompts": "37077bc00f0ac56a",
"hash_input_tokens": "a891100705a0e52e",
"hash_cont_tokens": "6bbcd0e015481614"
},
"truncated": 0,
"non_truncated": 295,
"padded": 590,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:mcq_exams_test_ar|0": {
"hashes": {
"hash_examples": "c07a5e78c5c0b8fe",
"hash_full_prompts": "c07a5e78c5c0b8fe",
"hash_input_tokens": "e200df0baa7f41d6",
"hash_cont_tokens": "00211aff25697911"
},
"truncated": 0,
"non_truncated": 557,
"padded": 2228,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_dialects|0": {
"hashes": {
"hash_examples": "c0b6081f83e14064",
"hash_full_prompts": "c0b6081f83e14064",
"hash_input_tokens": "ba6d0d9246af61c5",
"hash_cont_tokens": "13bb7e814c18ffdf"
},
"truncated": 0,
"non_truncated": 5395,
"padded": 21553,
"non_padded": 27,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_msa|0": {
"hashes": {
"hash_examples": "64eb78a7c5b7484b",
"hash_full_prompts": "64eb78a7c5b7484b",
"hash_input_tokens": "7fb226d573af4b4b",
"hash_cont_tokens": "e1ae520ad3d741fc"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3573,
"non_padded": 7,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"hashes": {
"hash_examples": "54fc3502c1c02c06",
"hash_full_prompts": "54fc3502c1c02c06",
"hash_input_tokens": "0d50210eac67d8eb",
"hash_cont_tokens": "a5b763359c50206f"
},
"truncated": 0,
"non_truncated": 75,
"padded": 150,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"hashes": {
"hash_examples": "46572d83696552ae",
"hash_full_prompts": "46572d83696552ae",
"hash_input_tokens": "967a711732500e9f",
"hash_cont_tokens": "e0f4019d6f840859"
},
"truncated": 5,
"non_truncated": 145,
"padded": 744,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"hashes": {
"hash_examples": "f430d97ff715bc1c",
"hash_full_prompts": "f430d97ff715bc1c",
"hash_input_tokens": "d816b8bb3103d009",
"hash_cont_tokens": "132af3fde7b0c1ad"
},
"truncated": 0,
"non_truncated": 150,
"padded": 747,
"non_padded": 3,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"hashes": {
"hash_examples": "6b70a7416584f98c",
"hash_full_prompts": "6b70a7416584f98c",
"hash_input_tokens": "9578d7363352f887",
"hash_cont_tokens": "c6a81ece1d9716fe"
},
"truncated": 0,
"non_truncated": 7995,
"padded": 15990,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"hashes": {
"hash_examples": "bc2005cc9d2f436e",
"hash_full_prompts": "bc2005cc9d2f436e",
"hash_input_tokens": "fb3fd54a6481eaa3",
"hash_cont_tokens": "5c9e78f73053ab05"
},
"truncated": 0,
"non_truncated": 5995,
"padded": 17846,
"non_padded": 139,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"hashes": {
"hash_examples": "6fb0e254ea5945d8",
"hash_full_prompts": "6fb0e254ea5945d8",
"hash_input_tokens": "e5f7ce711ed4a89b",
"hash_cont_tokens": "b70617079f1f9520"
},
"truncated": 0,
"non_truncated": 1720,
"padded": 5142,
"non_padded": 18,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_exams|0": {
"hashes": {
"hash_examples": "6d721df351722656",
"hash_full_prompts": "6d721df351722656",
"hash_input_tokens": "ffa3174fe8bd0313",
"hash_cont_tokens": "2b0bbd7dbf1988f6"
},
"truncated": 0,
"non_truncated": 537,
"padded": 2138,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:abstract_algebra|0": {
"hashes": {
"hash_examples": "f2ddca8f45c0a511",
"hash_full_prompts": "f2ddca8f45c0a511",
"hash_input_tokens": "0a5add1152998284",
"hash_cont_tokens": "1e345bbf921c10cf"
},
"truncated": 0,
"non_truncated": 100,
"padded": 398,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:anatomy|0": {
"hashes": {
"hash_examples": "dfdbc1b83107668d",
"hash_full_prompts": "dfdbc1b83107668d",
"hash_input_tokens": "5c5d61b6572f5927",
"hash_cont_tokens": "ea8033ecc2ea6e8e"
},
"truncated": 0,
"non_truncated": 135,
"padded": 534,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:astronomy|0": {
"hashes": {
"hash_examples": "9736a606002a848e",
"hash_full_prompts": "9736a606002a848e",
"hash_input_tokens": "7b36883dd0c87962",
"hash_cont_tokens": "4384eef852123eb3"
},
"truncated": 0,
"non_truncated": 152,
"padded": 604,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:business_ethics|0": {
"hashes": {
"hash_examples": "735e452fbb6dc63d",
"hash_full_prompts": "735e452fbb6dc63d",
"hash_input_tokens": "e23a4d8b22d9cd40",
"hash_cont_tokens": "1e345bbf921c10cf"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:clinical_knowledge|0": {
"hashes": {
"hash_examples": "6ab0ca4da98aedcf",
"hash_full_prompts": "6ab0ca4da98aedcf",
"hash_input_tokens": "b404a1b1d7af8eca",
"hash_cont_tokens": "c4f6cb96b01bbc11"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1054,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_biology|0": {
"hashes": {
"hash_examples": "17e4e390848018a4",
"hash_full_prompts": "17e4e390848018a4",
"hash_input_tokens": "1350930ff3ea64d4",
"hash_cont_tokens": "c7d9b2dc89bd7976"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_chemistry|0": {
"hashes": {
"hash_examples": "4abb169f6dfd234b",
"hash_full_prompts": "4abb169f6dfd234b",
"hash_input_tokens": "9580d38c9344cd6b",
"hash_cont_tokens": "1e345bbf921c10cf"
},
"truncated": 0,
"non_truncated": 100,
"padded": 398,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_computer_science|0": {
"hashes": {
"hash_examples": "a369e2e941358a1e",
"hash_full_prompts": "a369e2e941358a1e",
"hash_input_tokens": "c8c7ba0727c4abb3",
"hash_cont_tokens": "1e345bbf921c10cf"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_mathematics|0": {
"hashes": {
"hash_examples": "d7be03b8b6020bff",
"hash_full_prompts": "d7be03b8b6020bff",
"hash_input_tokens": "5f22cbf765b0907b",
"hash_cont_tokens": "1e345bbf921c10cf"
},
"truncated": 0,
"non_truncated": 100,
"padded": 398,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_medicine|0": {
"hashes": {
"hash_examples": "0518a00f097346bf",
"hash_full_prompts": "0518a00f097346bf",
"hash_input_tokens": "13705086f3178d24",
"hash_cont_tokens": "6bbced5b204a7341"
},
"truncated": 0,
"non_truncated": 173,
"padded": 688,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_physics|0": {
"hashes": {
"hash_examples": "5d842cd49bc70e12",
"hash_full_prompts": "5d842cd49bc70e12",
"hash_input_tokens": "d5f33f94e143261b",
"hash_cont_tokens": "695994df491000a7"
},
"truncated": 0,
"non_truncated": 102,
"padded": 404,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:computer_security|0": {
"hashes": {
"hash_examples": "8e85d9f85be9b32f",
"hash_full_prompts": "8e85d9f85be9b32f",
"hash_input_tokens": "7cc8adc51c8c8e69",
"hash_cont_tokens": "1e345bbf921c10cf"
},
"truncated": 0,
"non_truncated": 100,
"padded": 398,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:conceptual_physics|0": {
"hashes": {
"hash_examples": "7964b55a0a49502b",
"hash_full_prompts": "7964b55a0a49502b",
"hash_input_tokens": "a8edb78035538303",
"hash_cont_tokens": "337e25debff00a71"
},
"truncated": 0,
"non_truncated": 235,
"padded": 920,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:econometrics|0": {
"hashes": {
"hash_examples": "1e192eae38347257",
"hash_full_prompts": "1e192eae38347257",
"hash_input_tokens": "03df5fce5f7ca550",
"hash_cont_tokens": "c82f883c84c28dd1"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:electrical_engineering|0": {
"hashes": {
"hash_examples": "cf97671d5c441da1",
"hash_full_prompts": "cf97671d5c441da1",
"hash_input_tokens": "ef895f6efc0f9cea",
"hash_cont_tokens": "e0e8d9b27bfe0728"
},
"truncated": 0,
"non_truncated": 145,
"padded": 580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:elementary_mathematics|0": {
"hashes": {
"hash_examples": "6f49107ed43c40c5",
"hash_full_prompts": "6f49107ed43c40c5",
"hash_input_tokens": "3048ef12eee4e48c",
"hash_cont_tokens": "22d928b937187ba1"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1508,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:formal_logic|0": {
"hashes": {
"hash_examples": "7922c376008ba77b",
"hash_full_prompts": "7922c376008ba77b",
"hash_input_tokens": "177e856eb7bdc83f",
"hash_cont_tokens": "4ca9d0e01174ca23"
},
"truncated": 0,
"non_truncated": 126,
"padded": 500,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:global_facts|0": {
"hashes": {
"hash_examples": "11f9813185047d5b",
"hash_full_prompts": "11f9813185047d5b",
"hash_input_tokens": "937fb249d0ba43f3",
"hash_cont_tokens": "1e345bbf921c10cf"
},
"truncated": 0,
"non_truncated": 100,
"padded": 398,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_biology|0": {
"hashes": {
"hash_examples": "2a804b1d90cbe66e",
"hash_full_prompts": "2a804b1d90cbe66e",
"hash_input_tokens": "691a17d9b6417857",
"hash_cont_tokens": "a2fae6e6aaff8614"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1232,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_chemistry|0": {
"hashes": {
"hash_examples": "0032168adabc53b4",
"hash_full_prompts": "0032168adabc53b4",
"hash_input_tokens": "38efffe6e3ee182f",
"hash_cont_tokens": "44bfc3479ee5148b"
},
"truncated": 0,
"non_truncated": 203,
"padded": 810,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_computer_science|0": {
"hashes": {
"hash_examples": "f2fb8740f9df980f",
"hash_full_prompts": "f2fb8740f9df980f",
"hash_input_tokens": "1d72bce7251bcbcc",
"hash_cont_tokens": "1e345bbf921c10cf"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_european_history|0": {
"hashes": {
"hash_examples": "73509021e7e66435",
"hash_full_prompts": "73509021e7e66435",
"hash_input_tokens": "db3ba39e64f5fb1b",
"hash_cont_tokens": "6d6d41aaf4b8e061"
},
"truncated": 8,
"non_truncated": 157,
"padded": 652,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_geography|0": {
"hashes": {
"hash_examples": "9e08d1894940ff42",
"hash_full_prompts": "9e08d1894940ff42",
"hash_input_tokens": "c4dd8b365ed6d407",
"hash_cont_tokens": "7c335acaea165a28"
},
"truncated": 0,
"non_truncated": 198,
"padded": 786,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_government_and_politics|0": {
"hashes": {
"hash_examples": "64b7e97817ca6c76",
"hash_full_prompts": "64b7e97817ca6c76",
"hash_input_tokens": "6235b8272959981d",
"hash_cont_tokens": "dff1866c681cb0bd"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_macroeconomics|0": {
"hashes": {
"hash_examples": "9f582da8534bd2ef",
"hash_full_prompts": "9f582da8534bd2ef",
"hash_input_tokens": "8e48c4f13326c88d",
"hash_cont_tokens": "972b5ad5161dc137"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1556,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_mathematics|0": {
"hashes": {
"hash_examples": "fd54f1c10d423c51",
"hash_full_prompts": "fd54f1c10d423c51",
"hash_input_tokens": "f2bae056bb27a6e9",
"hash_cont_tokens": "a1b74b33d81719c7"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1078,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_microeconomics|0": {
"hashes": {
"hash_examples": "7037896925aaf42f",
"hash_full_prompts": "7037896925aaf42f",
"hash_input_tokens": "e8bdf1cf3d234c86",
"hash_cont_tokens": "cfbef6913c3eb666"
},
"truncated": 0,
"non_truncated": 238,
"padded": 950,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_physics|0": {
"hashes": {
"hash_examples": "60c3776215167dae",
"hash_full_prompts": "60c3776215167dae",
"hash_input_tokens": "40be4b5d3f05302f",
"hash_cont_tokens": "43b3c9c99112ea80"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_psychology|0": {
"hashes": {
"hash_examples": "61176bfd5da1298f",
"hash_full_prompts": "61176bfd5da1298f",
"hash_input_tokens": "abc66d1d7cec8b38",
"hash_cont_tokens": "5cd7a04e1285013f"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2173,
"non_padded": 7,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_statistics|0": {
"hashes": {
"hash_examples": "40dfeebd1ea10f76",
"hash_full_prompts": "40dfeebd1ea10f76",
"hash_input_tokens": "1211ebdb6e73bc00",
"hash_cont_tokens": "471cfa6a8a655143"
},
"truncated": 0,
"non_truncated": 216,
"padded": 852,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_us_history|0": {
"hashes": {
"hash_examples": "03daa510ba917f4d",
"hash_full_prompts": "03daa510ba917f4d",
"hash_input_tokens": "935ea4ab465f276a",
"hash_cont_tokens": "2f73593e06d349c3"
},
"truncated": 0,
"non_truncated": 204,
"padded": 800,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_world_history|0": {
"hashes": {
"hash_examples": "be075ffd579f43c2",
"hash_full_prompts": "be075ffd579f43c2",
"hash_input_tokens": "46c75eb19888b787",
"hash_cont_tokens": "bdfa14922a28ade1"
},
"truncated": 12,
"non_truncated": 225,
"padded": 914,
"non_padded": 34,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:human_aging|0": {
"hashes": {
"hash_examples": "caa5b69f640bd1ef",
"hash_full_prompts": "caa5b69f640bd1ef",
"hash_input_tokens": "a6543f85616a2bd0",
"hash_cont_tokens": "13f9b7d32041e539"
},
"truncated": 0,
"non_truncated": 223,
"padded": 888,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:human_sexuality|0": {
"hashes": {
"hash_examples": "5ed2e38fb25a3767",
"hash_full_prompts": "5ed2e38fb25a3767",
"hash_input_tokens": "7185f2cda9b3b80d",
"hash_cont_tokens": "596c7027395cb8c8"
},
"truncated": 0,
"non_truncated": 131,
"padded": 520,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:international_law|0": {
"hashes": {
"hash_examples": "4e3e9e28d1b96484",
"hash_full_prompts": "4e3e9e28d1b96484",
"hash_input_tokens": "02ce5b3b331b617b",
"hash_cont_tokens": "5794c36d9edf8ebd"
},
"truncated": 0,
"non_truncated": 121,
"padded": 480,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:jurisprudence|0": {
"hashes": {
"hash_examples": "e264b755366310b3",
"hash_full_prompts": "e264b755366310b3",
"hash_input_tokens": "fa6825fb60a13077",
"hash_cont_tokens": "ceeee59ec473a08c"
},
"truncated": 0,
"non_truncated": 108,
"padded": 426,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:logical_fallacies|0": {
"hashes": {
"hash_examples": "a4ab6965a3e38071",
"hash_full_prompts": "a4ab6965a3e38071",
"hash_input_tokens": "77e609a1b1670e47",
"hash_cont_tokens": "3c050f2b8a15d5a8"
},
"truncated": 0,
"non_truncated": 163,
"padded": 642,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:machine_learning|0": {
"hashes": {
"hash_examples": "b92320efa6636b40",
"hash_full_prompts": "b92320efa6636b40",
"hash_input_tokens": "3bbf18357c91a1b0",
"hash_cont_tokens": "aa0fd769ad365019"
},
"truncated": 0,
"non_truncated": 112,
"padded": 446,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:management|0": {
"hashes": {
"hash_examples": "c9ee4872a850fe20",
"hash_full_prompts": "c9ee4872a850fe20",
"hash_input_tokens": "1a4a6fce702b751c",
"hash_cont_tokens": "68e45ad4db670439"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:marketing|0": {
"hashes": {
"hash_examples": "0c151b70f6a047e3",
"hash_full_prompts": "0c151b70f6a047e3",
"hash_input_tokens": "39999b3657e4960c",
"hash_cont_tokens": "d081de7bda3a2bef"
},
"truncated": 0,
"non_truncated": 234,
"padded": 928,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:medical_genetics|0": {
"hashes": {
"hash_examples": "513f6cb8fca3a24e",
"hash_full_prompts": "513f6cb8fca3a24e",
"hash_input_tokens": "c5acf9c6df327d9a",
"hash_cont_tokens": "1e345bbf921c10cf"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:miscellaneous|0": {
"hashes": {
"hash_examples": "259a190d635331db",
"hash_full_prompts": "259a190d635331db",
"hash_input_tokens": "2cf57303b325287f",
"hash_cont_tokens": "bb58e15c6e765197"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3110,
"non_padded": 22,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:moral_disputes|0": {
"hashes": {
"hash_examples": "b85052c48a0b7bc3",
"hash_full_prompts": "b85052c48a0b7bc3",
"hash_input_tokens": "1c3869c7dda1e6c8",
"hash_cont_tokens": "2734d33c8c5a8818"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1378,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:moral_scenarios|0": {
"hashes": {
"hash_examples": "28d0b069ef00dd00",
"hash_full_prompts": "28d0b069ef00dd00",
"hash_input_tokens": "29b5fc011a52f9fe",
"hash_cont_tokens": "12059b70cce87cc1"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:nutrition|0": {
"hashes": {
"hash_examples": "00c9bc5f1d305b2f",
"hash_full_prompts": "00c9bc5f1d305b2f",
"hash_input_tokens": "a725f8646938ff13",
"hash_cont_tokens": "e90bf340154a927d"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1212,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:philosophy|0": {
"hashes": {
"hash_examples": "a458c08454a3fd5f",
"hash_full_prompts": "a458c08454a3fd5f",
"hash_input_tokens": "7312b10d99523a95",
"hash_cont_tokens": "f6b159198370618f"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1234,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:prehistory|0": {
"hashes": {
"hash_examples": "d6a0ecbdbb670e9c",
"hash_full_prompts": "d6a0ecbdbb670e9c",
"hash_input_tokens": "7871e3d278e701e2",
"hash_cont_tokens": "4facd438e9ae7191"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1288,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_accounting|0": {
"hashes": {
"hash_examples": "b4a95fe480b6540e",
"hash_full_prompts": "b4a95fe480b6540e",
"hash_input_tokens": "a4a8b104cb6cda68",
"hash_cont_tokens": "67a0fb4f18a1b1c1"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1128,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_law|0": {
"hashes": {
"hash_examples": "c2be9651cdbdde3b",
"hash_full_prompts": "c2be9651cdbdde3b",
"hash_input_tokens": "ce9f741adfcdb158",
"hash_cont_tokens": "7f5e4aece8a7d7cd"
},
"truncated": 48,
"non_truncated": 1486,
"padded": 6078,
"non_padded": 58,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_medicine|0": {
"hashes": {
"hash_examples": "26ce92416288f273",
"hash_full_prompts": "26ce92416288f273",
"hash_input_tokens": "6206fa531f092572",
"hash_cont_tokens": "fd9b60ff18fe4143"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1080,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_psychology|0": {
"hashes": {
"hash_examples": "71ea5f182ea9a641",
"hash_full_prompts": "71ea5f182ea9a641",
"hash_input_tokens": "9d98a6b5b976b6f7",
"hash_cont_tokens": "80ee4d92f7ee68e7"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2440,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:public_relations|0": {
"hashes": {
"hash_examples": "125adc21f91f8d77",
"hash_full_prompts": "125adc21f91f8d77",
"hash_input_tokens": "6619da86c4fd7b65",
"hash_cont_tokens": "8be6ae1b3cf3ef5a"
},
"truncated": 0,
"non_truncated": 110,
"padded": 438,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:security_studies|0": {
"hashes": {
"hash_examples": "3c18b216c099fb26",
"hash_full_prompts": "3c18b216c099fb26",
"hash_input_tokens": "4bcddbd082b42901",
"hash_cont_tokens": "a06b75df968e6a1d"
},
"truncated": 0,
"non_truncated": 245,
"padded": 978,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:sociology|0": {
"hashes": {
"hash_examples": "3f2a9634cef7417d",
"hash_full_prompts": "3f2a9634cef7417d",
"hash_input_tokens": "92ae13d13f7ce893",
"hash_cont_tokens": "070a56949eed3ebe"
},
"truncated": 0,
"non_truncated": 201,
"padded": 794,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:us_foreign_policy|0": {
"hashes": {
"hash_examples": "22249da54056475e",
"hash_full_prompts": "22249da54056475e",
"hash_input_tokens": "4f122f80a9c0654b",
"hash_cont_tokens": "1e345bbf921c10cf"
},
"truncated": 0,
"non_truncated": 100,
"padded": 398,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:virology|0": {
"hashes": {
"hash_examples": "9d194b9471dc624e",
"hash_full_prompts": "9d194b9471dc624e",
"hash_input_tokens": "012474e187f69de3",
"hash_cont_tokens": "6415116fce53cc41"
},
"truncated": 0,
"non_truncated": 166,
"padded": 658,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:world_religions|0": {
"hashes": {
"hash_examples": "229e5fe50082b064",
"hash_full_prompts": "229e5fe50082b064",
"hash_input_tokens": "287b59218ba14046",
"hash_cont_tokens": "6de17645c28f3f1c"
},
"truncated": 0,
"non_truncated": 171,
"padded": 674,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arc_challenge_okapi_ar|0": {
"hashes": {
"hash_examples": "ab893807673bc355",
"hash_full_prompts": "ab893807673bc355",
"hash_input_tokens": "982961fe8469c779",
"hash_cont_tokens": "3493068220883ca3"
},
"truncated": 0,
"non_truncated": 1160,
"padded": 4613,
"non_padded": 27,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arc_easy_ar|0": {
"hashes": {
"hash_examples": "acb688624acc3d04",
"hash_full_prompts": "acb688624acc3d04",
"hash_input_tokens": "4a157760423fd8d4",
"hash_cont_tokens": "2772a7b351de1e0c"
},
"truncated": 0,
"non_truncated": 2364,
"padded": 9392,
"non_padded": 64,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|boolq_ar|0": {
"hashes": {
"hash_examples": "48355a67867e0c32",
"hash_full_prompts": "48355a67867e0c32",
"hash_input_tokens": "db83f2bdfb54df3d",
"hash_cont_tokens": "6d359ab6597e2500"
},
"truncated": 22,
"non_truncated": 3238,
"padded": 6479,
"non_padded": 41,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|copa_ext_ar|0": {
"hashes": {
"hash_examples": "9bb83301bb72eecf",
"hash_full_prompts": "9bb83301bb72eecf",
"hash_input_tokens": "32d13fa1aa0e7338",
"hash_cont_tokens": "ae3392796411e259"
},
"truncated": 0,
"non_truncated": 90,
"padded": 180,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|hellaswag_okapi_ar|0": {
"hashes": {
"hash_examples": "6e8cf57a322dfadd",
"hash_full_prompts": "6e8cf57a322dfadd",
"hash_input_tokens": "86410ca0255bd04e",
"hash_cont_tokens": "b99e6be1b6c6612d"
},
"truncated": 0,
"non_truncated": 9171,
"padded": 36632,
"non_padded": 52,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|openbook_qa_ext_ar|0": {
"hashes": {
"hash_examples": "923d41eb0aca93eb",
"hash_full_prompts": "923d41eb0aca93eb",
"hash_input_tokens": "e223f792efb5be84",
"hash_cont_tokens": "12cd878a9e38c28f"
},
"truncated": 0,
"non_truncated": 495,
"padded": 1969,
"non_padded": 11,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|piqa_ar|0": {
"hashes": {
"hash_examples": "94bc205a520d3ea0",
"hash_full_prompts": "94bc205a520d3ea0",
"hash_input_tokens": "3455c02db2a0f279",
"hash_cont_tokens": "c6fea78e9be28b35"
},
"truncated": 10,
"non_truncated": 1823,
"padded": 3633,
"non_padded": 33,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|race_ar|0": {
"hashes": {
"hash_examples": "de65130bae647516",
"hash_full_prompts": "de65130bae647516",
"hash_input_tokens": "ed41cb719c4f8560",
"hash_cont_tokens": "e7d02656ef57bae0"
},
"truncated": 3555,
"non_truncated": 1374,
"padded": 16146,
"non_padded": 3570,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|sciq_ar|0": {
"hashes": {
"hash_examples": "732400d4b0189c8e",
"hash_full_prompts": "732400d4b0189c8e",
"hash_input_tokens": "b6d5a5a2261fa7c4",
"hash_cont_tokens": "db0822f6689518ad"
},
"truncated": 48,
"non_truncated": 947,
"padded": 3911,
"non_padded": 69,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|toxigen_ar|0": {
"hashes": {
"hash_examples": "1e139513004a9a2e",
"hash_full_prompts": "1e139513004a9a2e",
"hash_input_tokens": "91bd294124fd2d61",
"hash_cont_tokens": "7bbf8eead4749f31"
},
"truncated": 0,
"non_truncated": 935,
"padded": 1855,
"non_padded": 15,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|xstory_cloze:ar|0": {
"hashes": {
"hash_examples": "865426a22c787481",
"hash_full_prompts": "865426a22c787481",
"hash_input_tokens": "85767798555bcc3d",
"hash_cont_tokens": "c2aa491f6de7f89d"
},
"truncated": 0,
"non_truncated": 1511,
"padded": 2991,
"non_padded": 31,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "38734d49837f2eb2",
"hash_full_prompts": "38734d49837f2eb2",
"hash_input_tokens": "ecfd20c2f33ae84d",
"hash_cont_tokens": "68deaabdd88113af"
},
"truncated": 3708,
"non_truncated": 69256,
"padded": 231131,
"non_padded": 4492,
"num_truncated_few_shots": 0
}
}