results / migtissera / Tess-M-v1.3 / results_2024-05-17T14-04-22.963349.json
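The raw lighteval report below can be fetched and inspected programmatically. A minimal sketch, assuming the file lives in the OALL results dataset under the path shown in the header above (repo id "OALL/results" is an assumption taken from that breadcrumb; adjust if the hosting path differs):

import json
from huggingface_hub import hf_hub_download  # pip install huggingface_hub

# Assumed location: OALL community results dataset, file path as in the header above.
path = hf_hub_download(
    repo_id="OALL/results",
    repo_type="dataset",
    filename="migtissera/Tess-M-v1.3/results_2024-05-17T14-04-22.963349.json",
)

with open(path) as f:
    report = json.load(f)

# General run metadata (model name, dtype, total evaluation time, ...).
print(report["config_general"]["model_name"])

# Per-task normalized accuracies, e.g. every ACVA subset (including its _average entry).
for task, scores in report["results"].items():
    if task.startswith("community|acva:"):
        print(f"{task}: acc_norm={scores['acc_norm']:.4f}")

Each entry in "results" pairs a score with its standard error (acc_norm / acc_norm_stderr, or acc / acc_stderr for xstory_cloze), and "config_tasks" records how each task was built (prompt function, source repo and subset, splits, and metric).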
{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 481.5395096,
"end_time": 70079.506846877,
"total_evaluation_time_secondes": "69597.967337277",
"model_name": "migtissera/Tess-M-v1.3",
"model_sha": "eefb8c8a89918ed00dd7a0ee866821b5765b951c",
"model_dtype": "torch.float16",
"model_size": "69.78 GB",
"config": null
},
"results": {
"community|acva:Algeria|0": {
"acc_norm": 0.558974358974359,
"acc_norm_stderr": 0.035647329318535786
},
"community|acva:Ancient_Egypt|0": {
"acc_norm": 0.1111111111111111,
"acc_norm_stderr": 0.017735264099280446
},
"community|acva:Arab_Empire|0": {
"acc_norm": 0.32452830188679244,
"acc_norm_stderr": 0.028815615713432118
},
"community|acva:Arabic_Architecture|0": {
"acc_norm": 0.517948717948718,
"acc_norm_stderr": 0.035874770987738246
},
"community|acva:Arabic_Art|0": {
"acc_norm": 0.5230769230769231,
"acc_norm_stderr": 0.0358596530894741
},
"community|acva:Arabic_Astronomy|0": {
"acc_norm": 0.48205128205128206,
"acc_norm_stderr": 0.0358747709877383
},
"community|acva:Arabic_Calligraphy|0": {
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.031302995308925066
},
"community|acva:Arabic_Ceremony|0": {
"acc_norm": 0.5243243243243243,
"acc_norm_stderr": 0.0368168445060319
},
"community|acva:Arabic_Clothing|0": {
"acc_norm": 0.5435897435897435,
"acc_norm_stderr": 0.035761230969912156
},
"community|acva:Arabic_Culture|0": {
"acc_norm": 0.3128205128205128,
"acc_norm_stderr": 0.033287550657248546
},
"community|acva:Arabic_Food|0": {
"acc_norm": 0.5435897435897435,
"acc_norm_stderr": 0.03576123096991214
},
"community|acva:Arabic_Funeral|0": {
"acc_norm": 0.4,
"acc_norm_stderr": 0.050529115263991134
},
"community|acva:Arabic_Geography|0": {
"acc_norm": 0.5103448275862069,
"acc_norm_stderr": 0.04165774775728762
},
"community|acva:Arabic_History|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03384487217112063
},
"community|acva:Arabic_Language_Origin|0": {
"acc_norm": 0.5789473684210527,
"acc_norm_stderr": 0.050924152299673286
},
"community|acva:Arabic_Literature|0": {
"acc_norm": 0.4413793103448276,
"acc_norm_stderr": 0.04137931034482757
},
"community|acva:Arabic_Math|0": {
"acc_norm": 0.35384615384615387,
"acc_norm_stderr": 0.03433004254147036
},
"community|acva:Arabic_Medicine|0": {
"acc_norm": 0.496551724137931,
"acc_norm_stderr": 0.041665675771015785
},
"community|acva:Arabic_Music|0": {
"acc_norm": 0.26618705035971224,
"acc_norm_stderr": 0.037622409350890895
},
"community|acva:Arabic_Ornament|0": {
"acc_norm": 0.49743589743589745,
"acc_norm_stderr": 0.03589743589743589
},
"community|acva:Arabic_Philosophy|0": {
"acc_norm": 0.593103448275862,
"acc_norm_stderr": 0.04093793981266236
},
"community|acva:Arabic_Physics_and_Chemistry|0": {
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.03581804596782232
},
"community|acva:Arabic_Wedding|0": {
"acc_norm": 0.41025641025641024,
"acc_norm_stderr": 0.03531493712326671
},
"community|acva:Bahrain|0": {
"acc_norm": 0.35555555555555557,
"acc_norm_stderr": 0.07216392363431012
},
"community|acva:Comoros|0": {
"acc_norm": 0.4222222222222222,
"acc_norm_stderr": 0.07446027270295806
},
"community|acva:Egypt_modern|0": {
"acc_norm": 0.4842105263157895,
"acc_norm_stderr": 0.051545341795930656
},
"community|acva:InfluenceFromAncientEgypt|0": {
"acc_norm": 0.6461538461538462,
"acc_norm_stderr": 0.03433004254147036
},
"community|acva:InfluenceFromByzantium|0": {
"acc_norm": 0.7034482758620689,
"acc_norm_stderr": 0.03806142687309992
},
"community|acva:InfluenceFromChina|0": {
"acc_norm": 0.27692307692307694,
"acc_norm_stderr": 0.032127058190759304
},
"community|acva:InfluenceFromGreece|0": {
"acc_norm": 0.6307692307692307,
"acc_norm_stderr": 0.034648411418637566
},
"community|acva:InfluenceFromIslam|0": {
"acc_norm": 0.5103448275862069,
"acc_norm_stderr": 0.04165774775728763
},
"community|acva:InfluenceFromPersia|0": {
"acc_norm": 0.7428571428571429,
"acc_norm_stderr": 0.033133343292217204
},
"community|acva:InfluenceFromRome|0": {
"acc_norm": 0.5743589743589743,
"acc_norm_stderr": 0.03549871080367708
},
"community|acva:Iraq|0": {
"acc_norm": 0.5411764705882353,
"acc_norm_stderr": 0.0543691634273002
},
"community|acva:Islam_Education|0": {
"acc_norm": 0.4512820512820513,
"acc_norm_stderr": 0.03572709860318392
},
"community|acva:Islam_branches_and_schools|0": {
"acc_norm": 0.44571428571428573,
"acc_norm_stderr": 0.03768083305144797
},
"community|acva:Islamic_law_system|0": {
"acc_norm": 0.5641025641025641,
"acc_norm_stderr": 0.03560166662346636
},
"community|acva:Jordan|0": {
"acc_norm": 0.35555555555555557,
"acc_norm_stderr": 0.07216392363431012
},
"community|acva:Kuwait|0": {
"acc_norm": 0.3111111111111111,
"acc_norm_stderr": 0.06979205927323111
},
"community|acva:Lebanon|0": {
"acc_norm": 0.24444444444444444,
"acc_norm_stderr": 0.06478835438717
},
"community|acva:Libya|0": {
"acc_norm": 0.4888888888888889,
"acc_norm_stderr": 0.07535922203472523
},
"community|acva:Mauritania|0": {
"acc_norm": 0.4,
"acc_norm_stderr": 0.07385489458759965
},
"community|acva:Mesopotamia_civilization|0": {
"acc_norm": 0.5483870967741935,
"acc_norm_stderr": 0.040102036161810406
},
"community|acva:Morocco|0": {
"acc_norm": 0.28888888888888886,
"acc_norm_stderr": 0.06832943242540508
},
"community|acva:Oman|0": {
"acc_norm": 0.2,
"acc_norm_stderr": 0.06030226891555273
},
"community|acva:Palestine|0": {
"acc_norm": 0.2823529411764706,
"acc_norm_stderr": 0.049114753600680516
},
"community|acva:Qatar|0": {
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.07491109582924914
},
"community|acva:Saudi_Arabia|0": {
"acc_norm": 0.41025641025641024,
"acc_norm_stderr": 0.03531493712326673
},
"community|acva:Somalia|0": {
"acc_norm": 0.7111111111111111,
"acc_norm_stderr": 0.06832943242540507
},
"community|acva:Sudan|0": {
"acc_norm": 0.4222222222222222,
"acc_norm_stderr": 0.07446027270295806
},
"community|acva:Syria|0": {
"acc_norm": 0.4666666666666667,
"acc_norm_stderr": 0.0752101433090355
},
"community|acva:Tunisia|0": {
"acc_norm": 0.4222222222222222,
"acc_norm_stderr": 0.07446027270295805
},
"community|acva:United_Arab_Emirates|0": {
"acc_norm": 0.3058823529411765,
"acc_norm_stderr": 0.05027523520585573
},
"community|acva:Yemen|0": {
"acc_norm": 0.3,
"acc_norm_stderr": 0.15275252316519466
},
"community|acva:communication|0": {
"acc_norm": 0.43131868131868134,
"acc_norm_stderr": 0.02599443023962308
},
"community|acva:computer_and_phone|0": {
"acc_norm": 0.45084745762711864,
"acc_norm_stderr": 0.02901934773187137
},
"community|acva:daily_life|0": {
"acc_norm": 0.18991097922848665,
"acc_norm_stderr": 0.021397930418183976
},
"community|acva:entertainment|0": {
"acc_norm": 0.23389830508474577,
"acc_norm_stderr": 0.024687839412166384
},
"community|alghafa:mcq_exams_test_ar|0": {
"acc_norm": 0.2621184919210054,
"acc_norm_stderr": 0.018651112765714396
},
"community|alghafa:meta_ar_dialects|0": {
"acc_norm": 0.2748841519925857,
"acc_norm_stderr": 0.006078873116300998
},
"community|alghafa:meta_ar_msa|0": {
"acc_norm": 0.293854748603352,
"acc_norm_stderr": 0.015235075776719622
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"acc_norm": 0.52,
"acc_norm_stderr": 0.05807730170189531
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"acc_norm": 0.4066666666666667,
"acc_norm_stderr": 0.040241626657390624
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"acc_norm": 0.32666666666666666,
"acc_norm_stderr": 0.03842150156165228
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"acc_norm": 0.499812382739212,
"acc_norm_stderr": 0.005592267043694276
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"acc_norm": 0.3336113427856547,
"acc_norm_stderr": 0.006090119759393051
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"acc_norm": 0.34186046511627904,
"acc_norm_stderr": 0.011440512796479087
},
"community|arabic_exams|0": {
"acc_norm": 0.2532588454376164,
"acc_norm_stderr": 0.01878386507150923
},
"community|arabic_mmlu:abstract_algebra|0": {
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"community|arabic_mmlu:anatomy|0": {
"acc_norm": 0.2074074074074074,
"acc_norm_stderr": 0.035025531706783165
},
"community|arabic_mmlu:astronomy|0": {
"acc_norm": 0.3026315789473684,
"acc_norm_stderr": 0.037385206761196686
},
"community|arabic_mmlu:business_ethics|0": {
"acc_norm": 0.35,
"acc_norm_stderr": 0.04793724854411019
},
"community|arabic_mmlu:clinical_knowledge|0": {
"acc_norm": 0.2943396226415094,
"acc_norm_stderr": 0.02804918631569525
},
"community|arabic_mmlu:college_biology|0": {
"acc_norm": 0.2708333333333333,
"acc_norm_stderr": 0.03716177437566018
},
"community|arabic_mmlu:college_chemistry|0": {
"acc_norm": 0.18,
"acc_norm_stderr": 0.038612291966536955
},
"community|arabic_mmlu:college_computer_science|0": {
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"community|arabic_mmlu:college_mathematics|0": {
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816505
},
"community|arabic_mmlu:college_medicine|0": {
"acc_norm": 0.2543352601156069,
"acc_norm_stderr": 0.0332055644308557
},
"community|arabic_mmlu:college_physics|0": {
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.041583075330832865
},
"community|arabic_mmlu:computer_security|0": {
"acc_norm": 0.47,
"acc_norm_stderr": 0.050161355804659205
},
"community|arabic_mmlu:conceptual_physics|0": {
"acc_norm": 0.2851063829787234,
"acc_norm_stderr": 0.02951319662553935
},
"community|arabic_mmlu:econometrics|0": {
"acc_norm": 0.21052631578947367,
"acc_norm_stderr": 0.038351539543994194
},
"community|arabic_mmlu:electrical_engineering|0": {
"acc_norm": 0.3724137931034483,
"acc_norm_stderr": 0.0402873153294756
},
"community|arabic_mmlu:elementary_mathematics|0": {
"acc_norm": 0.2566137566137566,
"acc_norm_stderr": 0.022494510767503154
},
"community|arabic_mmlu:formal_logic|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.042163702135578345
},
"community|arabic_mmlu:global_facts|0": {
"acc_norm": 0.23,
"acc_norm_stderr": 0.042295258468165044
},
"community|arabic_mmlu:high_school_biology|0": {
"acc_norm": 0.3096774193548387,
"acc_norm_stderr": 0.026302774983517418
},
"community|arabic_mmlu:high_school_chemistry|0": {
"acc_norm": 0.24630541871921183,
"acc_norm_stderr": 0.030315099285617732
},
"community|arabic_mmlu:high_school_computer_science|0": {
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145632
},
"community|arabic_mmlu:high_school_european_history|0": {
"acc_norm": 0.24242424242424243,
"acc_norm_stderr": 0.03346409881055953
},
"community|arabic_mmlu:high_school_geography|0": {
"acc_norm": 0.30303030303030304,
"acc_norm_stderr": 0.03274287914026868
},
"community|arabic_mmlu:high_school_government_and_politics|0": {
"acc_norm": 0.29533678756476683,
"acc_norm_stderr": 0.03292296639155142
},
"community|arabic_mmlu:high_school_macroeconomics|0": {
"acc_norm": 0.3076923076923077,
"acc_norm_stderr": 0.0234009289183105
},
"community|arabic_mmlu:high_school_mathematics|0": {
"acc_norm": 0.2851851851851852,
"acc_norm_stderr": 0.027528599210340492
},
"community|arabic_mmlu:high_school_microeconomics|0": {
"acc_norm": 0.2689075630252101,
"acc_norm_stderr": 0.028801392193631276
},
"community|arabic_mmlu:high_school_physics|0": {
"acc_norm": 0.24503311258278146,
"acc_norm_stderr": 0.03511807571804724
},
"community|arabic_mmlu:high_school_psychology|0": {
"acc_norm": 0.27889908256880735,
"acc_norm_stderr": 0.019227468876463517
},
"community|arabic_mmlu:high_school_statistics|0": {
"acc_norm": 0.1712962962962963,
"acc_norm_stderr": 0.025695341643824685
},
"community|arabic_mmlu:high_school_us_history|0": {
"acc_norm": 0.2107843137254902,
"acc_norm_stderr": 0.02862654791243739
},
"community|arabic_mmlu:high_school_world_history|0": {
"acc_norm": 0.2869198312236287,
"acc_norm_stderr": 0.029443773022594693
},
"community|arabic_mmlu:human_aging|0": {
"acc_norm": 0.38565022421524664,
"acc_norm_stderr": 0.03266842214289201
},
"community|arabic_mmlu:human_sexuality|0": {
"acc_norm": 0.31297709923664124,
"acc_norm_stderr": 0.04066962905677697
},
"community|arabic_mmlu:international_law|0": {
"acc_norm": 0.4628099173553719,
"acc_norm_stderr": 0.045517111961042175
},
"community|arabic_mmlu:jurisprudence|0": {
"acc_norm": 0.4166666666666667,
"acc_norm_stderr": 0.04766075165356461
},
"community|arabic_mmlu:logical_fallacies|0": {
"acc_norm": 0.26993865030674846,
"acc_norm_stderr": 0.03487825168497892
},
"community|arabic_mmlu:machine_learning|0": {
"acc_norm": 0.24107142857142858,
"acc_norm_stderr": 0.04059867246952687
},
"community|arabic_mmlu:management|0": {
"acc_norm": 0.2912621359223301,
"acc_norm_stderr": 0.04498676320572922
},
"community|arabic_mmlu:marketing|0": {
"acc_norm": 0.36752136752136755,
"acc_norm_stderr": 0.03158539157745636
},
"community|arabic_mmlu:medical_genetics|0": {
"acc_norm": 0.43,
"acc_norm_stderr": 0.04975698519562427
},
"community|arabic_mmlu:miscellaneous|0": {
"acc_norm": 0.3499361430395913,
"acc_norm_stderr": 0.017055679797150423
},
"community|arabic_mmlu:moral_disputes|0": {
"acc_norm": 0.3092485549132948,
"acc_norm_stderr": 0.024883140570071755
},
"community|arabic_mmlu:moral_scenarios|0": {
"acc_norm": 0.24804469273743016,
"acc_norm_stderr": 0.014444157808261438
},
"community|arabic_mmlu:nutrition|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.02699254433929723
},
"community|arabic_mmlu:philosophy|0": {
"acc_norm": 0.2990353697749196,
"acc_norm_stderr": 0.026003301117885142
},
"community|arabic_mmlu:prehistory|0": {
"acc_norm": 0.29012345679012347,
"acc_norm_stderr": 0.025251173936495026
},
"community|arabic_mmlu:professional_accounting|0": {
"acc_norm": 0.2624113475177305,
"acc_norm_stderr": 0.026244920349843007
},
"community|arabic_mmlu:professional_law|0": {
"acc_norm": 0.2685788787483703,
"acc_norm_stderr": 0.011320056629121727
},
"community|arabic_mmlu:professional_medicine|0": {
"acc_norm": 0.1801470588235294,
"acc_norm_stderr": 0.02334516361654486
},
"community|arabic_mmlu:professional_psychology|0": {
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.018249024411207657
},
"community|arabic_mmlu:public_relations|0": {
"acc_norm": 0.35454545454545455,
"acc_norm_stderr": 0.04582004841505417
},
"community|arabic_mmlu:security_studies|0": {
"acc_norm": 0.39591836734693875,
"acc_norm_stderr": 0.03130802899065686
},
"community|arabic_mmlu:sociology|0": {
"acc_norm": 0.26865671641791045,
"acc_norm_stderr": 0.031343283582089536
},
"community|arabic_mmlu:us_foreign_policy|0": {
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"community|arabic_mmlu:virology|0": {
"acc_norm": 0.3192771084337349,
"acc_norm_stderr": 0.0362933532994786
},
"community|arabic_mmlu:world_religions|0": {
"acc_norm": 0.3157894736842105,
"acc_norm_stderr": 0.035650796707083106
},
"community|arc_challenge_okapi_ar|0": {
"acc_norm": 0.30344827586206896,
"acc_norm_stderr": 0.013504462552572259
},
"community|arc_easy_ar|0": {
"acc_norm": 0.26353637901861254,
"acc_norm_stderr": 0.009062835055516204
},
"community|boolq_ar|0": {
"acc_norm": 0.6346625766871166,
"acc_norm_stderr": 0.008434830228397713
},
"community|copa_ext_ar|0": {
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.05288198530254015
},
"community|hellaswag_okapi_ar|0": {
"acc_norm": 0.25482499182204776,
"acc_norm_stderr": 0.0045505666760969074
},
"community|openbook_qa_ext_ar|0": {
"acc_norm": 0.34545454545454546,
"acc_norm_stderr": 0.02139448746962015
},
"community|piqa_ar|0": {
"acc_norm": 0.49918166939443537,
"acc_norm_stderr": 0.011681717192194765
},
"community|race_ar|0": {
"acc_norm": 0.30837898153783727,
"acc_norm_stderr": 0.006578715823306954
},
"community|sciq_ar|0": {
"acc_norm": 0.40100502512562813,
"acc_norm_stderr": 0.015545092290350384
},
"community|toxigen_ar|0": {
"acc_norm": 0.4320855614973262,
"acc_norm_stderr": 0.01620887578524445
},
"lighteval|xstory_cloze:ar|0": {
"acc": 0.5215089344804765,
"acc_stderr": 0.012855214257296596
},
"community|acva:_average|0": {
"acc_norm": 0.4421999317291661,
"acc_norm_stderr": 0.04714283425709815
},
"community|alghafa:_average|0": {
"acc_norm": 0.36216387961015806,
"acc_norm_stderr": 0.022203154575471073
},
"community|arabic_mmlu:_average|0": {
"acc_norm": 0.298978596756632,
"acc_norm_stderr": 0.033902653497116055
},
"all": {
"acc_norm": 0.3716890456080123,
"acc_norm_stderr": 0.0373717885990659,
"acc": 0.5215089344804765,
"acc_stderr": 0.012855214257296596
}
},
"versions": {
"community|acva:Algeria|0": 0,
"community|acva:Ancient_Egypt|0": 0,
"community|acva:Arab_Empire|0": 0,
"community|acva:Arabic_Architecture|0": 0,
"community|acva:Arabic_Art|0": 0,
"community|acva:Arabic_Astronomy|0": 0,
"community|acva:Arabic_Calligraphy|0": 0,
"community|acva:Arabic_Ceremony|0": 0,
"community|acva:Arabic_Clothing|0": 0,
"community|acva:Arabic_Culture|0": 0,
"community|acva:Arabic_Food|0": 0,
"community|acva:Arabic_Funeral|0": 0,
"community|acva:Arabic_Geography|0": 0,
"community|acva:Arabic_History|0": 0,
"community|acva:Arabic_Language_Origin|0": 0,
"community|acva:Arabic_Literature|0": 0,
"community|acva:Arabic_Math|0": 0,
"community|acva:Arabic_Medicine|0": 0,
"community|acva:Arabic_Music|0": 0,
"community|acva:Arabic_Ornament|0": 0,
"community|acva:Arabic_Philosophy|0": 0,
"community|acva:Arabic_Physics_and_Chemistry|0": 0,
"community|acva:Arabic_Wedding|0": 0,
"community|acva:Bahrain|0": 0,
"community|acva:Comoros|0": 0,
"community|acva:Egypt_modern|0": 0,
"community|acva:InfluenceFromAncientEgypt|0": 0,
"community|acva:InfluenceFromByzantium|0": 0,
"community|acva:InfluenceFromChina|0": 0,
"community|acva:InfluenceFromGreece|0": 0,
"community|acva:InfluenceFromIslam|0": 0,
"community|acva:InfluenceFromPersia|0": 0,
"community|acva:InfluenceFromRome|0": 0,
"community|acva:Iraq|0": 0,
"community|acva:Islam_Education|0": 0,
"community|acva:Islam_branches_and_schools|0": 0,
"community|acva:Islamic_law_system|0": 0,
"community|acva:Jordan|0": 0,
"community|acva:Kuwait|0": 0,
"community|acva:Lebanon|0": 0,
"community|acva:Libya|0": 0,
"community|acva:Mauritania|0": 0,
"community|acva:Mesopotamia_civilization|0": 0,
"community|acva:Morocco|0": 0,
"community|acva:Oman|0": 0,
"community|acva:Palestine|0": 0,
"community|acva:Qatar|0": 0,
"community|acva:Saudi_Arabia|0": 0,
"community|acva:Somalia|0": 0,
"community|acva:Sudan|0": 0,
"community|acva:Syria|0": 0,
"community|acva:Tunisia|0": 0,
"community|acva:United_Arab_Emirates|0": 0,
"community|acva:Yemen|0": 0,
"community|acva:communication|0": 0,
"community|acva:computer_and_phone|0": 0,
"community|acva:daily_life|0": 0,
"community|acva:entertainment|0": 0,
"community|alghafa:mcq_exams_test_ar|0": 0,
"community|alghafa:meta_ar_dialects|0": 0,
"community|alghafa:meta_ar_msa|0": 0,
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0,
"community|alghafa:multiple_choice_sentiment_task|0": 0,
"community|arabic_exams|0": 0,
"community|arabic_mmlu:abstract_algebra|0": 0,
"community|arabic_mmlu:anatomy|0": 0,
"community|arabic_mmlu:astronomy|0": 0,
"community|arabic_mmlu:business_ethics|0": 0,
"community|arabic_mmlu:clinical_knowledge|0": 0,
"community|arabic_mmlu:college_biology|0": 0,
"community|arabic_mmlu:college_chemistry|0": 0,
"community|arabic_mmlu:college_computer_science|0": 0,
"community|arabic_mmlu:college_mathematics|0": 0,
"community|arabic_mmlu:college_medicine|0": 0,
"community|arabic_mmlu:college_physics|0": 0,
"community|arabic_mmlu:computer_security|0": 0,
"community|arabic_mmlu:conceptual_physics|0": 0,
"community|arabic_mmlu:econometrics|0": 0,
"community|arabic_mmlu:electrical_engineering|0": 0,
"community|arabic_mmlu:elementary_mathematics|0": 0,
"community|arabic_mmlu:formal_logic|0": 0,
"community|arabic_mmlu:global_facts|0": 0,
"community|arabic_mmlu:high_school_biology|0": 0,
"community|arabic_mmlu:high_school_chemistry|0": 0,
"community|arabic_mmlu:high_school_computer_science|0": 0,
"community|arabic_mmlu:high_school_european_history|0": 0,
"community|arabic_mmlu:high_school_geography|0": 0,
"community|arabic_mmlu:high_school_government_and_politics|0": 0,
"community|arabic_mmlu:high_school_macroeconomics|0": 0,
"community|arabic_mmlu:high_school_mathematics|0": 0,
"community|arabic_mmlu:high_school_microeconomics|0": 0,
"community|arabic_mmlu:high_school_physics|0": 0,
"community|arabic_mmlu:high_school_psychology|0": 0,
"community|arabic_mmlu:high_school_statistics|0": 0,
"community|arabic_mmlu:high_school_us_history|0": 0,
"community|arabic_mmlu:high_school_world_history|0": 0,
"community|arabic_mmlu:human_aging|0": 0,
"community|arabic_mmlu:human_sexuality|0": 0,
"community|arabic_mmlu:international_law|0": 0,
"community|arabic_mmlu:jurisprudence|0": 0,
"community|arabic_mmlu:logical_fallacies|0": 0,
"community|arabic_mmlu:machine_learning|0": 0,
"community|arabic_mmlu:management|0": 0,
"community|arabic_mmlu:marketing|0": 0,
"community|arabic_mmlu:medical_genetics|0": 0,
"community|arabic_mmlu:miscellaneous|0": 0,
"community|arabic_mmlu:moral_disputes|0": 0,
"community|arabic_mmlu:moral_scenarios|0": 0,
"community|arabic_mmlu:nutrition|0": 0,
"community|arabic_mmlu:philosophy|0": 0,
"community|arabic_mmlu:prehistory|0": 0,
"community|arabic_mmlu:professional_accounting|0": 0,
"community|arabic_mmlu:professional_law|0": 0,
"community|arabic_mmlu:professional_medicine|0": 0,
"community|arabic_mmlu:professional_psychology|0": 0,
"community|arabic_mmlu:public_relations|0": 0,
"community|arabic_mmlu:security_studies|0": 0,
"community|arabic_mmlu:sociology|0": 0,
"community|arabic_mmlu:us_foreign_policy|0": 0,
"community|arabic_mmlu:virology|0": 0,
"community|arabic_mmlu:world_religions|0": 0,
"community|arc_challenge_okapi_ar|0": 0,
"community|arc_easy_ar|0": 0,
"community|boolq_ar|0": 0,
"community|copa_ext_ar|0": 0,
"community|hellaswag_okapi_ar|0": 0,
"community|openbook_qa_ext_ar|0": 0,
"community|piqa_ar|0": 0,
"community|race_ar|0": 0,
"community|sciq_ar|0": 0,
"community|toxigen_ar|0": 0,
"lighteval|xstory_cloze:ar|0": 0
},
"config_tasks": {
"community|acva:Algeria": {
"name": "acva:Algeria",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Algeria",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Ancient_Egypt": {
"name": "acva:Ancient_Egypt",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Ancient_Egypt",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 315,
"effective_num_docs": 315,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arab_Empire": {
"name": "acva:Arab_Empire",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arab_Empire",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Architecture": {
"name": "acva:Arabic_Architecture",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Architecture",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Art": {
"name": "acva:Arabic_Art",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Art",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Astronomy": {
"name": "acva:Arabic_Astronomy",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Astronomy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Calligraphy": {
"name": "acva:Arabic_Calligraphy",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Calligraphy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 255,
"effective_num_docs": 255,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Ceremony": {
"name": "acva:Arabic_Ceremony",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Ceremony",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 185,
"effective_num_docs": 185,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Clothing": {
"name": "acva:Arabic_Clothing",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Clothing",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Culture": {
"name": "acva:Arabic_Culture",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Culture",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Food": {
"name": "acva:Arabic_Food",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Food",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Funeral": {
"name": "acva:Arabic_Funeral",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Funeral",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 95,
"effective_num_docs": 95,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Geography": {
"name": "acva:Arabic_Geography",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Geography",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_History": {
"name": "acva:Arabic_History",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_History",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Language_Origin": {
"name": "acva:Arabic_Language_Origin",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Language_Origin",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 95,
"effective_num_docs": 95,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Literature": {
"name": "acva:Arabic_Literature",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Literature",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Math": {
"name": "acva:Arabic_Math",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Math",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Medicine": {
"name": "acva:Arabic_Medicine",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Medicine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Music": {
"name": "acva:Arabic_Music",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Music",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 139,
"effective_num_docs": 139,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Ornament": {
"name": "acva:Arabic_Ornament",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Ornament",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Philosophy": {
"name": "acva:Arabic_Philosophy",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Philosophy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Physics_and_Chemistry": {
"name": "acva:Arabic_Physics_and_Chemistry",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Physics_and_Chemistry",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Wedding": {
"name": "acva:Arabic_Wedding",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Wedding",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Bahrain": {
"name": "acva:Bahrain",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Bahrain",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Comoros": {
"name": "acva:Comoros",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Comoros",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Egypt_modern": {
"name": "acva:Egypt_modern",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Egypt_modern",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 95,
"effective_num_docs": 95,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromAncientEgypt": {
"name": "acva:InfluenceFromAncientEgypt",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromAncientEgypt",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromByzantium": {
"name": "acva:InfluenceFromByzantium",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromByzantium",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromChina": {
"name": "acva:InfluenceFromChina",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromChina",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromGreece": {
"name": "acva:InfluenceFromGreece",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromGreece",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromIslam": {
"name": "acva:InfluenceFromIslam",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromIslam",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromPersia": {
"name": "acva:InfluenceFromPersia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromPersia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 175,
"effective_num_docs": 175,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromRome": {
"name": "acva:InfluenceFromRome",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromRome",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Iraq": {
"name": "acva:Iraq",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Iraq",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 85,
"effective_num_docs": 85,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Islam_Education": {
"name": "acva:Islam_Education",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Islam_Education",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Islam_branches_and_schools": {
"name": "acva:Islam_branches_and_schools",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Islam_branches_and_schools",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 175,
"effective_num_docs": 175,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Islamic_law_system": {
"name": "acva:Islamic_law_system",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Islamic_law_system",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Jordan": {
"name": "acva:Jordan",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Jordan",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Kuwait": {
"name": "acva:Kuwait",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Kuwait",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Lebanon": {
"name": "acva:Lebanon",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Lebanon",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Libya": {
"name": "acva:Libya",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Libya",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Mauritania": {
"name": "acva:Mauritania",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Mauritania",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Mesopotamia_civilization": {
"name": "acva:Mesopotamia_civilization",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Mesopotamia_civilization",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 155,
"effective_num_docs": 155,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Morocco": {
"name": "acva:Morocco",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Morocco",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Oman": {
"name": "acva:Oman",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Oman",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Palestine": {
"name": "acva:Palestine",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Palestine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 85,
"effective_num_docs": 85,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Qatar": {
"name": "acva:Qatar",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Qatar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Saudi_Arabia": {
"name": "acva:Saudi_Arabia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Saudi_Arabia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Somalia": {
"name": "acva:Somalia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Somalia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Sudan": {
"name": "acva:Sudan",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Sudan",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Syria": {
"name": "acva:Syria",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Syria",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Tunisia": {
"name": "acva:Tunisia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Tunisia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:United_Arab_Emirates": {
"name": "acva:United_Arab_Emirates",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "United_Arab_Emirates",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 85,
"effective_num_docs": 85,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Yemen": {
"name": "acva:Yemen",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Yemen",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 10,
"effective_num_docs": 10,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:communication": {
"name": "acva:communication",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "communication",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 364,
"effective_num_docs": 364,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:computer_and_phone": {
"name": "acva:computer_and_phone",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "computer_and_phone",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 295,
"effective_num_docs": 295,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:daily_life": {
"name": "acva:daily_life",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "daily_life",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 337,
"effective_num_docs": 337,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:entertainment": {
"name": "acva:entertainment",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "entertainment",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 295,
"effective_num_docs": 295,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:mcq_exams_test_ar": {
"name": "alghafa:mcq_exams_test_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "mcq_exams_test_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 557,
"effective_num_docs": 557,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:meta_ar_dialects": {
"name": "alghafa:meta_ar_dialects",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_dialects",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 5395,
"effective_num_docs": 5395,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:meta_ar_msa": {
"name": "alghafa:meta_ar_msa",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_msa",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": {
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_facts_truefalse_balanced_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 75,
"effective_num_docs": 75,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task": {
"name": "alghafa:multiple_choice_grounded_statement_soqal_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_soqal_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": {
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": {
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 7995,
"effective_num_docs": 7995,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task": {
"name": "alghafa:multiple_choice_rating_sentiment_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 5995,
"effective_num_docs": 5995,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_sentiment_task": {
"name": "alghafa:multiple_choice_sentiment_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_sentiment_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1720,
"effective_num_docs": 1720,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_exams": {
"name": "arabic_exams",
"prompt_function": "arabic_exams",
"hf_repo": "OALL/Arabic_EXAMS",
"hf_subset": "default",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 537,
"effective_num_docs": 537,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:abstract_algebra": {
"name": "arabic_mmlu:abstract_algebra",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "abstract_algebra",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:anatomy": {
"name": "arabic_mmlu:anatomy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "anatomy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 135,
"effective_num_docs": 135,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:astronomy": {
"name": "arabic_mmlu:astronomy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "astronomy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 152,
"effective_num_docs": 152,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:business_ethics": {
"name": "arabic_mmlu:business_ethics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "business_ethics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:clinical_knowledge": {
"name": "arabic_mmlu:clinical_knowledge",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "clinical_knowledge",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_biology": {
"name": "arabic_mmlu:college_biology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_biology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 144,
"effective_num_docs": 144,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_chemistry": {
"name": "arabic_mmlu:college_chemistry",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_chemistry",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_computer_science": {
"name": "arabic_mmlu:college_computer_science",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_computer_science",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_mathematics": {
"name": "arabic_mmlu:college_mathematics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_mathematics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_medicine": {
"name": "arabic_mmlu:college_medicine",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_medicine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 173,
"effective_num_docs": 173,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_physics": {
"name": "arabic_mmlu:college_physics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_physics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:computer_security": {
"name": "arabic_mmlu:computer_security",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "computer_security",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:conceptual_physics": {
"name": "arabic_mmlu:conceptual_physics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "conceptual_physics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 235,
"effective_num_docs": 235,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:econometrics": {
"name": "arabic_mmlu:econometrics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "econometrics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 114,
"effective_num_docs": 114,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:electrical_engineering": {
"name": "arabic_mmlu:electrical_engineering",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "electrical_engineering",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:elementary_mathematics": {
"name": "arabic_mmlu:elementary_mathematics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "elementary_mathematics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 378,
"effective_num_docs": 378,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:formal_logic": {
"name": "arabic_mmlu:formal_logic",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "formal_logic",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 126,
"effective_num_docs": 126,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:global_facts": {
"name": "arabic_mmlu:global_facts",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "global_facts",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_biology": {
"name": "arabic_mmlu:high_school_biology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_biology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 310,
"effective_num_docs": 310,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_chemistry": {
"name": "arabic_mmlu:high_school_chemistry",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_chemistry",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 203,
"effective_num_docs": 203,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_computer_science": {
"name": "arabic_mmlu:high_school_computer_science",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_computer_science",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_european_history": {
"name": "arabic_mmlu:high_school_european_history",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_european_history",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 165,
"effective_num_docs": 165,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_geography": {
"name": "arabic_mmlu:high_school_geography",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_geography",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 198,
"effective_num_docs": 198,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_government_and_politics": {
"name": "arabic_mmlu:high_school_government_and_politics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_government_and_politics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 193,
"effective_num_docs": 193,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_macroeconomics": {
"name": "arabic_mmlu:high_school_macroeconomics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_macroeconomics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 390,
"effective_num_docs": 390,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_mathematics": {
"name": "arabic_mmlu:high_school_mathematics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_mathematics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 270,
"effective_num_docs": 270,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_microeconomics": {
"name": "arabic_mmlu:high_school_microeconomics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_microeconomics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_physics": {
"name": "arabic_mmlu:high_school_physics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_physics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 151,
"effective_num_docs": 151,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_psychology": {
"name": "arabic_mmlu:high_school_psychology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_psychology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 545,
"effective_num_docs": 545,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_statistics": {
"name": "arabic_mmlu:high_school_statistics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_statistics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 216,
"effective_num_docs": 216,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_us_history": {
"name": "arabic_mmlu:high_school_us_history",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_us_history",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 204,
"effective_num_docs": 204,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_world_history": {
"name": "arabic_mmlu:high_school_world_history",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_world_history",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 237,
"effective_num_docs": 237,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:human_aging": {
"name": "arabic_mmlu:human_aging",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "human_aging",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 223,
"effective_num_docs": 223,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:human_sexuality": {
"name": "arabic_mmlu:human_sexuality",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "human_sexuality",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 131,
"effective_num_docs": 131,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:international_law": {
"name": "arabic_mmlu:international_law",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "international_law",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 121,
"effective_num_docs": 121,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:jurisprudence": {
"name": "arabic_mmlu:jurisprudence",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "jurisprudence",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 108,
"effective_num_docs": 108,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:logical_fallacies": {
"name": "arabic_mmlu:logical_fallacies",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "logical_fallacies",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 163,
"effective_num_docs": 163,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:machine_learning": {
"name": "arabic_mmlu:machine_learning",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "machine_learning",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 112,
"effective_num_docs": 112,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:management": {
"name": "arabic_mmlu:management",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "management",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 103,
"effective_num_docs": 103,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:marketing": {
"name": "arabic_mmlu:marketing",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "marketing",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 234,
"effective_num_docs": 234,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:medical_genetics": {
"name": "arabic_mmlu:medical_genetics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "medical_genetics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:miscellaneous": {
"name": "arabic_mmlu:miscellaneous",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "miscellaneous",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 783,
"effective_num_docs": 783,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:moral_disputes": {
"name": "arabic_mmlu:moral_disputes",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "moral_disputes",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 346,
"effective_num_docs": 346,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:moral_scenarios": {
"name": "arabic_mmlu:moral_scenarios",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "moral_scenarios",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:nutrition": {
"name": "arabic_mmlu:nutrition",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "nutrition",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 306,
"effective_num_docs": 306,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:philosophy": {
"name": "arabic_mmlu:philosophy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "philosophy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 311,
"effective_num_docs": 311,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:prehistory": {
"name": "arabic_mmlu:prehistory",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "prehistory",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 324,
"effective_num_docs": 324,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_accounting": {
"name": "arabic_mmlu:professional_accounting",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_accounting",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 282,
"effective_num_docs": 282,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_law": {
"name": "arabic_mmlu:professional_law",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_law",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1534,
"effective_num_docs": 1534,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_medicine": {
"name": "arabic_mmlu:professional_medicine",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_medicine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 272,
"effective_num_docs": 272,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_psychology": {
"name": "arabic_mmlu:professional_psychology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_psychology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:public_relations": {
"name": "arabic_mmlu:public_relations",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "public_relations",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 110,
"effective_num_docs": 110,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:security_studies": {
"name": "arabic_mmlu:security_studies",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "security_studies",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 245,
"effective_num_docs": 245,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:sociology": {
"name": "arabic_mmlu:sociology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "sociology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 201,
"effective_num_docs": 201,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:us_foreign_policy": {
"name": "arabic_mmlu:us_foreign_policy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "us_foreign_policy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:virology": {
"name": "arabic_mmlu:virology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "virology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 166,
"effective_num_docs": 166,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:world_religions": {
"name": "arabic_mmlu:world_religions",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "world_religions",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 171,
"effective_num_docs": 171,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arc_challenge_okapi_ar": {
"name": "arc_challenge_okapi_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "arc_challenge_okapi_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1160,
"effective_num_docs": 1160,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arc_easy_ar": {
"name": "arc_easy_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "arc_easy_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 2364,
"effective_num_docs": 2364,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|boolq_ar": {
"name": "boolq_ar",
"prompt_function": "boolq_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "boolq_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 3260,
"effective_num_docs": 3260,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|copa_ext_ar": {
"name": "copa_ext_ar",
"prompt_function": "copa_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "copa_ext_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 90,
"effective_num_docs": 90,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|hellaswag_okapi_ar": {
"name": "hellaswag_okapi_ar",
"prompt_function": "hellaswag_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "hellaswag_okapi_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 9171,
"effective_num_docs": 9171,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|openbook_qa_ext_ar": {
"name": "openbook_qa_ext_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "openbook_qa_ext_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 495,
"effective_num_docs": 495,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|piqa_ar": {
"name": "piqa_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "piqa_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1833,
"effective_num_docs": 1833,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|race_ar": {
"name": "race_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "race_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 4929,
"effective_num_docs": 4929,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|sciq_ar": {
"name": "sciq_ar",
"prompt_function": "sciq_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "sciq_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 995,
"effective_num_docs": 995,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|toxigen_ar": {
"name": "toxigen_ar",
"prompt_function": "toxigen_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "toxigen_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 935,
"effective_num_docs": 935,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"lighteval|xstory_cloze:ar": {
"name": "xstory_cloze:ar",
"prompt_function": "storycloze",
"hf_repo": "juletxara/xstory_cloze",
"hf_subset": "ar",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"training",
"eval"
],
"evaluation_splits": [
"eval"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 1511,
"effective_num_docs": 1511,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
}
},
"summary_tasks": {
"community|acva:Algeria|0": {
"hashes": {
"hash_examples": "da5a3003cd46f6f9",
"hash_full_prompts": "da5a3003cd46f6f9",
"hash_input_tokens": "1e9313f85fa3be1a",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Ancient_Egypt|0": {
"hashes": {
"hash_examples": "52d6f767fede195b",
"hash_full_prompts": "52d6f767fede195b",
"hash_input_tokens": "e77d904627992cb3",
"hash_cont_tokens": "5f7d1751e6ad9399"
},
"truncated": 0,
"non_truncated": 315,
"padded": 630,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arab_Empire|0": {
"hashes": {
"hash_examples": "8dacff6a79804a75",
"hash_full_prompts": "8dacff6a79804a75",
"hash_input_tokens": "d3dc0b8f656b3ef0",
"hash_cont_tokens": "8783a9653c6992cc"
},
"truncated": 0,
"non_truncated": 265,
"padded": 530,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Architecture|0": {
"hashes": {
"hash_examples": "df286cd862d9f6bb",
"hash_full_prompts": "df286cd862d9f6bb",
"hash_input_tokens": "ab9348c5a6ab6a53",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Art|0": {
"hashes": {
"hash_examples": "112883d764118a49",
"hash_full_prompts": "112883d764118a49",
"hash_input_tokens": "5213dd867fd6591c",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Astronomy|0": {
"hashes": {
"hash_examples": "20dcdf2454bf8671",
"hash_full_prompts": "20dcdf2454bf8671",
"hash_input_tokens": "97c1a8aa5ff86d6a",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Calligraphy|0": {
"hashes": {
"hash_examples": "3a9f9d1ebe868a15",
"hash_full_prompts": "3a9f9d1ebe868a15",
"hash_input_tokens": "ebf2b5fdf79a8a3e",
"hash_cont_tokens": "1e63d5b9bb8d45b9"
},
"truncated": 0,
"non_truncated": 255,
"padded": 510,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Ceremony|0": {
"hashes": {
"hash_examples": "c927630f8d2f44da",
"hash_full_prompts": "c927630f8d2f44da",
"hash_input_tokens": "76e30350059d0d16",
"hash_cont_tokens": "587187a4b9ec6b9f"
},
"truncated": 0,
"non_truncated": 185,
"padded": 370,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Clothing|0": {
"hashes": {
"hash_examples": "6ad0740c2ac6ac92",
"hash_full_prompts": "6ad0740c2ac6ac92",
"hash_input_tokens": "acf01813ffb586a9",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Culture|0": {
"hashes": {
"hash_examples": "2177bd857ad872ae",
"hash_full_prompts": "2177bd857ad872ae",
"hash_input_tokens": "5c166c6091c03e3a",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Food|0": {
"hashes": {
"hash_examples": "a6ada65b71d7c9c5",
"hash_full_prompts": "a6ada65b71d7c9c5",
"hash_input_tokens": "a1f462e3e7b9dcb6",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Funeral|0": {
"hashes": {
"hash_examples": "fcee39dc29eaae91",
"hash_full_prompts": "fcee39dc29eaae91",
"hash_input_tokens": "c48f6967d410e4d4",
"hash_cont_tokens": "a53062899e4fc8e9"
},
"truncated": 0,
"non_truncated": 95,
"padded": 190,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Geography|0": {
"hashes": {
"hash_examples": "d36eda7c89231c02",
"hash_full_prompts": "d36eda7c89231c02",
"hash_input_tokens": "a3a2ba37d9542f66",
"hash_cont_tokens": "c15aaf8e70b82ff0"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_History|0": {
"hashes": {
"hash_examples": "6354ac0d6db6a5fc",
"hash_full_prompts": "6354ac0d6db6a5fc",
"hash_input_tokens": "a6750b67e2603cbe",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Language_Origin|0": {
"hashes": {
"hash_examples": "ddc967c8aca34402",
"hash_full_prompts": "ddc967c8aca34402",
"hash_input_tokens": "36ff201216bec29e",
"hash_cont_tokens": "a53062899e4fc8e9"
},
"truncated": 0,
"non_truncated": 95,
"padded": 190,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Literature|0": {
"hashes": {
"hash_examples": "4305379fd46be5d8",
"hash_full_prompts": "4305379fd46be5d8",
"hash_input_tokens": "748ed6e0eac54dba",
"hash_cont_tokens": "c15aaf8e70b82ff0"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Math|0": {
"hashes": {
"hash_examples": "dec621144f4d28be",
"hash_full_prompts": "dec621144f4d28be",
"hash_input_tokens": "ddd9d116cd92508c",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Medicine|0": {
"hashes": {
"hash_examples": "2b344cdae9495ff2",
"hash_full_prompts": "2b344cdae9495ff2",
"hash_input_tokens": "be848aaebf7adfed",
"hash_cont_tokens": "c15aaf8e70b82ff0"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Music|0": {
"hashes": {
"hash_examples": "0c54624d881944ce",
"hash_full_prompts": "0c54624d881944ce",
"hash_input_tokens": "6cb0fc091c241cbc",
"hash_cont_tokens": "1859e6278efcea1b"
},
"truncated": 0,
"non_truncated": 139,
"padded": 278,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Ornament|0": {
"hashes": {
"hash_examples": "251a4a84289d8bc1",
"hash_full_prompts": "251a4a84289d8bc1",
"hash_input_tokens": "671ba465f86f4d22",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Philosophy|0": {
"hashes": {
"hash_examples": "3f86fb9c94c13d22",
"hash_full_prompts": "3f86fb9c94c13d22",
"hash_input_tokens": "ef28c5fb6cd83459",
"hash_cont_tokens": "c15aaf8e70b82ff0"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Physics_and_Chemistry|0": {
"hashes": {
"hash_examples": "8fec65af3695b62a",
"hash_full_prompts": "8fec65af3695b62a",
"hash_input_tokens": "617b8805dafb5a40",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Wedding|0": {
"hashes": {
"hash_examples": "9cc3477184d7a4b8",
"hash_full_prompts": "9cc3477184d7a4b8",
"hash_input_tokens": "485b44f043385d55",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Bahrain|0": {
"hashes": {
"hash_examples": "c92e803a0fa8b9e2",
"hash_full_prompts": "c92e803a0fa8b9e2",
"hash_input_tokens": "0914a02dbd7734c4",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Comoros|0": {
"hashes": {
"hash_examples": "06e5d4bba8e54cae",
"hash_full_prompts": "06e5d4bba8e54cae",
"hash_input_tokens": "c6f848fa5ed3fa4c",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Egypt_modern|0": {
"hashes": {
"hash_examples": "c6ec369164f93446",
"hash_full_prompts": "c6ec369164f93446",
"hash_input_tokens": "05b74bd196752b3a",
"hash_cont_tokens": "a53062899e4fc8e9"
},
"truncated": 0,
"non_truncated": 95,
"padded": 190,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromAncientEgypt|0": {
"hashes": {
"hash_examples": "b9d56d74818b9bd4",
"hash_full_prompts": "b9d56d74818b9bd4",
"hash_input_tokens": "ae335753f1884308",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromByzantium|0": {
"hashes": {
"hash_examples": "5316c9624e7e59b8",
"hash_full_prompts": "5316c9624e7e59b8",
"hash_input_tokens": "46cd915bbd4ecfc1",
"hash_cont_tokens": "c15aaf8e70b82ff0"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromChina|0": {
"hashes": {
"hash_examples": "87894bce95a56411",
"hash_full_prompts": "87894bce95a56411",
"hash_input_tokens": "649581dd25f4275c",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromGreece|0": {
"hashes": {
"hash_examples": "0baa78a27e469312",
"hash_full_prompts": "0baa78a27e469312",
"hash_input_tokens": "a58b8668338c5912",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromIslam|0": {
"hashes": {
"hash_examples": "0c2532cde6541ff2",
"hash_full_prompts": "0c2532cde6541ff2",
"hash_input_tokens": "42dfe7eed8a2941a",
"hash_cont_tokens": "c15aaf8e70b82ff0"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromPersia|0": {
"hashes": {
"hash_examples": "efcd8112dc53c6e5",
"hash_full_prompts": "efcd8112dc53c6e5",
"hash_input_tokens": "35119b4a5e95546e",
"hash_cont_tokens": "7305f8930f27cd4e"
},
"truncated": 0,
"non_truncated": 175,
"padded": 350,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromRome|0": {
"hashes": {
"hash_examples": "9db61480e2e85fd3",
"hash_full_prompts": "9db61480e2e85fd3",
"hash_input_tokens": "22dd59ec50bc7f55",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Iraq|0": {
"hashes": {
"hash_examples": "96dac3dfa8d2f41f",
"hash_full_prompts": "96dac3dfa8d2f41f",
"hash_input_tokens": "15c6c630ce78a43a",
"hash_cont_tokens": "79b22c57a64b25bf"
},
"truncated": 0,
"non_truncated": 85,
"padded": 170,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Islam_Education|0": {
"hashes": {
"hash_examples": "0d80355f6a4cb51b",
"hash_full_prompts": "0d80355f6a4cb51b",
"hash_input_tokens": "259d6850e62b92ae",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Islam_branches_and_schools|0": {
"hashes": {
"hash_examples": "5cedce1be2c3ad50",
"hash_full_prompts": "5cedce1be2c3ad50",
"hash_input_tokens": "6370ae55f5de219d",
"hash_cont_tokens": "7305f8930f27cd4e"
},
"truncated": 0,
"non_truncated": 175,
"padded": 350,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Islamic_law_system|0": {
"hashes": {
"hash_examples": "c0e6db8bc84e105e",
"hash_full_prompts": "c0e6db8bc84e105e",
"hash_input_tokens": "13c2d96b0c9a09fd",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Jordan|0": {
"hashes": {
"hash_examples": "33deb5b4e5ddd6a1",
"hash_full_prompts": "33deb5b4e5ddd6a1",
"hash_input_tokens": "765bafe590d38a1e",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Kuwait|0": {
"hashes": {
"hash_examples": "eb41773346d7c46c",
"hash_full_prompts": "eb41773346d7c46c",
"hash_input_tokens": "a2849f5449b8c8e6",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Lebanon|0": {
"hashes": {
"hash_examples": "25932dbf4c13d34f",
"hash_full_prompts": "25932dbf4c13d34f",
"hash_input_tokens": "5eababbc02a72011",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Libya|0": {
"hashes": {
"hash_examples": "f2c4db63cd402926",
"hash_full_prompts": "f2c4db63cd402926",
"hash_input_tokens": "a0a61a2934aa8588",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Mauritania|0": {
"hashes": {
"hash_examples": "8723ab5fdf286b54",
"hash_full_prompts": "8723ab5fdf286b54",
"hash_input_tokens": "3305077e89c73cc1",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Mesopotamia_civilization|0": {
"hashes": {
"hash_examples": "c33f5502a6130ca9",
"hash_full_prompts": "c33f5502a6130ca9",
"hash_input_tokens": "fba9bf53a39a5e66",
"hash_cont_tokens": "d6ed79461fc11fb7"
},
"truncated": 0,
"non_truncated": 155,
"padded": 310,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Morocco|0": {
"hashes": {
"hash_examples": "588a5ed27904b1ae",
"hash_full_prompts": "588a5ed27904b1ae",
"hash_input_tokens": "f2d07a53c01ea75b",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Oman|0": {
"hashes": {
"hash_examples": "d447c52b94248b69",
"hash_full_prompts": "d447c52b94248b69",
"hash_input_tokens": "abd5d013d4827423",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Palestine|0": {
"hashes": {
"hash_examples": "19197e076ad14ff5",
"hash_full_prompts": "19197e076ad14ff5",
"hash_input_tokens": "f63c9ceb34df7df3",
"hash_cont_tokens": "79b22c57a64b25bf"
},
"truncated": 0,
"non_truncated": 85,
"padded": 170,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Qatar|0": {
"hashes": {
"hash_examples": "cf0736fa185b28f6",
"hash_full_prompts": "cf0736fa185b28f6",
"hash_input_tokens": "e77f47d9cdef193f",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Saudi_Arabia|0": {
"hashes": {
"hash_examples": "69beda6e1b85a08d",
"hash_full_prompts": "69beda6e1b85a08d",
"hash_input_tokens": "0e86a22ba434d16f",
"hash_cont_tokens": "b3ed374c07f6a1ba"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Somalia|0": {
"hashes": {
"hash_examples": "b387940c65784fbf",
"hash_full_prompts": "b387940c65784fbf",
"hash_input_tokens": "0a457ba31a06b646",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Sudan|0": {
"hashes": {
"hash_examples": "e02c32b9d2dd0c3f",
"hash_full_prompts": "e02c32b9d2dd0c3f",
"hash_input_tokens": "7ef35b1866aefeb8",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Syria|0": {
"hashes": {
"hash_examples": "60a6f8fe73bda4bb",
"hash_full_prompts": "60a6f8fe73bda4bb",
"hash_input_tokens": "14612d92408b6f53",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Tunisia|0": {
"hashes": {
"hash_examples": "34bb15d3830c5649",
"hash_full_prompts": "34bb15d3830c5649",
"hash_input_tokens": "94e1e6412e958f1e",
"hash_cont_tokens": "e32fa30311c5caac"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:United_Arab_Emirates|0": {
"hashes": {
"hash_examples": "98a0ba78172718ce",
"hash_full_prompts": "98a0ba78172718ce",
"hash_input_tokens": "39ced768789209e9",
"hash_cont_tokens": "79b22c57a64b25bf"
},
"truncated": 0,
"non_truncated": 85,
"padded": 170,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Yemen|0": {
"hashes": {
"hash_examples": "18e9bcccbb4ced7a",
"hash_full_prompts": "18e9bcccbb4ced7a",
"hash_input_tokens": "e1055179539d5df9",
"hash_cont_tokens": "546600309db314db"
},
"truncated": 0,
"non_truncated": 10,
"padded": 20,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:communication|0": {
"hashes": {
"hash_examples": "9ff28ab5eab5c97b",
"hash_full_prompts": "9ff28ab5eab5c97b",
"hash_input_tokens": "333e0ea5f34f6953",
"hash_cont_tokens": "922348efeac48904"
},
"truncated": 0,
"non_truncated": 364,
"padded": 728,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:computer_and_phone|0": {
"hashes": {
"hash_examples": "37bac2f086aaf6c2",
"hash_full_prompts": "37bac2f086aaf6c2",
"hash_input_tokens": "ddc82e9d58e65edb",
"hash_cont_tokens": "21c566d247b85282"
},
"truncated": 0,
"non_truncated": 295,
"padded": 590,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:daily_life|0": {
"hashes": {
"hash_examples": "bf07363c1c252e2f",
"hash_full_prompts": "bf07363c1c252e2f",
"hash_input_tokens": "2e8ed14f3286ff87",
"hash_cont_tokens": "35a4aa65a4889d29"
},
"truncated": 0,
"non_truncated": 337,
"padded": 674,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:entertainment|0": {
"hashes": {
"hash_examples": "37077bc00f0ac56a",
"hash_full_prompts": "37077bc00f0ac56a",
"hash_input_tokens": "276aaa57b4751350",
"hash_cont_tokens": "21c566d247b85282"
},
"truncated": 0,
"non_truncated": 295,
"padded": 590,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:mcq_exams_test_ar|0": {
"hashes": {
"hash_examples": "c07a5e78c5c0b8fe",
"hash_full_prompts": "c07a5e78c5c0b8fe",
"hash_input_tokens": "de47b28d9ee76872",
"hash_cont_tokens": "d625c55cecf56c98"
},
"truncated": 0,
"non_truncated": 557,
"padded": 2228,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_dialects|0": {
"hashes": {
"hash_examples": "c0b6081f83e14064",
"hash_full_prompts": "c0b6081f83e14064",
"hash_input_tokens": "66cc392b1fa43b8f",
"hash_cont_tokens": "dc457b275d6de4e2"
},
"truncated": 0,
"non_truncated": 5395,
"padded": 21580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_msa|0": {
"hashes": {
"hash_examples": "64eb78a7c5b7484b",
"hash_full_prompts": "64eb78a7c5b7484b",
"hash_input_tokens": "ba5efa6a176a6ac8",
"hash_cont_tokens": "046b55645def694d"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"hashes": {
"hash_examples": "54fc3502c1c02c06",
"hash_full_prompts": "54fc3502c1c02c06",
"hash_input_tokens": "26cd6b68c5d6db98",
"hash_cont_tokens": "21a564f3caaf138d"
},
"truncated": 0,
"non_truncated": 75,
"padded": 150,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"hashes": {
"hash_examples": "46572d83696552ae",
"hash_full_prompts": "46572d83696552ae",
"hash_input_tokens": "9e81a3f0362312da",
"hash_cont_tokens": "fb98c023b6c1db18"
},
"truncated": 0,
"non_truncated": 150,
"padded": 750,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"hashes": {
"hash_examples": "f430d97ff715bc1c",
"hash_full_prompts": "f430d97ff715bc1c",
"hash_input_tokens": "dbb7094dc20a2217",
"hash_cont_tokens": "0f0bc76437c61af7"
},
"truncated": 0,
"non_truncated": 150,
"padded": 750,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"hashes": {
"hash_examples": "6b70a7416584f98c",
"hash_full_prompts": "6b70a7416584f98c",
"hash_input_tokens": "a6bd3f145f15ac4e",
"hash_cont_tokens": "b0cc8cf11c102265"
},
"truncated": 0,
"non_truncated": 7995,
"padded": 15990,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"hashes": {
"hash_examples": "bc2005cc9d2f436e",
"hash_full_prompts": "bc2005cc9d2f436e",
"hash_input_tokens": "edb6a6b8d9abe8fb",
"hash_cont_tokens": "7c4c9000eafebe43"
},
"truncated": 0,
"non_truncated": 5995,
"padded": 17985,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"hashes": {
"hash_examples": "6fb0e254ea5945d8",
"hash_full_prompts": "6fb0e254ea5945d8",
"hash_input_tokens": "7bc69c3b355b2e42",
"hash_cont_tokens": "02aed8bf71d25cfb"
},
"truncated": 0,
"non_truncated": 1720,
"padded": 5160,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_exams|0": {
"hashes": {
"hash_examples": "6d721df351722656",
"hash_full_prompts": "6d721df351722656",
"hash_input_tokens": "119af2b205ecdab0",
"hash_cont_tokens": "75d35d1d02a02179"
},
"truncated": 0,
"non_truncated": 537,
"padded": 2148,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:abstract_algebra|0": {
"hashes": {
"hash_examples": "f2ddca8f45c0a511",
"hash_full_prompts": "f2ddca8f45c0a511",
"hash_input_tokens": "3c79e2e3cd88e82a",
"hash_cont_tokens": "00a223315c15a9ce"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:anatomy|0": {
"hashes": {
"hash_examples": "dfdbc1b83107668d",
"hash_full_prompts": "dfdbc1b83107668d",
"hash_input_tokens": "dcc709bb9a718e19",
"hash_cont_tokens": "e7cfbda8199e7611"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:astronomy|0": {
"hashes": {
"hash_examples": "9736a606002a848e",
"hash_full_prompts": "9736a606002a848e",
"hash_input_tokens": "32721f4ff43f99fc",
"hash_cont_tokens": "d5464986c8d97559"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:business_ethics|0": {
"hashes": {
"hash_examples": "735e452fbb6dc63d",
"hash_full_prompts": "735e452fbb6dc63d",
"hash_input_tokens": "79681f25b58e4722",
"hash_cont_tokens": "00a223315c15a9ce"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:clinical_knowledge|0": {
"hashes": {
"hash_examples": "6ab0ca4da98aedcf",
"hash_full_prompts": "6ab0ca4da98aedcf",
"hash_input_tokens": "139c7fd077a23d18",
"hash_cont_tokens": "048c2bb287cd81e3"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1060,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_biology|0": {
"hashes": {
"hash_examples": "17e4e390848018a4",
"hash_full_prompts": "17e4e390848018a4",
"hash_input_tokens": "ce5945e51fb6b37f",
"hash_cont_tokens": "f77b26b202ef9b2b"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_chemistry|0": {
"hashes": {
"hash_examples": "4abb169f6dfd234b",
"hash_full_prompts": "4abb169f6dfd234b",
"hash_input_tokens": "69b28619f44dab85",
"hash_cont_tokens": "00a223315c15a9ce"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_computer_science|0": {
"hashes": {
"hash_examples": "a369e2e941358a1e",
"hash_full_prompts": "a369e2e941358a1e",
"hash_input_tokens": "82b8a8471c1bc6cc",
"hash_cont_tokens": "00a223315c15a9ce"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_mathematics|0": {
"hashes": {
"hash_examples": "d7be03b8b6020bff",
"hash_full_prompts": "d7be03b8b6020bff",
"hash_input_tokens": "74bf2673f9472f10",
"hash_cont_tokens": "00a223315c15a9ce"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_medicine|0": {
"hashes": {
"hash_examples": "0518a00f097346bf",
"hash_full_prompts": "0518a00f097346bf",
"hash_input_tokens": "33f507fc7b90da8d",
"hash_cont_tokens": "76fa38a751ac9bc2"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_physics|0": {
"hashes": {
"hash_examples": "5d842cd49bc70e12",
"hash_full_prompts": "5d842cd49bc70e12",
"hash_input_tokens": "cdd011c51f22d100",
"hash_cont_tokens": "c786ae78224e9572"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:computer_security|0": {
"hashes": {
"hash_examples": "8e85d9f85be9b32f",
"hash_full_prompts": "8e85d9f85be9b32f",
"hash_input_tokens": "a8e4ca0e62299418",
"hash_cont_tokens": "00a223315c15a9ce"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:conceptual_physics|0": {
"hashes": {
"hash_examples": "7964b55a0a49502b",
"hash_full_prompts": "7964b55a0a49502b",
"hash_input_tokens": "7c9e47f0be1bc4a0",
"hash_cont_tokens": "cdabd8b4dc5070a7"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:econometrics|0": {
"hashes": {
"hash_examples": "1e192eae38347257",
"hash_full_prompts": "1e192eae38347257",
"hash_input_tokens": "4ffbea809ce9fdcf",
"hash_cont_tokens": "79f7b7012bc5cae3"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:electrical_engineering|0": {
"hashes": {
"hash_examples": "cf97671d5c441da1",
"hash_full_prompts": "cf97671d5c441da1",
"hash_input_tokens": "44107229f6d23816",
"hash_cont_tokens": "98c8c04cfecc2e3a"
},
"truncated": 0,
"non_truncated": 145,
"padded": 580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:elementary_mathematics|0": {
"hashes": {
"hash_examples": "6f49107ed43c40c5",
"hash_full_prompts": "6f49107ed43c40c5",
"hash_input_tokens": "4c24c129f5a52c4c",
"hash_cont_tokens": "55f0c3be194d2e8f"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:formal_logic|0": {
"hashes": {
"hash_examples": "7922c376008ba77b",
"hash_full_prompts": "7922c376008ba77b",
"hash_input_tokens": "94d98d524e6f0333",
"hash_cont_tokens": "e715b1eaed2453d1"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:global_facts|0": {
"hashes": {
"hash_examples": "11f9813185047d5b",
"hash_full_prompts": "11f9813185047d5b",
"hash_input_tokens": "2cdca15c982f7469",
"hash_cont_tokens": "00a223315c15a9ce"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_biology|0": {
"hashes": {
"hash_examples": "2a804b1d90cbe66e",
"hash_full_prompts": "2a804b1d90cbe66e",
"hash_input_tokens": "882e829e7bee5534",
"hash_cont_tokens": "b5a22be1545a5885"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_chemistry|0": {
"hashes": {
"hash_examples": "0032168adabc53b4",
"hash_full_prompts": "0032168adabc53b4",
"hash_input_tokens": "c65ce5084684dc46",
"hash_cont_tokens": "41f59f267986db24"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_computer_science|0": {
"hashes": {
"hash_examples": "f2fb8740f9df980f",
"hash_full_prompts": "f2fb8740f9df980f",
"hash_input_tokens": "814cbe647168bd03",
"hash_cont_tokens": "00a223315c15a9ce"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_european_history|0": {
"hashes": {
"hash_examples": "73509021e7e66435",
"hash_full_prompts": "73509021e7e66435",
"hash_input_tokens": "b282f95f27435632",
"hash_cont_tokens": "ba925766deaa3c15"
},
"truncated": 0,
"non_truncated": 165,
"padded": 660,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_geography|0": {
"hashes": {
"hash_examples": "9e08d1894940ff42",
"hash_full_prompts": "9e08d1894940ff42",
"hash_input_tokens": "cf63dcdbbc4e349b",
"hash_cont_tokens": "ba892eb0674f32f0"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_government_and_politics|0": {
"hashes": {
"hash_examples": "64b7e97817ca6c76",
"hash_full_prompts": "64b7e97817ca6c76",
"hash_input_tokens": "101490b3eb41b0a4",
"hash_cont_tokens": "da6ef64a42a0438e"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_macroeconomics|0": {
"hashes": {
"hash_examples": "9f582da8534bd2ef",
"hash_full_prompts": "9f582da8534bd2ef",
"hash_input_tokens": "f601c7abd9e1ccae",
"hash_cont_tokens": "6660aeaa8368b295"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_mathematics|0": {
"hashes": {
"hash_examples": "fd54f1c10d423c51",
"hash_full_prompts": "fd54f1c10d423c51",
"hash_input_tokens": "cb100a3e17ebcfcc",
"hash_cont_tokens": "07575f7140327432"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_microeconomics|0": {
"hashes": {
"hash_examples": "7037896925aaf42f",
"hash_full_prompts": "7037896925aaf42f",
"hash_input_tokens": "d4531075db87835b",
"hash_cont_tokens": "19bfd23905b3bec3"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_physics|0": {
"hashes": {
"hash_examples": "60c3776215167dae",
"hash_full_prompts": "60c3776215167dae",
"hash_input_tokens": "ef3f5f2ad2779ec8",
"hash_cont_tokens": "0562b521128fbe6b"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_psychology|0": {
"hashes": {
"hash_examples": "61176bfd5da1298f",
"hash_full_prompts": "61176bfd5da1298f",
"hash_input_tokens": "f073e7393c5ec9a9",
"hash_cont_tokens": "cfa877943c39a466"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_statistics|0": {
"hashes": {
"hash_examples": "40dfeebd1ea10f76",
"hash_full_prompts": "40dfeebd1ea10f76",
"hash_input_tokens": "811d7fd7e3fff1cb",
"hash_cont_tokens": "e58c5a09e16fdd84"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_us_history|0": {
"hashes": {
"hash_examples": "03daa510ba917f4d",
"hash_full_prompts": "03daa510ba917f4d",
"hash_input_tokens": "366ea66b0a8ecb69",
"hash_cont_tokens": "b93a8c4f2a2af9f0"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_world_history|0": {
"hashes": {
"hash_examples": "be075ffd579f43c2",
"hash_full_prompts": "be075ffd579f43c2",
"hash_input_tokens": "2933234f8e745ce0",
"hash_cont_tokens": "54a49424057f2e6b"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:human_aging|0": {
"hashes": {
"hash_examples": "caa5b69f640bd1ef",
"hash_full_prompts": "caa5b69f640bd1ef",
"hash_input_tokens": "f0fb7f7a8a6e3b5a",
"hash_cont_tokens": "eec7417389927586"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:human_sexuality|0": {
"hashes": {
"hash_examples": "5ed2e38fb25a3767",
"hash_full_prompts": "5ed2e38fb25a3767",
"hash_input_tokens": "74c84a564383aa9a",
"hash_cont_tokens": "9dc6bb3c4ecb3178"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:international_law|0": {
"hashes": {
"hash_examples": "4e3e9e28d1b96484",
"hash_full_prompts": "4e3e9e28d1b96484",
"hash_input_tokens": "d84bcb27b1e9ebea",
"hash_cont_tokens": "192d0de6ed8ebc9b"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:jurisprudence|0": {
"hashes": {
"hash_examples": "e264b755366310b3",
"hash_full_prompts": "e264b755366310b3",
"hash_input_tokens": "91b32d656273fbdb",
"hash_cont_tokens": "75dcdf05908542a5"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:logical_fallacies|0": {
"hashes": {
"hash_examples": "a4ab6965a3e38071",
"hash_full_prompts": "a4ab6965a3e38071",
"hash_input_tokens": "7944d642b084326a",
"hash_cont_tokens": "ffd60b4e1fa30f63"
},
"truncated": 0,
"non_truncated": 163,
"padded": 652,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:machine_learning|0": {
"hashes": {
"hash_examples": "b92320efa6636b40",
"hash_full_prompts": "b92320efa6636b40",
"hash_input_tokens": "038ede6e6cbad95b",
"hash_cont_tokens": "4f0e7389d8fa87d9"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:management|0": {
"hashes": {
"hash_examples": "c9ee4872a850fe20",
"hash_full_prompts": "c9ee4872a850fe20",
"hash_input_tokens": "585d9a3b56cd93a4",
"hash_cont_tokens": "75a8c1e4c452cb1d"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:marketing|0": {
"hashes": {
"hash_examples": "0c151b70f6a047e3",
"hash_full_prompts": "0c151b70f6a047e3",
"hash_input_tokens": "0a8b6e6cd1d6e221",
"hash_cont_tokens": "727f87587fa3a95b"
},
"truncated": 0,
"non_truncated": 234,
"padded": 936,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:medical_genetics|0": {
"hashes": {
"hash_examples": "513f6cb8fca3a24e",
"hash_full_prompts": "513f6cb8fca3a24e",
"hash_input_tokens": "d8def5662cba9b2a",
"hash_cont_tokens": "00a223315c15a9ce"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:miscellaneous|0": {
"hashes": {
"hash_examples": "259a190d635331db",
"hash_full_prompts": "259a190d635331db",
"hash_input_tokens": "74c38e12c56b9465",
"hash_cont_tokens": "73f36e25a6fef508"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3124,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:moral_disputes|0": {
"hashes": {
"hash_examples": "b85052c48a0b7bc3",
"hash_full_prompts": "b85052c48a0b7bc3",
"hash_input_tokens": "a0580bb05c57fd36",
"hash_cont_tokens": "6b2c2a581ad40b69"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1384,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:moral_scenarios|0": {
"hashes": {
"hash_examples": "28d0b069ef00dd00",
"hash_full_prompts": "28d0b069ef00dd00",
"hash_input_tokens": "0204b8aebac53efd",
"hash_cont_tokens": "af64a7a018654c30"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:nutrition|0": {
"hashes": {
"hash_examples": "00c9bc5f1d305b2f",
"hash_full_prompts": "00c9bc5f1d305b2f",
"hash_input_tokens": "8e5911b1b054335c",
"hash_cont_tokens": "6e20cc0d504ecac3"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1218,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:philosophy|0": {
"hashes": {
"hash_examples": "a458c08454a3fd5f",
"hash_full_prompts": "a458c08454a3fd5f",
"hash_input_tokens": "623a854d447e136e",
"hash_cont_tokens": "aa4ac747c265c6ee"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1234,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:prehistory|0": {
"hashes": {
"hash_examples": "d6a0ecbdbb670e9c",
"hash_full_prompts": "d6a0ecbdbb670e9c",
"hash_input_tokens": "857185f963da6de0",
"hash_cont_tokens": "77d8e266e29e78db"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1288,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_accounting|0": {
"hashes": {
"hash_examples": "b4a95fe480b6540e",
"hash_full_prompts": "b4a95fe480b6540e",
"hash_input_tokens": "6deb2e92127d78f4",
"hash_cont_tokens": "e25888a5ba75c974"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1128,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_law|0": {
"hashes": {
"hash_examples": "c2be9651cdbdde3b",
"hash_full_prompts": "c2be9651cdbdde3b",
"hash_input_tokens": "fc490d9398a1f709",
"hash_cont_tokens": "d1bfd61f4ba42a8b"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6132,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_medicine|0": {
"hashes": {
"hash_examples": "26ce92416288f273",
"hash_full_prompts": "26ce92416288f273",
"hash_input_tokens": "f8c3b2f9f8388ae3",
"hash_cont_tokens": "6606c8af95c09c78"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_psychology|0": {
"hashes": {
"hash_examples": "71ea5f182ea9a641",
"hash_full_prompts": "71ea5f182ea9a641",
"hash_input_tokens": "6267a95abdc30e51",
"hash_cont_tokens": "b061897bffe334ec"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2440,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:public_relations|0": {
"hashes": {
"hash_examples": "125adc21f91f8d77",
"hash_full_prompts": "125adc21f91f8d77",
"hash_input_tokens": "afdde34346b14e30",
"hash_cont_tokens": "5c3107c12bceb18e"
},
"truncated": 0,
"non_truncated": 110,
"padded": 436,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:security_studies|0": {
"hashes": {
"hash_examples": "3c18b216c099fb26",
"hash_full_prompts": "3c18b216c099fb26",
"hash_input_tokens": "43ecd0cc543262ab",
"hash_cont_tokens": "ce4361df75a6e6a7"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:sociology|0": {
"hashes": {
"hash_examples": "3f2a9634cef7417d",
"hash_full_prompts": "3f2a9634cef7417d",
"hash_input_tokens": "cbcc64774739219d",
"hash_cont_tokens": "cbbeed1c4c5a128e"
},
"truncated": 0,
"non_truncated": 201,
"padded": 802,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:us_foreign_policy|0": {
"hashes": {
"hash_examples": "22249da54056475e",
"hash_full_prompts": "22249da54056475e",
"hash_input_tokens": "eac53d61967694e3",
"hash_cont_tokens": "00a223315c15a9ce"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:virology|0": {
"hashes": {
"hash_examples": "9d194b9471dc624e",
"hash_full_prompts": "9d194b9471dc624e",
"hash_input_tokens": "7f6a909fd6f59037",
"hash_cont_tokens": "84e4fe7f10383133"
},
"truncated": 0,
"non_truncated": 166,
"padded": 660,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:world_religions|0": {
"hashes": {
"hash_examples": "229e5fe50082b064",
"hash_full_prompts": "229e5fe50082b064",
"hash_input_tokens": "7ada75ea5233a1e2",
"hash_cont_tokens": "a0fac287dd015964"
},
"truncated": 0,
"non_truncated": 171,
"padded": 678,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arc_challenge_okapi_ar|0": {
"hashes": {
"hash_examples": "ab893807673bc355",
"hash_full_prompts": "ab893807673bc355",
"hash_input_tokens": "ad57ce24f7bfadf8",
"hash_cont_tokens": "726609a9298bbada"
},
"truncated": 0,
"non_truncated": 1160,
"padded": 4630,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arc_easy_ar|0": {
"hashes": {
"hash_examples": "acb688624acc3d04",
"hash_full_prompts": "acb688624acc3d04",
"hash_input_tokens": "5dbda4553c7dc716",
"hash_cont_tokens": "f3c33db663397968"
},
"truncated": 0,
"non_truncated": 2364,
"padded": 9418,
"non_padded": 38,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|boolq_ar|0": {
"hashes": {
"hash_examples": "48355a67867e0c32",
"hash_full_prompts": "48355a67867e0c32",
"hash_input_tokens": "a1c69d5194b15044",
"hash_cont_tokens": "912f7226532bcd5b"
},
"truncated": 0,
"non_truncated": 3260,
"padded": 6519,
"non_padded": 1,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|copa_ext_ar|0": {
"hashes": {
"hash_examples": "9bb83301bb72eecf",
"hash_full_prompts": "9bb83301bb72eecf",
"hash_input_tokens": "5846db1d65422e15",
"hash_cont_tokens": "6240ddc895d662c5"
},
"truncated": 0,
"non_truncated": 90,
"padded": 180,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|hellaswag_okapi_ar|0": {
"hashes": {
"hash_examples": "6e8cf57a322dfadd",
"hash_full_prompts": "6e8cf57a322dfadd",
"hash_input_tokens": "c6b23825004c8ee7",
"hash_cont_tokens": "7b105a1c159805da"
},
"truncated": 0,
"non_truncated": 9171,
"padded": 36632,
"non_padded": 52,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|openbook_qa_ext_ar|0": {
"hashes": {
"hash_examples": "923d41eb0aca93eb",
"hash_full_prompts": "923d41eb0aca93eb",
"hash_input_tokens": "a3be37edb4c43c30",
"hash_cont_tokens": "d1357c9db83cc945"
},
"truncated": 0,
"non_truncated": 495,
"padded": 1970,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|piqa_ar|0": {
"hashes": {
"hash_examples": "94bc205a520d3ea0",
"hash_full_prompts": "94bc205a520d3ea0",
"hash_input_tokens": "3752e63d24f89da2",
"hash_cont_tokens": "661c43850095d871"
},
"truncated": 0,
"non_truncated": 1833,
"padded": 3651,
"non_padded": 15,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|race_ar|0": {
"hashes": {
"hash_examples": "de65130bae647516",
"hash_full_prompts": "de65130bae647516",
"hash_input_tokens": "182795ceef54e534",
"hash_cont_tokens": "fd2244c08d29bb74"
},
"truncated": 0,
"non_truncated": 4929,
"padded": 19712,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|sciq_ar|0": {
"hashes": {
"hash_examples": "97d0997cf06a5f63",
"hash_full_prompts": "97d0997cf06a5f63",
"hash_input_tokens": "bbc1422195ec09c1",
"hash_cont_tokens": "6a5e07329e5e5f51"
},
"truncated": 0,
"non_truncated": 995,
"padded": 3970,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|toxigen_ar|0": {
"hashes": {
"hash_examples": "1e139513004a9a2e",
"hash_full_prompts": "1e139513004a9a2e",
"hash_input_tokens": "d6b99143ed453236",
"hash_cont_tokens": "95916816b9079ef8"
},
"truncated": 0,
"non_truncated": 935,
"padded": 1858,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|xstory_cloze:ar|0": {
"hashes": {
"hash_examples": "865426a22c787481",
"hash_full_prompts": "865426a22c787481",
"hash_input_tokens": "cf025e8c2af27374",
"hash_cont_tokens": "0ec4f03b9b94acd4"
},
"truncated": 0,
"non_truncated": 1511,
"padded": 2991,
"non_padded": 31,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "e154f441790694db",
"hash_full_prompts": "e154f441790694db",
"hash_input_tokens": "449d59a502674c1a",
"hash_cont_tokens": "9bcd6174151685d3"
},
"truncated": 0,
"non_truncated": 72964,
"padded": 235380,
"non_padded": 243,
"num_truncated_few_shots": 0
}
}