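Below is the raw lighteval results dump for Artples/L-MChat-Small. The suite-level entries such as "community|acva:_average|0" are the unweighted means of the per-task "acc_norm" scores in the "results" block. A minimal sketch of recomputing them, assuming a local copy of this file saved as results.json (hypothetical filename):

    import json

    # Load a local copy of this results dump (assumed filename).
    with open("results.json") as f:
        results = json.load(f)["results"]

    # Recompute each suite average as the plain mean of its tasks'
    # acc_norm scores, skipping the precomputed "_average" entry itself.
    for prefix in ("community|acva:", "community|alghafa:", "community|arabic_mmlu:"):
        scores = [v["acc_norm"] for k, v in results.items()
                  if k.startswith(prefix) and not k.startswith(prefix + "_average")]
        print(f"{prefix}_average -> {sum(scores) / len(scores):.6f}")

For example, averaging the nine alghafa task scores this way reproduces the reported 0.3277265567304262.
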
{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 509.334391095,
"end_time": 4542.695363527,
"total_evaluation_time_secondes": "4033.3609724320004",
"model_name": "Artples/L-MChat-Small",
"model_sha": "cc97bc850353c2a50cf6530e368ed2bc6689ba2c",
"model_dtype": "torch.bfloat16",
"model_size": "5.19 GB",
"config": null
},
"results": {
"community|acva:Algeria|0": {
"acc_norm": 0.48717948717948717,
"acc_norm_stderr": 0.03588610523192216
},
"community|acva:Ancient_Egypt|0": {
"acc_norm": 0.9015873015873016,
"acc_norm_stderr": 0.01680988100419675
},
"community|acva:Arab_Empire|0": {
"acc_norm": 0.690566037735849,
"acc_norm_stderr": 0.028450154794118634
},
"community|acva:Arabic_Architecture|0": {
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.03581804596782233
},
"community|acva:Arabic_Art|0": {
"acc_norm": 0.6512820512820513,
"acc_norm_stderr": 0.034215338466705415
},
"community|acva:Arabic_Astronomy|0": {
"acc_norm": 0.5282051282051282,
"acc_norm_stderr": 0.035840746749208334
},
"community|acva:Arabic_Calligraphy|0": {
"acc_norm": 0.5137254901960784,
"acc_norm_stderr": 0.0313609674469424
},
"community|acva:Arabic_Ceremony|0": {
"acc_norm": 0.4864864864864865,
"acc_norm_stderr": 0.03684702401944814
},
"community|acva:Arabic_Clothing|0": {
"acc_norm": 0.5025641025641026,
"acc_norm_stderr": 0.035897435897435895
},
"community|acva:Arabic_Culture|0": {
"acc_norm": 0.7435897435897436,
"acc_norm_stderr": 0.03134970994274493
},
"community|acva:Arabic_Food|0": {
"acc_norm": 0.558974358974359,
"acc_norm_stderr": 0.0356473293185358
},
"community|acva:Arabic_Funeral|0": {
"acc_norm": 0.5263157894736842,
"acc_norm_stderr": 0.05149958471474543
},
"community|acva:Arabic_Geography|0": {
"acc_norm": 0.41379310344827586,
"acc_norm_stderr": 0.04104269211806232
},
"community|acva:Arabic_History|0": {
"acc_norm": 0.6820512820512821,
"acc_norm_stderr": 0.03343383454355787
},
"community|acva:Arabic_Language_Origin|0": {
"acc_norm": 0.4842105263157895,
"acc_norm_stderr": 0.05154534179593067
},
"community|acva:Arabic_Literature|0": {
"acc_norm": 0.5310344827586206,
"acc_norm_stderr": 0.04158632762097828
},
"community|acva:Arabic_Math|0": {
"acc_norm": 0.6974358974358974,
"acc_norm_stderr": 0.03298070870085618
},
"community|acva:Arabic_Medicine|0": {
"acc_norm": 0.5103448275862069,
"acc_norm_stderr": 0.04165774775728763
},
"community|acva:Arabic_Music|0": {
"acc_norm": 0.7769784172661871,
"acc_norm_stderr": 0.03543548499561939
},
"community|acva:Arabic_Ornament|0": {
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.03581804596782233
},
"community|acva:Arabic_Philosophy|0": {
"acc_norm": 0.46206896551724136,
"acc_norm_stderr": 0.04154659671707548
},
"community|acva:Arabic_Physics_and_Chemistry|0": {
"acc_norm": 0.5076923076923077,
"acc_norm_stderr": 0.03589365940635213
},
"community|acva:Arabic_Wedding|0": {
"acc_norm": 0.6,
"acc_norm_stderr": 0.035172622905632896
},
"community|acva:Bahrain|0": {
"acc_norm": 0.6888888888888889,
"acc_norm_stderr": 0.06979205927323111
},
"community|acva:Comoros|0": {
"acc_norm": 0.6222222222222222,
"acc_norm_stderr": 0.07309112127323451
},
"community|acva:Egypt_modern|0": {
"acc_norm": 0.6736842105263158,
"acc_norm_stderr": 0.04835966701461423
},
"community|acva:InfluenceFromAncientEgypt|0": {
"acc_norm": 0.39487179487179486,
"acc_norm_stderr": 0.035095456022620375
},
"community|acva:InfluenceFromByzantium|0": {
"acc_norm": 0.2827586206896552,
"acc_norm_stderr": 0.03752833958003337
},
"community|acva:InfluenceFromChina|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03384487217112065
},
"community|acva:InfluenceFromGreece|0": {
"acc_norm": 0.36923076923076925,
"acc_norm_stderr": 0.034648411418637566
},
"community|acva:InfluenceFromIslam|0": {
"acc_norm": 0.6896551724137931,
"acc_norm_stderr": 0.03855289616378947
},
"community|acva:InfluenceFromPersia|0": {
"acc_norm": 0.3028571428571429,
"acc_norm_stderr": 0.03483414676585985
},
"community|acva:InfluenceFromRome|0": {
"acc_norm": 0.4307692307692308,
"acc_norm_stderr": 0.035552132520587594
},
"community|acva:Iraq|0": {
"acc_norm": 0.5176470588235295,
"acc_norm_stderr": 0.05452048340661897
},
"community|acva:Islam_Education|0": {
"acc_norm": 0.558974358974359,
"acc_norm_stderr": 0.03564732931853579
},
"community|acva:Islam_branches_and_schools|0": {
"acc_norm": 0.5028571428571429,
"acc_norm_stderr": 0.037904283318347436
},
"community|acva:Islamic_law_system|0": {
"acc_norm": 0.5948717948717949,
"acc_norm_stderr": 0.03524577495610961
},
"community|acva:Jordan|0": {
"acc_norm": 0.6444444444444445,
"acc_norm_stderr": 0.07216392363431012
},
"community|acva:Kuwait|0": {
"acc_norm": 0.6888888888888889,
"acc_norm_stderr": 0.06979205927323111
},
"community|acva:Lebanon|0": {
"acc_norm": 0.7333333333333333,
"acc_norm_stderr": 0.06666666666666668
},
"community|acva:Libya|0": {
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.0752101433090355
},
"community|acva:Mauritania|0": {
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.0752101433090355
},
"community|acva:Mesopotamia_civilization|0": {
"acc_norm": 0.49032258064516127,
"acc_norm_stderr": 0.04028360076525542
},
"community|acva:Morocco|0": {
"acc_norm": 0.7555555555555555,
"acc_norm_stderr": 0.06478835438717
},
"community|acva:Oman|0": {
"acc_norm": 0.8,
"acc_norm_stderr": 0.06030226891555273
},
"community|acva:Palestine|0": {
"acc_norm": 0.7411764705882353,
"acc_norm_stderr": 0.04778846120374094
},
"community|acva:Qatar|0": {
"acc_norm": 0.5777777777777777,
"acc_norm_stderr": 0.07446027270295806
},
"community|acva:Saudi_Arabia|0": {
"acc_norm": 0.6717948717948717,
"acc_norm_stderr": 0.03371243782413707
},
"community|acva:Somalia|0": {
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.07491109582924915
},
"community|acva:Sudan|0": {
"acc_norm": 0.6444444444444445,
"acc_norm_stderr": 0.07216392363431012
},
"community|acva:Syria|0": {
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.07106690545187012
},
"community|acva:Tunisia|0": {
"acc_norm": 0.6444444444444445,
"acc_norm_stderr": 0.07216392363431011
},
"community|acva:United_Arab_Emirates|0": {
"acc_norm": 0.7529411764705882,
"acc_norm_stderr": 0.047058823529411785
},
"community|acva:Yemen|0": {
"acc_norm": 0.6,
"acc_norm_stderr": 0.16329931618554522
},
"community|acva:communication|0": {
"acc_norm": 0.5879120879120879,
"acc_norm_stderr": 0.0258343667152563
},
"community|acva:computer_and_phone|0": {
"acc_norm": 0.5389830508474577,
"acc_norm_stderr": 0.0290718276412662
},
"community|acva:daily_life|0": {
"acc_norm": 0.7270029673590505,
"acc_norm_stderr": 0.024303980960050656
},
"community|acva:entertainment|0": {
"acc_norm": 0.711864406779661,
"acc_norm_stderr": 0.026413346524541644
},
"community|alghafa:mcq_exams_test_ar|0": {
"acc_norm": 0.2513464991023339,
"acc_norm_stderr": 0.01839668001069939
},
"community|alghafa:meta_ar_dialects|0": {
"acc_norm": 0.24411492122335496,
"acc_norm_stderr": 0.005848837806074586
},
"community|alghafa:meta_ar_msa|0": {
"acc_norm": 0.24692737430167597,
"acc_norm_stderr": 0.014422292204808836
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"acc_norm": 0.52,
"acc_norm_stderr": 0.05807730170189531
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.03622779862191887
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"acc_norm": 0.24666666666666667,
"acc_norm_stderr": 0.03531471376356937
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"acc_norm": 0.499812382739212,
"acc_norm_stderr": 0.005592267043694276
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"acc_norm": 0.3402835696413678,
"acc_norm_stderr": 0.006119849906257789
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"acc_norm": 0.33372093023255817,
"acc_norm_stderr": 0.011373178876838192
},
"community|arabic_exams|0": {
"acc_norm": 0.2383612662942272,
"acc_norm_stderr": 0.01840390396129298
},
"community|arabic_mmlu:abstract_algebra|0": {
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"community|arabic_mmlu:anatomy|0": {
"acc_norm": 0.18518518518518517,
"acc_norm_stderr": 0.03355677216313142
},
"community|arabic_mmlu:astronomy|0": {
"acc_norm": 0.18421052631578946,
"acc_norm_stderr": 0.0315469804508223
},
"community|arabic_mmlu:business_ethics|0": {
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"community|arabic_mmlu:clinical_knowledge|0": {
"acc_norm": 0.21509433962264152,
"acc_norm_stderr": 0.02528839450289137
},
"community|arabic_mmlu:college_biology|0": {
"acc_norm": 0.25,
"acc_norm_stderr": 0.03621034121889507
},
"community|arabic_mmlu:college_chemistry|0": {
"acc_norm": 0.19,
"acc_norm_stderr": 0.039427724440366234
},
"community|arabic_mmlu:college_computer_science|0": {
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"community|arabic_mmlu:college_mathematics|0": {
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"community|arabic_mmlu:college_medicine|0": {
"acc_norm": 0.2138728323699422,
"acc_norm_stderr": 0.031265112061730445
},
"community|arabic_mmlu:college_physics|0": {
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.041583075330832865
},
"community|arabic_mmlu:computer_security|0": {
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"community|arabic_mmlu:conceptual_physics|0": {
"acc_norm": 0.2723404255319149,
"acc_norm_stderr": 0.029101290698386698
},
"community|arabic_mmlu:econometrics|0": {
"acc_norm": 0.2719298245614035,
"acc_norm_stderr": 0.04185774424022056
},
"community|arabic_mmlu:electrical_engineering|0": {
"acc_norm": 0.25517241379310346,
"acc_norm_stderr": 0.03632984052707842
},
"community|arabic_mmlu:elementary_mathematics|0": {
"acc_norm": 0.21164021164021163,
"acc_norm_stderr": 0.021037331505262886
},
"community|arabic_mmlu:formal_logic|0": {
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.03932537680392872
},
"community|arabic_mmlu:global_facts|0": {
"acc_norm": 0.19,
"acc_norm_stderr": 0.039427724440366234
},
"community|arabic_mmlu:high_school_biology|0": {
"acc_norm": 0.18064516129032257,
"acc_norm_stderr": 0.021886178567172548
},
"community|arabic_mmlu:high_school_chemistry|0": {
"acc_norm": 0.1724137931034483,
"acc_norm_stderr": 0.026577672183036572
},
"community|arabic_mmlu:high_school_computer_science|0": {
"acc_norm": 0.22,
"acc_norm_stderr": 0.041633319989322716
},
"community|arabic_mmlu:high_school_european_history|0": {
"acc_norm": 0.21818181818181817,
"acc_norm_stderr": 0.03225078108306289
},
"community|arabic_mmlu:high_school_geography|0": {
"acc_norm": 0.17676767676767677,
"acc_norm_stderr": 0.027178752639044915
},
"community|arabic_mmlu:high_school_government_and_politics|0": {
"acc_norm": 0.19689119170984457,
"acc_norm_stderr": 0.028697873971860664
},
"community|arabic_mmlu:high_school_macroeconomics|0": {
"acc_norm": 0.19743589743589743,
"acc_norm_stderr": 0.020182646968674844
},
"community|arabic_mmlu:high_school_mathematics|0": {
"acc_norm": 0.22592592592592592,
"acc_norm_stderr": 0.02549753263960955
},
"community|arabic_mmlu:high_school_microeconomics|0": {
"acc_norm": 0.21428571428571427,
"acc_norm_stderr": 0.026653531596715494
},
"community|arabic_mmlu:high_school_physics|0": {
"acc_norm": 0.2052980132450331,
"acc_norm_stderr": 0.03297986648473836
},
"community|arabic_mmlu:high_school_psychology|0": {
"acc_norm": 0.1889908256880734,
"acc_norm_stderr": 0.01678548115920364
},
"community|arabic_mmlu:high_school_statistics|0": {
"acc_norm": 0.1574074074074074,
"acc_norm_stderr": 0.024837173518242387
},
"community|arabic_mmlu:high_school_us_history|0": {
"acc_norm": 0.25,
"acc_norm_stderr": 0.03039153369274154
},
"community|arabic_mmlu:high_school_world_history|0": {
"acc_norm": 0.270042194092827,
"acc_norm_stderr": 0.028900721906293426
},
"community|arabic_mmlu:human_aging|0": {
"acc_norm": 0.29596412556053814,
"acc_norm_stderr": 0.030636591348699813
},
"community|arabic_mmlu:human_sexuality|0": {
"acc_norm": 0.25190839694656486,
"acc_norm_stderr": 0.03807387116306086
},
"community|arabic_mmlu:international_law|0": {
"acc_norm": 0.2396694214876033,
"acc_norm_stderr": 0.03896878985070417
},
"community|arabic_mmlu:jurisprudence|0": {
"acc_norm": 0.26851851851851855,
"acc_norm_stderr": 0.04284467968052192
},
"community|arabic_mmlu:logical_fallacies|0": {
"acc_norm": 0.22085889570552147,
"acc_norm_stderr": 0.032591773927421776
},
"community|arabic_mmlu:machine_learning|0": {
"acc_norm": 0.32142857142857145,
"acc_norm_stderr": 0.04432804055291519
},
"community|arabic_mmlu:management|0": {
"acc_norm": 0.17475728155339806,
"acc_norm_stderr": 0.037601780060266224
},
"community|arabic_mmlu:marketing|0": {
"acc_norm": 0.3034188034188034,
"acc_norm_stderr": 0.03011821010694267
},
"community|arabic_mmlu:medical_genetics|0": {
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"community|arabic_mmlu:miscellaneous|0": {
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.015302380123542089
},
"community|arabic_mmlu:moral_disputes|0": {
"acc_norm": 0.2514450867052023,
"acc_norm_stderr": 0.02335736578587404
},
"community|arabic_mmlu:moral_scenarios|0": {
"acc_norm": 0.23798882681564246,
"acc_norm_stderr": 0.014242630070574915
},
"community|arabic_mmlu:nutrition|0": {
"acc_norm": 0.23202614379084968,
"acc_norm_stderr": 0.02417084087934101
},
"community|arabic_mmlu:philosophy|0": {
"acc_norm": 0.1864951768488746,
"acc_norm_stderr": 0.02212243977248078
},
"community|arabic_mmlu:prehistory|0": {
"acc_norm": 0.21604938271604937,
"acc_norm_stderr": 0.02289916291844581
},
"community|arabic_mmlu:professional_accounting|0": {
"acc_norm": 0.24822695035460993,
"acc_norm_stderr": 0.025770015644290396
},
"community|arabic_mmlu:professional_law|0": {
"acc_norm": 0.24511082138200782,
"acc_norm_stderr": 0.010986307870045517
},
"community|arabic_mmlu:professional_medicine|0": {
"acc_norm": 0.19117647058823528,
"acc_norm_stderr": 0.02388688192244034
},
"community|arabic_mmlu:professional_psychology|0": {
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.017630827375148383
},
"community|arabic_mmlu:public_relations|0": {
"acc_norm": 0.22727272727272727,
"acc_norm_stderr": 0.04013964554072775
},
"community|arabic_mmlu:security_studies|0": {
"acc_norm": 0.18775510204081633,
"acc_norm_stderr": 0.02500025603954621
},
"community|arabic_mmlu:sociology|0": {
"acc_norm": 0.24378109452736318,
"acc_norm_stderr": 0.03036049015401465
},
"community|arabic_mmlu:us_foreign_policy|0": {
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"community|arabic_mmlu:virology|0": {
"acc_norm": 0.28313253012048195,
"acc_norm_stderr": 0.03507295431370518
},
"community|arabic_mmlu:world_religions|0": {
"acc_norm": 0.3216374269005848,
"acc_norm_stderr": 0.03582529442573122
},
"community|arc_challenge_okapi_ar|0": {
"acc_norm": 0.2525862068965517,
"acc_norm_stderr": 0.012762732057795888
},
"community|arc_easy_ar|0": {
"acc_norm": 0.2593062605752961,
"acc_norm_stderr": 0.00901558634852351
},
"community|boolq_ar|0": {
"acc_norm": 0.40950920245398775,
"acc_norm_stderr": 0.008613828474130074
},
"community|copa_ext_ar|0": {
"acc_norm": 0.4888888888888889,
"acc_norm_stderr": 0.05298680599073449
},
"community|hellaswag_okapi_ar|0": {
"acc_norm": 0.24882782684549123,
"acc_norm_stderr": 0.004514758778737428
},
"community|openbook_qa_ext_ar|0": {
"acc_norm": 0.3656565656565657,
"acc_norm_stderr": 0.021668828786750326
},
"community|piqa_ar|0": {
"acc_norm": 0.5040916530278232,
"acc_norm_stderr": 0.011681341688982008
},
"community|race_ar|0": {
"acc_norm": 0.2803814161087442,
"acc_norm_stderr": 0.006398680832644407
},
"community|sciq_ar|0": {
"acc_norm": 0.3085427135678392,
"acc_norm_stderr": 0.0146503207768712
},
"community|toxigen_ar|0": {
"acc_norm": 0.4320855614973262,
"acc_norm_stderr": 0.01620887578524445
},
"lighteval|xstory_cloze:ar|0": {
"acc": 0.47187293183322304,
"acc_stderr": 0.012846749995797692
},
"community|acva:_average|0": {
"acc_norm": 0.5823110081794227,
"acc_norm_stderr": 0.04701755519626288
},
"community|alghafa:_average|0": {
"acc_norm": 0.3277265567304262,
"acc_norm_stderr": 0.021263657770639627
},
"community|arabic_mmlu:_average|0": {
"acc_norm": 0.23589525196402603,
"acc_norm_stderr": 0.03174802148728372
},
"all": {
"acc_norm": 0.3996877363610559,
"acc_norm_stderr": 0.0363328445153769,
"acc": 0.47187293183322304,
"acc_stderr": 0.012846749995797692
}
},
"versions": {
"community|acva:Algeria|0": 0,
"community|acva:Ancient_Egypt|0": 0,
"community|acva:Arab_Empire|0": 0,
"community|acva:Arabic_Architecture|0": 0,
"community|acva:Arabic_Art|0": 0,
"community|acva:Arabic_Astronomy|0": 0,
"community|acva:Arabic_Calligraphy|0": 0,
"community|acva:Arabic_Ceremony|0": 0,
"community|acva:Arabic_Clothing|0": 0,
"community|acva:Arabic_Culture|0": 0,
"community|acva:Arabic_Food|0": 0,
"community|acva:Arabic_Funeral|0": 0,
"community|acva:Arabic_Geography|0": 0,
"community|acva:Arabic_History|0": 0,
"community|acva:Arabic_Language_Origin|0": 0,
"community|acva:Arabic_Literature|0": 0,
"community|acva:Arabic_Math|0": 0,
"community|acva:Arabic_Medicine|0": 0,
"community|acva:Arabic_Music|0": 0,
"community|acva:Arabic_Ornament|0": 0,
"community|acva:Arabic_Philosophy|0": 0,
"community|acva:Arabic_Physics_and_Chemistry|0": 0,
"community|acva:Arabic_Wedding|0": 0,
"community|acva:Bahrain|0": 0,
"community|acva:Comoros|0": 0,
"community|acva:Egypt_modern|0": 0,
"community|acva:InfluenceFromAncientEgypt|0": 0,
"community|acva:InfluenceFromByzantium|0": 0,
"community|acva:InfluenceFromChina|0": 0,
"community|acva:InfluenceFromGreece|0": 0,
"community|acva:InfluenceFromIslam|0": 0,
"community|acva:InfluenceFromPersia|0": 0,
"community|acva:InfluenceFromRome|0": 0,
"community|acva:Iraq|0": 0,
"community|acva:Islam_Education|0": 0,
"community|acva:Islam_branches_and_schools|0": 0,
"community|acva:Islamic_law_system|0": 0,
"community|acva:Jordan|0": 0,
"community|acva:Kuwait|0": 0,
"community|acva:Lebanon|0": 0,
"community|acva:Libya|0": 0,
"community|acva:Mauritania|0": 0,
"community|acva:Mesopotamia_civilization|0": 0,
"community|acva:Morocco|0": 0,
"community|acva:Oman|0": 0,
"community|acva:Palestine|0": 0,
"community|acva:Qatar|0": 0,
"community|acva:Saudi_Arabia|0": 0,
"community|acva:Somalia|0": 0,
"community|acva:Sudan|0": 0,
"community|acva:Syria|0": 0,
"community|acva:Tunisia|0": 0,
"community|acva:United_Arab_Emirates|0": 0,
"community|acva:Yemen|0": 0,
"community|acva:communication|0": 0,
"community|acva:computer_and_phone|0": 0,
"community|acva:daily_life|0": 0,
"community|acva:entertainment|0": 0,
"community|alghafa:mcq_exams_test_ar|0": 0,
"community|alghafa:meta_ar_dialects|0": 0,
"community|alghafa:meta_ar_msa|0": 0,
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0,
"community|alghafa:multiple_choice_sentiment_task|0": 0,
"community|arabic_exams|0": 0,
"community|arabic_mmlu:abstract_algebra|0": 0,
"community|arabic_mmlu:anatomy|0": 0,
"community|arabic_mmlu:astronomy|0": 0,
"community|arabic_mmlu:business_ethics|0": 0,
"community|arabic_mmlu:clinical_knowledge|0": 0,
"community|arabic_mmlu:college_biology|0": 0,
"community|arabic_mmlu:college_chemistry|0": 0,
"community|arabic_mmlu:college_computer_science|0": 0,
"community|arabic_mmlu:college_mathematics|0": 0,
"community|arabic_mmlu:college_medicine|0": 0,
"community|arabic_mmlu:college_physics|0": 0,
"community|arabic_mmlu:computer_security|0": 0,
"community|arabic_mmlu:conceptual_physics|0": 0,
"community|arabic_mmlu:econometrics|0": 0,
"community|arabic_mmlu:electrical_engineering|0": 0,
"community|arabic_mmlu:elementary_mathematics|0": 0,
"community|arabic_mmlu:formal_logic|0": 0,
"community|arabic_mmlu:global_facts|0": 0,
"community|arabic_mmlu:high_school_biology|0": 0,
"community|arabic_mmlu:high_school_chemistry|0": 0,
"community|arabic_mmlu:high_school_computer_science|0": 0,
"community|arabic_mmlu:high_school_european_history|0": 0,
"community|arabic_mmlu:high_school_geography|0": 0,
"community|arabic_mmlu:high_school_government_and_politics|0": 0,
"community|arabic_mmlu:high_school_macroeconomics|0": 0,
"community|arabic_mmlu:high_school_mathematics|0": 0,
"community|arabic_mmlu:high_school_microeconomics|0": 0,
"community|arabic_mmlu:high_school_physics|0": 0,
"community|arabic_mmlu:high_school_psychology|0": 0,
"community|arabic_mmlu:high_school_statistics|0": 0,
"community|arabic_mmlu:high_school_us_history|0": 0,
"community|arabic_mmlu:high_school_world_history|0": 0,
"community|arabic_mmlu:human_aging|0": 0,
"community|arabic_mmlu:human_sexuality|0": 0,
"community|arabic_mmlu:international_law|0": 0,
"community|arabic_mmlu:jurisprudence|0": 0,
"community|arabic_mmlu:logical_fallacies|0": 0,
"community|arabic_mmlu:machine_learning|0": 0,
"community|arabic_mmlu:management|0": 0,
"community|arabic_mmlu:marketing|0": 0,
"community|arabic_mmlu:medical_genetics|0": 0,
"community|arabic_mmlu:miscellaneous|0": 0,
"community|arabic_mmlu:moral_disputes|0": 0,
"community|arabic_mmlu:moral_scenarios|0": 0,
"community|arabic_mmlu:nutrition|0": 0,
"community|arabic_mmlu:philosophy|0": 0,
"community|arabic_mmlu:prehistory|0": 0,
"community|arabic_mmlu:professional_accounting|0": 0,
"community|arabic_mmlu:professional_law|0": 0,
"community|arabic_mmlu:professional_medicine|0": 0,
"community|arabic_mmlu:professional_psychology|0": 0,
"community|arabic_mmlu:public_relations|0": 0,
"community|arabic_mmlu:security_studies|0": 0,
"community|arabic_mmlu:sociology|0": 0,
"community|arabic_mmlu:us_foreign_policy|0": 0,
"community|arabic_mmlu:virology|0": 0,
"community|arabic_mmlu:world_religions|0": 0,
"community|arc_challenge_okapi_ar|0": 0,
"community|arc_easy_ar|0": 0,
"community|boolq_ar|0": 0,
"community|copa_ext_ar|0": 0,
"community|hellaswag_okapi_ar|0": 0,
"community|openbook_qa_ext_ar|0": 0,
"community|piqa_ar|0": 0,
"community|race_ar|0": 0,
"community|sciq_ar|0": 0,
"community|toxigen_ar|0": 0,
"lighteval|xstory_cloze:ar|0": 0
},
"config_tasks": {
"community|acva:Algeria": {
"name": "acva:Algeria",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Algeria",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Ancient_Egypt": {
"name": "acva:Ancient_Egypt",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Ancient_Egypt",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 315,
"effective_num_docs": 315,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arab_Empire": {
"name": "acva:Arab_Empire",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arab_Empire",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Architecture": {
"name": "acva:Arabic_Architecture",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Architecture",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Art": {
"name": "acva:Arabic_Art",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Art",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Astronomy": {
"name": "acva:Arabic_Astronomy",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Astronomy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Calligraphy": {
"name": "acva:Arabic_Calligraphy",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Calligraphy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 255,
"effective_num_docs": 255,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Ceremony": {
"name": "acva:Arabic_Ceremony",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Ceremony",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 185,
"effective_num_docs": 185,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Clothing": {
"name": "acva:Arabic_Clothing",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Clothing",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Culture": {
"name": "acva:Arabic_Culture",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Culture",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Food": {
"name": "acva:Arabic_Food",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Food",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Funeral": {
"name": "acva:Arabic_Funeral",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Funeral",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 95,
"effective_num_docs": 95,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Geography": {
"name": "acva:Arabic_Geography",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Geography",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_History": {
"name": "acva:Arabic_History",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_History",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Language_Origin": {
"name": "acva:Arabic_Language_Origin",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Language_Origin",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 95,
"effective_num_docs": 95,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Literature": {
"name": "acva:Arabic_Literature",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Literature",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Math": {
"name": "acva:Arabic_Math",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Math",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Medicine": {
"name": "acva:Arabic_Medicine",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Medicine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Music": {
"name": "acva:Arabic_Music",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Music",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 139,
"effective_num_docs": 139,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Ornament": {
"name": "acva:Arabic_Ornament",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Ornament",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Philosophy": {
"name": "acva:Arabic_Philosophy",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Philosophy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Physics_and_Chemistry": {
"name": "acva:Arabic_Physics_and_Chemistry",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Physics_and_Chemistry",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Wedding": {
"name": "acva:Arabic_Wedding",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Wedding",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Bahrain": {
"name": "acva:Bahrain",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Bahrain",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Comoros": {
"name": "acva:Comoros",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Comoros",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Egypt_modern": {
"name": "acva:Egypt_modern",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Egypt_modern",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 95,
"effective_num_docs": 95,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromAncientEgypt": {
"name": "acva:InfluenceFromAncientEgypt",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromAncientEgypt",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromByzantium": {
"name": "acva:InfluenceFromByzantium",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromByzantium",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromChina": {
"name": "acva:InfluenceFromChina",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromChina",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromGreece": {
"name": "acva:InfluenceFromGreece",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromGreece",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromIslam": {
"name": "acva:InfluenceFromIslam",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromIslam",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromPersia": {
"name": "acva:InfluenceFromPersia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromPersia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 175,
"effective_num_docs": 175,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromRome": {
"name": "acva:InfluenceFromRome",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromRome",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Iraq": {
"name": "acva:Iraq",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Iraq",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 85,
"effective_num_docs": 85,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Islam_Education": {
"name": "acva:Islam_Education",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Islam_Education",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Islam_branches_and_schools": {
"name": "acva:Islam_branches_and_schools",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Islam_branches_and_schools",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 175,
"effective_num_docs": 175,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Islamic_law_system": {
"name": "acva:Islamic_law_system",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Islamic_law_system",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Jordan": {
"name": "acva:Jordan",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Jordan",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Kuwait": {
"name": "acva:Kuwait",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Kuwait",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Lebanon": {
"name": "acva:Lebanon",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Lebanon",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Libya": {
"name": "acva:Libya",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Libya",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Mauritania": {
"name": "acva:Mauritania",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Mauritania",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Mesopotamia_civilization": {
"name": "acva:Mesopotamia_civilization",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Mesopotamia_civilization",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 155,
"effective_num_docs": 155,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Morocco": {
"name": "acva:Morocco",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Morocco",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Oman": {
"name": "acva:Oman",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Oman",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Palestine": {
"name": "acva:Palestine",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Palestine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 85,
"effective_num_docs": 85,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Qatar": {
"name": "acva:Qatar",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Qatar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Saudi_Arabia": {
"name": "acva:Saudi_Arabia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Saudi_Arabia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Somalia": {
"name": "acva:Somalia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Somalia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Sudan": {
"name": "acva:Sudan",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Sudan",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Syria": {
"name": "acva:Syria",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Syria",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Tunisia": {
"name": "acva:Tunisia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Tunisia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:United_Arab_Emirates": {
"name": "acva:United_Arab_Emirates",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "United_Arab_Emirates",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 85,
"effective_num_docs": 85,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Yemen": {
"name": "acva:Yemen",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Yemen",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 10,
"effective_num_docs": 10,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:communication": {
"name": "acva:communication",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "communication",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 364,
"effective_num_docs": 364,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:computer_and_phone": {
"name": "acva:computer_and_phone",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "computer_and_phone",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 295,
"effective_num_docs": 295,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:daily_life": {
"name": "acva:daily_life",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "daily_life",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 337,
"effective_num_docs": 337,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:entertainment": {
"name": "acva:entertainment",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "entertainment",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 295,
"effective_num_docs": 295,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:mcq_exams_test_ar": {
"name": "alghafa:mcq_exams_test_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "mcq_exams_test_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 557,
"effective_num_docs": 557,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:meta_ar_dialects": {
"name": "alghafa:meta_ar_dialects",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_dialects",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 5395,
"effective_num_docs": 5395,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:meta_ar_msa": {
"name": "alghafa:meta_ar_msa",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_msa",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": {
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_facts_truefalse_balanced_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 75,
"effective_num_docs": 75,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task": {
"name": "alghafa:multiple_choice_grounded_statement_soqal_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_soqal_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": {
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": {
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 7995,
"effective_num_docs": 7995,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task": {
"name": "alghafa:multiple_choice_rating_sentiment_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 5995,
"effective_num_docs": 5995,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_sentiment_task": {
"name": "alghafa:multiple_choice_sentiment_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_sentiment_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1720,
"effective_num_docs": 1720,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_exams": {
"name": "arabic_exams",
"prompt_function": "arabic_exams",
"hf_repo": "OALL/Arabic_EXAMS",
"hf_subset": "default",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 537,
"effective_num_docs": 537,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:abstract_algebra": {
"name": "arabic_mmlu:abstract_algebra",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "abstract_algebra",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:anatomy": {
"name": "arabic_mmlu:anatomy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "anatomy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 135,
"effective_num_docs": 135,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:astronomy": {
"name": "arabic_mmlu:astronomy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "astronomy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 152,
"effective_num_docs": 152,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:business_ethics": {
"name": "arabic_mmlu:business_ethics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "business_ethics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:clinical_knowledge": {
"name": "arabic_mmlu:clinical_knowledge",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "clinical_knowledge",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_biology": {
"name": "arabic_mmlu:college_biology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_biology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 144,
"effective_num_docs": 144,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_chemistry": {
"name": "arabic_mmlu:college_chemistry",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_chemistry",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_computer_science": {
"name": "arabic_mmlu:college_computer_science",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_computer_science",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_mathematics": {
"name": "arabic_mmlu:college_mathematics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_mathematics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_medicine": {
"name": "arabic_mmlu:college_medicine",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_medicine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 173,
"effective_num_docs": 173,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_physics": {
"name": "arabic_mmlu:college_physics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_physics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:computer_security": {
"name": "arabic_mmlu:computer_security",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "computer_security",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:conceptual_physics": {
"name": "arabic_mmlu:conceptual_physics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "conceptual_physics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 235,
"effective_num_docs": 235,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:econometrics": {
"name": "arabic_mmlu:econometrics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "econometrics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 114,
"effective_num_docs": 114,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:electrical_engineering": {
"name": "arabic_mmlu:electrical_engineering",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "electrical_engineering",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:elementary_mathematics": {
"name": "arabic_mmlu:elementary_mathematics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "elementary_mathematics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 378,
"effective_num_docs": 378,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:formal_logic": {
"name": "arabic_mmlu:formal_logic",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "formal_logic",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 126,
"effective_num_docs": 126,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:global_facts": {
"name": "arabic_mmlu:global_facts",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "global_facts",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_biology": {
"name": "arabic_mmlu:high_school_biology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_biology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 310,
"effective_num_docs": 310,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_chemistry": {
"name": "arabic_mmlu:high_school_chemistry",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_chemistry",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 203,
"effective_num_docs": 203,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_computer_science": {
"name": "arabic_mmlu:high_school_computer_science",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_computer_science",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_european_history": {
"name": "arabic_mmlu:high_school_european_history",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_european_history",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 165,
"effective_num_docs": 165,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_geography": {
"name": "arabic_mmlu:high_school_geography",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_geography",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 198,
"effective_num_docs": 198,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_government_and_politics": {
"name": "arabic_mmlu:high_school_government_and_politics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_government_and_politics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 193,
"effective_num_docs": 193,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_macroeconomics": {
"name": "arabic_mmlu:high_school_macroeconomics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_macroeconomics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 390,
"effective_num_docs": 390,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_mathematics": {
"name": "arabic_mmlu:high_school_mathematics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_mathematics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 270,
"effective_num_docs": 270,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_microeconomics": {
"name": "arabic_mmlu:high_school_microeconomics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_microeconomics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_physics": {
"name": "arabic_mmlu:high_school_physics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_physics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 151,
"effective_num_docs": 151,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_psychology": {
"name": "arabic_mmlu:high_school_psychology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_psychology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 545,
"effective_num_docs": 545,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_statistics": {
"name": "arabic_mmlu:high_school_statistics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_statistics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 216,
"effective_num_docs": 216,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_us_history": {
"name": "arabic_mmlu:high_school_us_history",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_us_history",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 204,
"effective_num_docs": 204,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_world_history": {
"name": "arabic_mmlu:high_school_world_history",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_world_history",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 237,
"effective_num_docs": 237,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:human_aging": {
"name": "arabic_mmlu:human_aging",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "human_aging",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 223,
"effective_num_docs": 223,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:human_sexuality": {
"name": "arabic_mmlu:human_sexuality",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "human_sexuality",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 131,
"effective_num_docs": 131,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:international_law": {
"name": "arabic_mmlu:international_law",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "international_law",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 121,
"effective_num_docs": 121,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:jurisprudence": {
"name": "arabic_mmlu:jurisprudence",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "jurisprudence",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 108,
"effective_num_docs": 108,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:logical_fallacies": {
"name": "arabic_mmlu:logical_fallacies",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "logical_fallacies",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 163,
"effective_num_docs": 163,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:machine_learning": {
"name": "arabic_mmlu:machine_learning",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "machine_learning",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 112,
"effective_num_docs": 112,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:management": {
"name": "arabic_mmlu:management",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "management",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 103,
"effective_num_docs": 103,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:marketing": {
"name": "arabic_mmlu:marketing",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "marketing",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 234,
"effective_num_docs": 234,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:medical_genetics": {
"name": "arabic_mmlu:medical_genetics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "medical_genetics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:miscellaneous": {
"name": "arabic_mmlu:miscellaneous",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "miscellaneous",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 783,
"effective_num_docs": 783,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:moral_disputes": {
"name": "arabic_mmlu:moral_disputes",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "moral_disputes",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 346,
"effective_num_docs": 346,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:moral_scenarios": {
"name": "arabic_mmlu:moral_scenarios",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "moral_scenarios",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:nutrition": {
"name": "arabic_mmlu:nutrition",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "nutrition",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 306,
"effective_num_docs": 306,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:philosophy": {
"name": "arabic_mmlu:philosophy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "philosophy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 311,
"effective_num_docs": 311,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:prehistory": {
"name": "arabic_mmlu:prehistory",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "prehistory",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 324,
"effective_num_docs": 324,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_accounting": {
"name": "arabic_mmlu:professional_accounting",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_accounting",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 282,
"effective_num_docs": 282,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_law": {
"name": "arabic_mmlu:professional_law",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_law",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1534,
"effective_num_docs": 1534,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_medicine": {
"name": "arabic_mmlu:professional_medicine",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_medicine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 272,
"effective_num_docs": 272,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_psychology": {
"name": "arabic_mmlu:professional_psychology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_psychology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:public_relations": {
"name": "arabic_mmlu:public_relations",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "public_relations",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 110,
"effective_num_docs": 110,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:security_studies": {
"name": "arabic_mmlu:security_studies",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "security_studies",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 245,
"effective_num_docs": 245,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:sociology": {
"name": "arabic_mmlu:sociology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "sociology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 201,
"effective_num_docs": 201,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:us_foreign_policy": {
"name": "arabic_mmlu:us_foreign_policy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "us_foreign_policy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:virology": {
"name": "arabic_mmlu:virology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "virology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 166,
"effective_num_docs": 166,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:world_religions": {
"name": "arabic_mmlu:world_religions",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "world_religions",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 171,
"effective_num_docs": 171,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arc_challenge_okapi_ar": {
"name": "arc_challenge_okapi_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "arc_challenge_okapi_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1160,
"effective_num_docs": 1160,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arc_easy_ar": {
"name": "arc_easy_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "arc_easy_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 2364,
"effective_num_docs": 2364,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|boolq_ar": {
"name": "boolq_ar",
"prompt_function": "boolq_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "boolq_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 3260,
"effective_num_docs": 3260,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|copa_ext_ar": {
"name": "copa_ext_ar",
"prompt_function": "copa_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "copa_ext_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 90,
"effective_num_docs": 90,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|hellaswag_okapi_ar": {
"name": "hellaswag_okapi_ar",
"prompt_function": "hellaswag_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "hellaswag_okapi_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 9171,
"effective_num_docs": 9171,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|openbook_qa_ext_ar": {
"name": "openbook_qa_ext_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "openbook_qa_ext_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 495,
"effective_num_docs": 495,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|piqa_ar": {
"name": "piqa_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "piqa_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1833,
"effective_num_docs": 1833,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|race_ar": {
"name": "race_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "race_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 4929,
"effective_num_docs": 4929,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|sciq_ar": {
"name": "sciq_ar",
"prompt_function": "sciq_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "sciq_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 995,
"effective_num_docs": 995,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|toxigen_ar": {
"name": "toxigen_ar",
"prompt_function": "toxigen_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "toxigen_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 935,
"effective_num_docs": 935,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"lighteval|xstory_cloze:ar": {
"name": "xstory_cloze:ar",
"prompt_function": "storycloze",
"hf_repo": "juletxara/xstory_cloze",
"hf_subset": "ar",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"training",
"eval"
],
"evaluation_splits": [
"eval"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 1511,
"effective_num_docs": 1511,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
}
},
"summary_tasks": {
"community|acva:Algeria|0": {
"hashes": {
"hash_examples": "da5a3003cd46f6f9",
"hash_full_prompts": "da5a3003cd46f6f9",
"hash_input_tokens": "b41724a80bf4ff7d",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Ancient_Egypt|0": {
"hashes": {
"hash_examples": "52d6f767fede195b",
"hash_full_prompts": "52d6f767fede195b",
"hash_input_tokens": "a4c13d220aefcf58",
"hash_cont_tokens": "b5152e38187fe33b"
},
"truncated": 0,
"non_truncated": 315,
"padded": 630,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arab_Empire|0": {
"hashes": {
"hash_examples": "8dacff6a79804a75",
"hash_full_prompts": "8dacff6a79804a75",
"hash_input_tokens": "4e18cb1ed3cf5075",
"hash_cont_tokens": "49dc04b00c3eadd7"
},
"truncated": 0,
"non_truncated": 265,
"padded": 530,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Architecture|0": {
"hashes": {
"hash_examples": "df286cd862d9f6bb",
"hash_full_prompts": "df286cd862d9f6bb",
"hash_input_tokens": "ca39559224936509",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Art|0": {
"hashes": {
"hash_examples": "112883d764118a49",
"hash_full_prompts": "112883d764118a49",
"hash_input_tokens": "8aaaec9cd7d15361",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Astronomy|0": {
"hashes": {
"hash_examples": "20dcdf2454bf8671",
"hash_full_prompts": "20dcdf2454bf8671",
"hash_input_tokens": "190551282c3897fd",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Calligraphy|0": {
"hashes": {
"hash_examples": "3a9f9d1ebe868a15",
"hash_full_prompts": "3a9f9d1ebe868a15",
"hash_input_tokens": "76ef27371d2650f6",
"hash_cont_tokens": "1f39edf86e11a588"
},
"truncated": 0,
"non_truncated": 255,
"padded": 510,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Ceremony|0": {
"hashes": {
"hash_examples": "c927630f8d2f44da",
"hash_full_prompts": "c927630f8d2f44da",
"hash_input_tokens": "af044f478ce78780",
"hash_cont_tokens": "fee4ff59b24f917a"
},
"truncated": 0,
"non_truncated": 185,
"padded": 370,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Clothing|0": {
"hashes": {
"hash_examples": "6ad0740c2ac6ac92",
"hash_full_prompts": "6ad0740c2ac6ac92",
"hash_input_tokens": "d176db58b6533d96",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Culture|0": {
"hashes": {
"hash_examples": "2177bd857ad872ae",
"hash_full_prompts": "2177bd857ad872ae",
"hash_input_tokens": "7476b854482d3b1f",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Food|0": {
"hashes": {
"hash_examples": "a6ada65b71d7c9c5",
"hash_full_prompts": "a6ada65b71d7c9c5",
"hash_input_tokens": "39b38316d84cb75a",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Funeral|0": {
"hashes": {
"hash_examples": "fcee39dc29eaae91",
"hash_full_prompts": "fcee39dc29eaae91",
"hash_input_tokens": "c9557117752c0495",
"hash_cont_tokens": "2d8b8db7b4c366ad"
},
"truncated": 0,
"non_truncated": 95,
"padded": 190,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Geography|0": {
"hashes": {
"hash_examples": "d36eda7c89231c02",
"hash_full_prompts": "d36eda7c89231c02",
"hash_input_tokens": "934b7078f90eeb96",
"hash_cont_tokens": "1e71c8793c636601"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_History|0": {
"hashes": {
"hash_examples": "6354ac0d6db6a5fc",
"hash_full_prompts": "6354ac0d6db6a5fc",
"hash_input_tokens": "9420e0e9f0ff069e",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Language_Origin|0": {
"hashes": {
"hash_examples": "ddc967c8aca34402",
"hash_full_prompts": "ddc967c8aca34402",
"hash_input_tokens": "d9392390fe959aff",
"hash_cont_tokens": "2d8b8db7b4c366ad"
},
"truncated": 0,
"non_truncated": 95,
"padded": 190,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Literature|0": {
"hashes": {
"hash_examples": "4305379fd46be5d8",
"hash_full_prompts": "4305379fd46be5d8",
"hash_input_tokens": "336173f497a8e48d",
"hash_cont_tokens": "1e71c8793c636601"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Math|0": {
"hashes": {
"hash_examples": "dec621144f4d28be",
"hash_full_prompts": "dec621144f4d28be",
"hash_input_tokens": "77a166514d7d9576",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Medicine|0": {
"hashes": {
"hash_examples": "2b344cdae9495ff2",
"hash_full_prompts": "2b344cdae9495ff2",
"hash_input_tokens": "8532117a25b57942",
"hash_cont_tokens": "1e71c8793c636601"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Music|0": {
"hashes": {
"hash_examples": "0c54624d881944ce",
"hash_full_prompts": "0c54624d881944ce",
"hash_input_tokens": "e4ceb078920ab360",
"hash_cont_tokens": "81ea5db7e2bb34df"
},
"truncated": 0,
"non_truncated": 139,
"padded": 278,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Ornament|0": {
"hashes": {
"hash_examples": "251a4a84289d8bc1",
"hash_full_prompts": "251a4a84289d8bc1",
"hash_input_tokens": "0843def599ffdcbe",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Philosophy|0": {
"hashes": {
"hash_examples": "3f86fb9c94c13d22",
"hash_full_prompts": "3f86fb9c94c13d22",
"hash_input_tokens": "29af6eaed6acc3e3",
"hash_cont_tokens": "1e71c8793c636601"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Physics_and_Chemistry|0": {
"hashes": {
"hash_examples": "8fec65af3695b62a",
"hash_full_prompts": "8fec65af3695b62a",
"hash_input_tokens": "66b7608a2882eac9",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Wedding|0": {
"hashes": {
"hash_examples": "9cc3477184d7a4b8",
"hash_full_prompts": "9cc3477184d7a4b8",
"hash_input_tokens": "0255ffff4c57cf31",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Bahrain|0": {
"hashes": {
"hash_examples": "c92e803a0fa8b9e2",
"hash_full_prompts": "c92e803a0fa8b9e2",
"hash_input_tokens": "8b66a6459f1bcdb6",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Comoros|0": {
"hashes": {
"hash_examples": "06e5d4bba8e54cae",
"hash_full_prompts": "06e5d4bba8e54cae",
"hash_input_tokens": "0daae945d80a7513",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Egypt_modern|0": {
"hashes": {
"hash_examples": "c6ec369164f93446",
"hash_full_prompts": "c6ec369164f93446",
"hash_input_tokens": "0e425fab0852aa5f",
"hash_cont_tokens": "2d8b8db7b4c366ad"
},
"truncated": 0,
"non_truncated": 95,
"padded": 190,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromAncientEgypt|0": {
"hashes": {
"hash_examples": "b9d56d74818b9bd4",
"hash_full_prompts": "b9d56d74818b9bd4",
"hash_input_tokens": "3f1e0529735d039f",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromByzantium|0": {
"hashes": {
"hash_examples": "5316c9624e7e59b8",
"hash_full_prompts": "5316c9624e7e59b8",
"hash_input_tokens": "113f31ccc7fa38cd",
"hash_cont_tokens": "1e71c8793c636601"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromChina|0": {
"hashes": {
"hash_examples": "87894bce95a56411",
"hash_full_prompts": "87894bce95a56411",
"hash_input_tokens": "20763112a39adaae",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromGreece|0": {
"hashes": {
"hash_examples": "0baa78a27e469312",
"hash_full_prompts": "0baa78a27e469312",
"hash_input_tokens": "815f3c28fbace677",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromIslam|0": {
"hashes": {
"hash_examples": "0c2532cde6541ff2",
"hash_full_prompts": "0c2532cde6541ff2",
"hash_input_tokens": "3180a61f22363b4e",
"hash_cont_tokens": "1e71c8793c636601"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromPersia|0": {
"hashes": {
"hash_examples": "efcd8112dc53c6e5",
"hash_full_prompts": "efcd8112dc53c6e5",
"hash_input_tokens": "f6dc12728cd8d113",
"hash_cont_tokens": "2e4c5353c7da3ce5"
},
"truncated": 0,
"non_truncated": 175,
"padded": 350,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromRome|0": {
"hashes": {
"hash_examples": "9db61480e2e85fd3",
"hash_full_prompts": "9db61480e2e85fd3",
"hash_input_tokens": "9d6b10c2a16942e3",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Iraq|0": {
"hashes": {
"hash_examples": "96dac3dfa8d2f41f",
"hash_full_prompts": "96dac3dfa8d2f41f",
"hash_input_tokens": "a8dd43762b1b7a87",
"hash_cont_tokens": "931e3b1ab5e4bb64"
},
"truncated": 0,
"non_truncated": 85,
"padded": 170,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Islam_Education|0": {
"hashes": {
"hash_examples": "0d80355f6a4cb51b",
"hash_full_prompts": "0d80355f6a4cb51b",
"hash_input_tokens": "11d2bbbf5203d99c",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Islam_branches_and_schools|0": {
"hashes": {
"hash_examples": "5cedce1be2c3ad50",
"hash_full_prompts": "5cedce1be2c3ad50",
"hash_input_tokens": "a30f55caee5dec5f",
"hash_cont_tokens": "2e4c5353c7da3ce5"
},
"truncated": 0,
"non_truncated": 175,
"padded": 350,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Islamic_law_system|0": {
"hashes": {
"hash_examples": "c0e6db8bc84e105e",
"hash_full_prompts": "c0e6db8bc84e105e",
"hash_input_tokens": "3d4f6b54b4064f8b",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Jordan|0": {
"hashes": {
"hash_examples": "33deb5b4e5ddd6a1",
"hash_full_prompts": "33deb5b4e5ddd6a1",
"hash_input_tokens": "4a15d7c7d99c4fe8",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Kuwait|0": {
"hashes": {
"hash_examples": "eb41773346d7c46c",
"hash_full_prompts": "eb41773346d7c46c",
"hash_input_tokens": "e6fd2ecdfbf124e2",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Lebanon|0": {
"hashes": {
"hash_examples": "25932dbf4c13d34f",
"hash_full_prompts": "25932dbf4c13d34f",
"hash_input_tokens": "a25bf61c29654616",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Libya|0": {
"hashes": {
"hash_examples": "f2c4db63cd402926",
"hash_full_prompts": "f2c4db63cd402926",
"hash_input_tokens": "bada3d871ffb75b1",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Mauritania|0": {
"hashes": {
"hash_examples": "8723ab5fdf286b54",
"hash_full_prompts": "8723ab5fdf286b54",
"hash_input_tokens": "87904fc8b261a143",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Mesopotamia_civilization|0": {
"hashes": {
"hash_examples": "c33f5502a6130ca9",
"hash_full_prompts": "c33f5502a6130ca9",
"hash_input_tokens": "2764279e33a2a42d",
"hash_cont_tokens": "bd3cfefdb7f020ea"
},
"truncated": 0,
"non_truncated": 155,
"padded": 310,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Morocco|0": {
"hashes": {
"hash_examples": "588a5ed27904b1ae",
"hash_full_prompts": "588a5ed27904b1ae",
"hash_input_tokens": "2f46b974b38abbaf",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Oman|0": {
"hashes": {
"hash_examples": "d447c52b94248b69",
"hash_full_prompts": "d447c52b94248b69",
"hash_input_tokens": "f7d9f004f469076e",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Palestine|0": {
"hashes": {
"hash_examples": "19197e076ad14ff5",
"hash_full_prompts": "19197e076ad14ff5",
"hash_input_tokens": "e13a47067a0e8e09",
"hash_cont_tokens": "931e3b1ab5e4bb64"
},
"truncated": 0,
"non_truncated": 85,
"padded": 170,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Qatar|0": {
"hashes": {
"hash_examples": "cf0736fa185b28f6",
"hash_full_prompts": "cf0736fa185b28f6",
"hash_input_tokens": "ad051f0eeb374bbe",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Saudi_Arabia|0": {
"hashes": {
"hash_examples": "69beda6e1b85a08d",
"hash_full_prompts": "69beda6e1b85a08d",
"hash_input_tokens": "04e35523eb1f0a18",
"hash_cont_tokens": "d1d4fc99519883fd"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Somalia|0": {
"hashes": {
"hash_examples": "b387940c65784fbf",
"hash_full_prompts": "b387940c65784fbf",
"hash_input_tokens": "74f511f050765514",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Sudan|0": {
"hashes": {
"hash_examples": "e02c32b9d2dd0c3f",
"hash_full_prompts": "e02c32b9d2dd0c3f",
"hash_input_tokens": "d400e74d74cf7fe5",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Syria|0": {
"hashes": {
"hash_examples": "60a6f8fe73bda4bb",
"hash_full_prompts": "60a6f8fe73bda4bb",
"hash_input_tokens": "0a8a9f585d0ab693",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Tunisia|0": {
"hashes": {
"hash_examples": "34bb15d3830c5649",
"hash_full_prompts": "34bb15d3830c5649",
"hash_input_tokens": "ba2428ae3888a3eb",
"hash_cont_tokens": "986cb8561faea726"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:United_Arab_Emirates|0": {
"hashes": {
"hash_examples": "98a0ba78172718ce",
"hash_full_prompts": "98a0ba78172718ce",
"hash_input_tokens": "9fa80e8d1c312de9",
"hash_cont_tokens": "931e3b1ab5e4bb64"
},
"truncated": 0,
"non_truncated": 85,
"padded": 170,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Yemen|0": {
"hashes": {
"hash_examples": "18e9bcccbb4ced7a",
"hash_full_prompts": "18e9bcccbb4ced7a",
"hash_input_tokens": "8c15c4e00000bbbe",
"hash_cont_tokens": "a77a4d1eb7d881fc"
},
"truncated": 0,
"non_truncated": 10,
"padded": 20,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:communication|0": {
"hashes": {
"hash_examples": "9ff28ab5eab5c97b",
"hash_full_prompts": "9ff28ab5eab5c97b",
"hash_input_tokens": "484b9f65989a53d9",
"hash_cont_tokens": "82d3ef8e84dfe821"
},
"truncated": 0,
"non_truncated": 364,
"padded": 728,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:computer_and_phone|0": {
"hashes": {
"hash_examples": "37bac2f086aaf6c2",
"hash_full_prompts": "37bac2f086aaf6c2",
"hash_input_tokens": "305efc5b0d9348fb",
"hash_cont_tokens": "e6d1b9910719fadd"
},
"truncated": 0,
"non_truncated": 295,
"padded": 590,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:daily_life|0": {
"hashes": {
"hash_examples": "bf07363c1c252e2f",
"hash_full_prompts": "bf07363c1c252e2f",
"hash_input_tokens": "2b3e9e36618723f7",
"hash_cont_tokens": "9fae0ef7e881352e"
},
"truncated": 0,
"non_truncated": 337,
"padded": 674,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:entertainment|0": {
"hashes": {
"hash_examples": "37077bc00f0ac56a",
"hash_full_prompts": "37077bc00f0ac56a",
"hash_input_tokens": "7aa1d037397aa921",
"hash_cont_tokens": "e6d1b9910719fadd"
},
"truncated": 0,
"non_truncated": 295,
"padded": 590,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:mcq_exams_test_ar|0": {
"hashes": {
"hash_examples": "c07a5e78c5c0b8fe",
"hash_full_prompts": "c07a5e78c5c0b8fe",
"hash_input_tokens": "69c6ce7cc6f193ee",
"hash_cont_tokens": "b9713cde2c723199"
},
"truncated": 0,
"non_truncated": 557,
"padded": 2228,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_dialects|0": {
"hashes": {
"hash_examples": "c0b6081f83e14064",
"hash_full_prompts": "c0b6081f83e14064",
"hash_input_tokens": "eb57e93b4be16d62",
"hash_cont_tokens": "5c353e7543420e42"
},
"truncated": 0,
"non_truncated": 5395,
"padded": 21580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_msa|0": {
"hashes": {
"hash_examples": "64eb78a7c5b7484b",
"hash_full_prompts": "64eb78a7c5b7484b",
"hash_input_tokens": "7410f66b1ebf77ca",
"hash_cont_tokens": "7a45a4a7a6f4cb76"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"hashes": {
"hash_examples": "54fc3502c1c02c06",
"hash_full_prompts": "54fc3502c1c02c06",
"hash_input_tokens": "2c01b14091d1a8b9",
"hash_cont_tokens": "64d06418c9e36ee6"
},
"truncated": 0,
"non_truncated": 75,
"padded": 150,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"hashes": {
"hash_examples": "46572d83696552ae",
"hash_full_prompts": "46572d83696552ae",
"hash_input_tokens": "43678f078f2ba1ee",
"hash_cont_tokens": "d736202962d745cc"
},
"truncated": 5,
"non_truncated": 145,
"padded": 745,
"non_padded": 5,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"hashes": {
"hash_examples": "f430d97ff715bc1c",
"hash_full_prompts": "f430d97ff715bc1c",
"hash_input_tokens": "5b7ec89122551121",
"hash_cont_tokens": "acce08d807c3c053"
},
"truncated": 0,
"non_truncated": 150,
"padded": 750,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"hashes": {
"hash_examples": "6b70a7416584f98c",
"hash_full_prompts": "6b70a7416584f98c",
"hash_input_tokens": "abd50c5152a48b67",
"hash_cont_tokens": "02b5b2bdd50d0efe"
},
"truncated": 0,
"non_truncated": 7995,
"padded": 15990,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"hashes": {
"hash_examples": "bc2005cc9d2f436e",
"hash_full_prompts": "bc2005cc9d2f436e",
"hash_input_tokens": "7b2976baede1d4c1",
"hash_cont_tokens": "c96c231131cced9f"
},
"truncated": 0,
"non_truncated": 5995,
"padded": 17985,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"hashes": {
"hash_examples": "6fb0e254ea5945d8",
"hash_full_prompts": "6fb0e254ea5945d8",
"hash_input_tokens": "0dda31b5320b4279",
"hash_cont_tokens": "7dc2ba3f89d37012"
},
"truncated": 0,
"non_truncated": 1720,
"padded": 5160,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_exams|0": {
"hashes": {
"hash_examples": "6d721df351722656",
"hash_full_prompts": "6d721df351722656",
"hash_input_tokens": "4b60265927690100",
"hash_cont_tokens": "f0510f6022272e8b"
},
"truncated": 0,
"non_truncated": 537,
"padded": 2148,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:abstract_algebra|0": {
"hashes": {
"hash_examples": "f2ddca8f45c0a511",
"hash_full_prompts": "f2ddca8f45c0a511",
"hash_input_tokens": "42ee2c82713ff6ba",
"hash_cont_tokens": "796231ec04ed60fb"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:anatomy|0": {
"hashes": {
"hash_examples": "dfdbc1b83107668d",
"hash_full_prompts": "dfdbc1b83107668d",
"hash_input_tokens": "ca12b9774c0ae395",
"hash_cont_tokens": "9de4c5466d1e0484"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:astronomy|0": {
"hashes": {
"hash_examples": "9736a606002a848e",
"hash_full_prompts": "9736a606002a848e",
"hash_input_tokens": "5317974b9444fd9e",
"hash_cont_tokens": "11048c401bc70638"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:business_ethics|0": {
"hashes": {
"hash_examples": "735e452fbb6dc63d",
"hash_full_prompts": "735e452fbb6dc63d",
"hash_input_tokens": "0b1ad13a25f8b8cc",
"hash_cont_tokens": "796231ec04ed60fb"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:clinical_knowledge|0": {
"hashes": {
"hash_examples": "6ab0ca4da98aedcf",
"hash_full_prompts": "6ab0ca4da98aedcf",
"hash_input_tokens": "98161d41e3649855",
"hash_cont_tokens": "73dcc694a51e0a83"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1060,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_biology|0": {
"hashes": {
"hash_examples": "17e4e390848018a4",
"hash_full_prompts": "17e4e390848018a4",
"hash_input_tokens": "2d4e13408ec2409d",
"hash_cont_tokens": "29c9dc3aa97775a1"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_chemistry|0": {
"hashes": {
"hash_examples": "4abb169f6dfd234b",
"hash_full_prompts": "4abb169f6dfd234b",
"hash_input_tokens": "55f1f68c6e76e362",
"hash_cont_tokens": "796231ec04ed60fb"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_computer_science|0": {
"hashes": {
"hash_examples": "a369e2e941358a1e",
"hash_full_prompts": "a369e2e941358a1e",
"hash_input_tokens": "e165f70ecc346a03",
"hash_cont_tokens": "796231ec04ed60fb"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_mathematics|0": {
"hashes": {
"hash_examples": "d7be03b8b6020bff",
"hash_full_prompts": "d7be03b8b6020bff",
"hash_input_tokens": "7a08778730b4efe9",
"hash_cont_tokens": "796231ec04ed60fb"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_medicine|0": {
"hashes": {
"hash_examples": "0518a00f097346bf",
"hash_full_prompts": "0518a00f097346bf",
"hash_input_tokens": "630049b8937fe94d",
"hash_cont_tokens": "435d65a5ab9a93f9"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_physics|0": {
"hashes": {
"hash_examples": "5d842cd49bc70e12",
"hash_full_prompts": "5d842cd49bc70e12",
"hash_input_tokens": "1a2a3118776794ff",
"hash_cont_tokens": "f0082ed8c18436ee"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:computer_security|0": {
"hashes": {
"hash_examples": "8e85d9f85be9b32f",
"hash_full_prompts": "8e85d9f85be9b32f",
"hash_input_tokens": "05f287fa101c5ba4",
"hash_cont_tokens": "796231ec04ed60fb"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:conceptual_physics|0": {
"hashes": {
"hash_examples": "7964b55a0a49502b",
"hash_full_prompts": "7964b55a0a49502b",
"hash_input_tokens": "eee6e59ffa87c354",
"hash_cont_tokens": "dda0def6532b6f5e"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:econometrics|0": {
"hashes": {
"hash_examples": "1e192eae38347257",
"hash_full_prompts": "1e192eae38347257",
"hash_input_tokens": "0e716a324a44de6e",
"hash_cont_tokens": "60a6dc2b5566a21a"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:electrical_engineering|0": {
"hashes": {
"hash_examples": "cf97671d5c441da1",
"hash_full_prompts": "cf97671d5c441da1",
"hash_input_tokens": "eb80165b556ee54b",
"hash_cont_tokens": "695151de2055c453"
},
"truncated": 0,
"non_truncated": 145,
"padded": 580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:elementary_mathematics|0": {
"hashes": {
"hash_examples": "6f49107ed43c40c5",
"hash_full_prompts": "6f49107ed43c40c5",
"hash_input_tokens": "12fa85fe510031a8",
"hash_cont_tokens": "d088855d2ff4275f"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:formal_logic|0": {
"hashes": {
"hash_examples": "7922c376008ba77b",
"hash_full_prompts": "7922c376008ba77b",
"hash_input_tokens": "5bafb36723c936d4",
"hash_cont_tokens": "4fc998c4a7ceb190"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:global_facts|0": {
"hashes": {
"hash_examples": "11f9813185047d5b",
"hash_full_prompts": "11f9813185047d5b",
"hash_input_tokens": "631ae46a71f88faa",
"hash_cont_tokens": "796231ec04ed60fb"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_biology|0": {
"hashes": {
"hash_examples": "2a804b1d90cbe66e",
"hash_full_prompts": "2a804b1d90cbe66e",
"hash_input_tokens": "a92c84fe1857ea53",
"hash_cont_tokens": "a6f53f8daa468618"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_chemistry|0": {
"hashes": {
"hash_examples": "0032168adabc53b4",
"hash_full_prompts": "0032168adabc53b4",
"hash_input_tokens": "d0a268c1e9055acf",
"hash_cont_tokens": "507a5f3e551886f0"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_computer_science|0": {
"hashes": {
"hash_examples": "f2fb8740f9df980f",
"hash_full_prompts": "f2fb8740f9df980f",
"hash_input_tokens": "cddcc127696ce8df",
"hash_cont_tokens": "796231ec04ed60fb"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_european_history|0": {
"hashes": {
"hash_examples": "73509021e7e66435",
"hash_full_prompts": "73509021e7e66435",
"hash_input_tokens": "3fcff6ce26800f62",
"hash_cont_tokens": "bcae125cb9fb2d84"
},
"truncated": 8,
"non_truncated": 157,
"padded": 652,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_geography|0": {
"hashes": {
"hash_examples": "9e08d1894940ff42",
"hash_full_prompts": "9e08d1894940ff42",
"hash_input_tokens": "0bffa1bee1a49b5d",
"hash_cont_tokens": "313f0999303697bc"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_government_and_politics|0": {
"hashes": {
"hash_examples": "64b7e97817ca6c76",
"hash_full_prompts": "64b7e97817ca6c76",
"hash_input_tokens": "b74b0a509328138a",
"hash_cont_tokens": "3b2eb6b44a2eb6ae"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_macroeconomics|0": {
"hashes": {
"hash_examples": "9f582da8534bd2ef",
"hash_full_prompts": "9f582da8534bd2ef",
"hash_input_tokens": "ee7fe4315c3b1c97",
"hash_cont_tokens": "9e02fba0cd4b4c1d"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_mathematics|0": {
"hashes": {
"hash_examples": "fd54f1c10d423c51",
"hash_full_prompts": "fd54f1c10d423c51",
"hash_input_tokens": "1ee9a4a575cd2f8e",
"hash_cont_tokens": "d654e1bc3b1c3084"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_microeconomics|0": {
"hashes": {
"hash_examples": "7037896925aaf42f",
"hash_full_prompts": "7037896925aaf42f",
"hash_input_tokens": "f822820d17d458d2",
"hash_cont_tokens": "b8c153288ca79098"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_physics|0": {
"hashes": {
"hash_examples": "60c3776215167dae",
"hash_full_prompts": "60c3776215167dae",
"hash_input_tokens": "bf67011e13f94350",
"hash_cont_tokens": "d0c9d5d039dc341f"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_psychology|0": {
"hashes": {
"hash_examples": "61176bfd5da1298f",
"hash_full_prompts": "61176bfd5da1298f",
"hash_input_tokens": "3b165959b3d01b68",
"hash_cont_tokens": "27cc5c748fde016d"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_statistics|0": {
"hashes": {
"hash_examples": "40dfeebd1ea10f76",
"hash_full_prompts": "40dfeebd1ea10f76",
"hash_input_tokens": "645ed107301351a5",
"hash_cont_tokens": "2abbeeaf504fff3d"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_us_history|0": {
"hashes": {
"hash_examples": "03daa510ba917f4d",
"hash_full_prompts": "03daa510ba917f4d",
"hash_input_tokens": "e5e76c5075bd1010",
"hash_cont_tokens": "5306022715bd51cb"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_world_history|0": {
"hashes": {
"hash_examples": "be075ffd579f43c2",
"hash_full_prompts": "be075ffd579f43c2",
"hash_input_tokens": "52e507b2495e8a93",
"hash_cont_tokens": "5b9b218a43c02319"
},
"truncated": 12,
"non_truncated": 225,
"padded": 936,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:human_aging|0": {
"hashes": {
"hash_examples": "caa5b69f640bd1ef",
"hash_full_prompts": "caa5b69f640bd1ef",
"hash_input_tokens": "ca3c2139d6e25a3d",
"hash_cont_tokens": "ee65c8985e64846f"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:human_sexuality|0": {
"hashes": {
"hash_examples": "5ed2e38fb25a3767",
"hash_full_prompts": "5ed2e38fb25a3767",
"hash_input_tokens": "f2c111c7f565aa25",
"hash_cont_tokens": "1b3af67dcfd6a12e"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:international_law|0": {
"hashes": {
"hash_examples": "4e3e9e28d1b96484",
"hash_full_prompts": "4e3e9e28d1b96484",
"hash_input_tokens": "b52345a1ec5a3270",
"hash_cont_tokens": "e30a221db3c7c99c"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:jurisprudence|0": {
"hashes": {
"hash_examples": "e264b755366310b3",
"hash_full_prompts": "e264b755366310b3",
"hash_input_tokens": "27fcf545bf4e0b77",
"hash_cont_tokens": "9355dd1aca613494"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:logical_fallacies|0": {
"hashes": {
"hash_examples": "a4ab6965a3e38071",
"hash_full_prompts": "a4ab6965a3e38071",
"hash_input_tokens": "4b1f9f77eaf7844f",
"hash_cont_tokens": "8597746dc4c0ee95"
},
"truncated": 0,
"non_truncated": 163,
"padded": 652,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:machine_learning|0": {
"hashes": {
"hash_examples": "b92320efa6636b40",
"hash_full_prompts": "b92320efa6636b40",
"hash_input_tokens": "8f7ae2ae8f7aafd0",
"hash_cont_tokens": "bcd57eca4368fcf7"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:management|0": {
"hashes": {
"hash_examples": "c9ee4872a850fe20",
"hash_full_prompts": "c9ee4872a850fe20",
"hash_input_tokens": "4d0adbd0d1550144",
"hash_cont_tokens": "3dc490f84ccb3429"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:marketing|0": {
"hashes": {
"hash_examples": "0c151b70f6a047e3",
"hash_full_prompts": "0c151b70f6a047e3",
"hash_input_tokens": "d34e1c13f45a921c",
"hash_cont_tokens": "242ab855355506e2"
},
"truncated": 0,
"non_truncated": 234,
"padded": 936,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:medical_genetics|0": {
"hashes": {
"hash_examples": "513f6cb8fca3a24e",
"hash_full_prompts": "513f6cb8fca3a24e",
"hash_input_tokens": "2b432f476f3229be",
"hash_cont_tokens": "796231ec04ed60fb"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:miscellaneous|0": {
"hashes": {
"hash_examples": "259a190d635331db",
"hash_full_prompts": "259a190d635331db",
"hash_input_tokens": "c0538ad6ae8e5594",
"hash_cont_tokens": "12f355d006ae696b"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3132,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:moral_disputes|0": {
"hashes": {
"hash_examples": "b85052c48a0b7bc3",
"hash_full_prompts": "b85052c48a0b7bc3",
"hash_input_tokens": "82ee524ba99360d9",
"hash_cont_tokens": "31aedc12c5ed8257"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1384,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:moral_scenarios|0": {
"hashes": {
"hash_examples": "28d0b069ef00dd00",
"hash_full_prompts": "28d0b069ef00dd00",
"hash_input_tokens": "0c7a2b34d9bc1716",
"hash_cont_tokens": "e607fafd3ed07f88"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:nutrition|0": {
"hashes": {
"hash_examples": "00c9bc5f1d305b2f",
"hash_full_prompts": "00c9bc5f1d305b2f",
"hash_input_tokens": "02dc6df4c5ef0d7b",
"hash_cont_tokens": "78dcb7475cbe2637"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:philosophy|0": {
"hashes": {
"hash_examples": "a458c08454a3fd5f",
"hash_full_prompts": "a458c08454a3fd5f",
"hash_input_tokens": "cd6e7b442e655fb7",
"hash_cont_tokens": "d41615c8a1cb0342"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1244,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:prehistory|0": {
"hashes": {
"hash_examples": "d6a0ecbdbb670e9c",
"hash_full_prompts": "d6a0ecbdbb670e9c",
"hash_input_tokens": "465b22a09952d7d0",
"hash_cont_tokens": "76ee98644fc72db9"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_accounting|0": {
"hashes": {
"hash_examples": "b4a95fe480b6540e",
"hash_full_prompts": "b4a95fe480b6540e",
"hash_input_tokens": "8afbc44ea0a26213",
"hash_cont_tokens": "8b55a34060a829d6"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1128,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_law|0": {
"hashes": {
"hash_examples": "c2be9651cdbdde3b",
"hash_full_prompts": "c2be9651cdbdde3b",
"hash_input_tokens": "28f63c9903d1abb4",
"hash_cont_tokens": "7128701f90aeaded"
},
"truncated": 44,
"non_truncated": 1490,
"padded": 6092,
"non_padded": 44,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_medicine|0": {
"hashes": {
"hash_examples": "26ce92416288f273",
"hash_full_prompts": "26ce92416288f273",
"hash_input_tokens": "b48c06f1600a8253",
"hash_cont_tokens": "ab8311d41572ad1c"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_psychology|0": {
"hashes": {
"hash_examples": "71ea5f182ea9a641",
"hash_full_prompts": "71ea5f182ea9a641",
"hash_input_tokens": "898b378138717bf8",
"hash_cont_tokens": "88f4875fad1dd559"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2448,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:public_relations|0": {
"hashes": {
"hash_examples": "125adc21f91f8d77",
"hash_full_prompts": "125adc21f91f8d77",
"hash_input_tokens": "d7b6ab48c909105c",
"hash_cont_tokens": "c2cc499a14292f15"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:security_studies|0": {
"hashes": {
"hash_examples": "3c18b216c099fb26",
"hash_full_prompts": "3c18b216c099fb26",
"hash_input_tokens": "9af698937964f56d",
"hash_cont_tokens": "cdbf7823b1e03cee"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:sociology|0": {
"hashes": {
"hash_examples": "3f2a9634cef7417d",
"hash_full_prompts": "3f2a9634cef7417d",
"hash_input_tokens": "96eb3b3e266dd7bf",
"hash_cont_tokens": "9666e8b60d1ccbb0"
},
"truncated": 0,
"non_truncated": 201,
"padded": 804,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:us_foreign_policy|0": {
"hashes": {
"hash_examples": "22249da54056475e",
"hash_full_prompts": "22249da54056475e",
"hash_input_tokens": "4310f3c37578a8c4",
"hash_cont_tokens": "796231ec04ed60fb"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:virology|0": {
"hashes": {
"hash_examples": "9d194b9471dc624e",
"hash_full_prompts": "9d194b9471dc624e",
"hash_input_tokens": "9d6fe65a2b111615",
"hash_cont_tokens": "87d3f57242385be9"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:world_religions|0": {
"hashes": {
"hash_examples": "229e5fe50082b064",
"hash_full_prompts": "229e5fe50082b064",
"hash_input_tokens": "d334d418bcafc6ea",
"hash_cont_tokens": "75f830ad940081f6"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arc_challenge_okapi_ar|0": {
"hashes": {
"hash_examples": "ab893807673bc355",
"hash_full_prompts": "ab893807673bc355",
"hash_input_tokens": "fafde20317da89d8",
"hash_cont_tokens": "b9e3d72c576230f5"
},
"truncated": 0,
"non_truncated": 1160,
"padded": 4640,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arc_easy_ar|0": {
"hashes": {
"hash_examples": "acb688624acc3d04",
"hash_full_prompts": "acb688624acc3d04",
"hash_input_tokens": "c54c64494152d6a5",
"hash_cont_tokens": "e08bae51e68e4ea4"
},
"truncated": 0,
"non_truncated": 2364,
"padded": 9455,
"non_padded": 1,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|boolq_ar|0": {
"hashes": {
"hash_examples": "48355a67867e0c32",
"hash_full_prompts": "48355a67867e0c32",
"hash_input_tokens": "9ab0fca83ef7c44c",
"hash_cont_tokens": "1ce0dabeb5e3ffd5"
},
"truncated": 18,
"non_truncated": 3242,
"padded": 6500,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|copa_ext_ar|0": {
"hashes": {
"hash_examples": "9bb83301bb72eecf",
"hash_full_prompts": "9bb83301bb72eecf",
"hash_input_tokens": "7191f8e046b05460",
"hash_cont_tokens": "c9978194b1f0f4a1"
},
"truncated": 0,
"non_truncated": 90,
"padded": 180,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|hellaswag_okapi_ar|0": {
"hashes": {
"hash_examples": "6e8cf57a322dfadd",
"hash_full_prompts": "6e8cf57a322dfadd",
"hash_input_tokens": "7df920d0119dbc6f",
"hash_cont_tokens": "9908f4fc3f6d24df"
},
"truncated": 0,
"non_truncated": 9171,
"padded": 36656,
"non_padded": 28,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|openbook_qa_ext_ar|0": {
"hashes": {
"hash_examples": "923d41eb0aca93eb",
"hash_full_prompts": "923d41eb0aca93eb",
"hash_input_tokens": "008f45510cc326d2",
"hash_cont_tokens": "8769c0ea8fac7037"
},
"truncated": 0,
"non_truncated": 495,
"padded": 1968,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|piqa_ar|0": {
"hashes": {
"hash_examples": "94bc205a520d3ea0",
"hash_full_prompts": "94bc205a520d3ea0",
"hash_input_tokens": "704d11d6b3f7590d",
"hash_cont_tokens": "a965652e2ebb7c30"
},
"truncated": 10,
"non_truncated": 1823,
"padded": 3639,
"non_padded": 27,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|race_ar|0": {
"hashes": {
"hash_examples": "de65130bae647516",
"hash_full_prompts": "de65130bae647516",
"hash_input_tokens": "b8eb6e669fd6db2d",
"hash_cont_tokens": "f45444e1958bf420"
},
"truncated": 3441,
"non_truncated": 1488,
"padded": 16259,
"non_padded": 3457,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|sciq_ar|0": {
"hashes": {
"hash_examples": "dbf0bf1b7eaad255",
"hash_full_prompts": "dbf0bf1b7eaad255",
"hash_input_tokens": "2dd7183fa5a1f89a",
"hash_cont_tokens": "24a4ec4eb7db3012"
},
"truncated": 44,
"non_truncated": 951,
"padded": 3926,
"non_padded": 54,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|toxigen_ar|0": {
"hashes": {
"hash_examples": "1e139513004a9a2e",
"hash_full_prompts": "1e139513004a9a2e",
"hash_input_tokens": "8569475e882dc8b9",
"hash_cont_tokens": "ca8ced1bfaf0a5e0"
},
"truncated": 0,
"non_truncated": 935,
"padded": 1862,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|xstory_cloze:ar|0": {
"hashes": {
"hash_examples": "865426a22c787481",
"hash_full_prompts": "865426a22c787481",
"hash_input_tokens": "ac4a5face409471c",
"hash_cont_tokens": "061cd8fe8a7855aa"
},
"truncated": 0,
"non_truncated": 1511,
"padded": 2986,
"non_padded": 36,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "5024dd2ff6cede4d",
"hash_full_prompts": "5024dd2ff6cede4d",
"hash_input_tokens": "a6e41d9d0f44663a",
"hash_cont_tokens": "639d39f5fcc3b4d1"
},
"truncated": 3582,
"non_truncated": 69382,
"padded": 231911,
"non_padded": 3712,
"num_truncated_few_shots": 0
}
}