{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 511.010693648,
"end_time": 8403.985433605,
"total_evaluation_time_secondes": "7892.974739957",
"model_name": "MTSAIR/multi_verse_model",
"model_sha": "a4ca706d1bbc263b95e223a80ad68b0f125840b3",
"model_dtype": "torch.bfloat16",
"model_size": "13.99 GB",
"config": null
},
"results": {
"community|acva:Algeria|0": {
"acc_norm": 0.6,
"acc_norm_stderr": 0.035172622905632896
},
"community|acva:Ancient_Egypt|0": {
"acc_norm": 0.6888888888888889,
"acc_norm_stderr": 0.026125675418954497
},
"community|acva:Arab_Empire|0": {
"acc_norm": 0.5509433962264151,
"acc_norm_stderr": 0.030612730713641092
},
"community|acva:Arabic_Architecture|0": {
"acc_norm": 0.6205128205128205,
"acc_norm_stderr": 0.03483959266365359
},
"community|acva:Arabic_Art|0": {
"acc_norm": 0.6358974358974359,
"acc_norm_stderr": 0.034546538677863885
},
"community|acva:Arabic_Astronomy|0": {
"acc_norm": 0.4717948717948718,
"acc_norm_stderr": 0.035840746749208334
},
"community|acva:Arabic_Calligraphy|0": {
"acc_norm": 0.7137254901960784,
"acc_norm_stderr": 0.028362211110152732
},
"community|acva:Arabic_Ceremony|0": {
"acc_norm": 0.6864864864864865,
"acc_norm_stderr": 0.03420071750756413
},
"community|acva:Arabic_Clothing|0": {
"acc_norm": 0.6205128205128205,
"acc_norm_stderr": 0.03483959266365359
},
"community|acva:Arabic_Culture|0": {
"acc_norm": 0.6820512820512821,
"acc_norm_stderr": 0.03343383454355787
},
"community|acva:Arabic_Food|0": {
"acc_norm": 0.6205128205128205,
"acc_norm_stderr": 0.03483959266365358
},
"community|acva:Arabic_Funeral|0": {
"acc_norm": 0.8105263157894737,
"acc_norm_stderr": 0.04041979281351405
},
"community|acva:Arabic_Geography|0": {
"acc_norm": 0.593103448275862,
"acc_norm_stderr": 0.04093793981266237
},
"community|acva:Arabic_History|0": {
"acc_norm": 0.5230769230769231,
"acc_norm_stderr": 0.0358596530894741
},
"community|acva:Arabic_Language_Origin|0": {
"acc_norm": 0.7157894736842105,
"acc_norm_stderr": 0.046520974798961987
},
"community|acva:Arabic_Literature|0": {
"acc_norm": 0.7103448275862069,
"acc_norm_stderr": 0.03780019230438014
},
"community|acva:Arabic_Math|0": {
"acc_norm": 0.4461538461538462,
"acc_norm_stderr": 0.03568913546569233
},
"community|acva:Arabic_Medicine|0": {
"acc_norm": 0.7103448275862069,
"acc_norm_stderr": 0.037800192304380135
},
"community|acva:Arabic_Music|0": {
"acc_norm": 0.7482014388489209,
"acc_norm_stderr": 0.03694846055443905
},
"community|acva:Arabic_Ornament|0": {
"acc_norm": 0.7743589743589744,
"acc_norm_stderr": 0.030010921825357008
},
"community|acva:Arabic_Philosophy|0": {
"acc_norm": 0.7448275862068966,
"acc_norm_stderr": 0.03632984052707842
},
"community|acva:Arabic_Physics_and_Chemistry|0": {
"acc_norm": 0.6102564102564103,
"acc_norm_stderr": 0.0350142477625637
},
"community|acva:Arabic_Wedding|0": {
"acc_norm": 0.7435897435897436,
"acc_norm_stderr": 0.03134970994274493
},
"community|acva:Bahrain|0": {
"acc_norm": 0.6888888888888889,
"acc_norm_stderr": 0.06979205927323111
},
"community|acva:Comoros|0": {
"acc_norm": 0.4666666666666667,
"acc_norm_stderr": 0.0752101433090355
},
"community|acva:Egypt_modern|0": {
"acc_norm": 0.6526315789473685,
"acc_norm_stderr": 0.04910947400776659
},
"community|acva:InfluenceFromAncientEgypt|0": {
"acc_norm": 0.8102564102564103,
"acc_norm_stderr": 0.0281510066376941
},
"community|acva:InfluenceFromByzantium|0": {
"acc_norm": 0.7517241379310344,
"acc_norm_stderr": 0.0360010569272777
},
"community|acva:InfluenceFromChina|0": {
"acc_norm": 0.28205128205128205,
"acc_norm_stderr": 0.03230798601799115
},
"community|acva:InfluenceFromGreece|0": {
"acc_norm": 0.8153846153846154,
"acc_norm_stderr": 0.027855716655754165
},
"community|acva:InfluenceFromIslam|0": {
"acc_norm": 0.8620689655172413,
"acc_norm_stderr": 0.02873563218390807
},
"community|acva:InfluenceFromPersia|0": {
"acc_norm": 0.7657142857142857,
"acc_norm_stderr": 0.032109360396926204
},
"community|acva:InfluenceFromRome|0": {
"acc_norm": 0.6615384615384615,
"acc_norm_stderr": 0.033972800327340937
},
"community|acva:Iraq|0": {
"acc_norm": 0.7058823529411765,
"acc_norm_stderr": 0.04971495616050099
},
"community|acva:Islam_Education|0": {
"acc_norm": 0.8,
"acc_norm_stderr": 0.028718326344709492
},
"community|acva:Islam_branches_and_schools|0": {
"acc_norm": 0.7257142857142858,
"acc_norm_stderr": 0.033822819375172945
},
"community|acva:Islamic_law_system|0": {
"acc_norm": 0.7794871794871795,
"acc_norm_stderr": 0.02976600466164412
},
"community|acva:Jordan|0": {
"acc_norm": 0.5777777777777777,
"acc_norm_stderr": 0.07446027270295805
},
"community|acva:Kuwait|0": {
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.06267511942419628
},
"community|acva:Lebanon|0": {
"acc_norm": 0.6,
"acc_norm_stderr": 0.07385489458759964
},
"community|acva:Libya|0": {
"acc_norm": 0.6444444444444445,
"acc_norm_stderr": 0.07216392363431012
},
"community|acva:Mauritania|0": {
"acc_norm": 0.7333333333333333,
"acc_norm_stderr": 0.06666666666666668
},
"community|acva:Mesopotamia_civilization|0": {
"acc_norm": 0.6580645161290323,
"acc_norm_stderr": 0.038224865159988686
},
"community|acva:Morocco|0": {
"acc_norm": 0.6888888888888889,
"acc_norm_stderr": 0.06979205927323111
},
"community|acva:Oman|0": {
"acc_norm": 0.6888888888888889,
"acc_norm_stderr": 0.06979205927323111
},
"community|acva:Palestine|0": {
"acc_norm": 0.6,
"acc_norm_stderr": 0.05345224838248487
},
"community|acva:Qatar|0": {
"acc_norm": 0.7111111111111111,
"acc_norm_stderr": 0.06832943242540508
},
"community|acva:Saudi_Arabia|0": {
"acc_norm": 0.6102564102564103,
"acc_norm_stderr": 0.035014247762563705
},
"community|acva:Somalia|0": {
"acc_norm": 0.6222222222222222,
"acc_norm_stderr": 0.07309112127323451
},
"community|acva:Sudan|0": {
"acc_norm": 0.6444444444444445,
"acc_norm_stderr": 0.07216392363431014
},
"community|acva:Syria|0": {
"acc_norm": 0.7555555555555555,
"acc_norm_stderr": 0.06478835438716998
},
"community|acva:Tunisia|0": {
"acc_norm": 0.6444444444444445,
"acc_norm_stderr": 0.07216392363431012
},
"community|acva:United_Arab_Emirates|0": {
"acc_norm": 0.5647058823529412,
"acc_norm_stderr": 0.05409572080481032
},
"community|acva:Yemen|0": {
"acc_norm": 0.4,
"acc_norm_stderr": 0.16329931618554522
},
"community|acva:communication|0": {
"acc_norm": 0.5054945054945055,
"acc_norm_stderr": 0.026241609463663662
},
"community|acva:computer_and_phone|0": {
"acc_norm": 0.6203389830508474,
"acc_norm_stderr": 0.02830341732920573
},
"community|acva:daily_life|0": {
"acc_norm": 0.7863501483679525,
"acc_norm_stderr": 0.022360918855282654
},
"community|acva:entertainment|0": {
"acc_norm": 0.6508474576271186,
"acc_norm_stderr": 0.02780184037657597
},
"community|alghafa:mcq_exams_test_ar|0": {
"acc_norm": 0.3231597845601436,
"acc_norm_stderr": 0.019834183948008267
},
"community|alghafa:meta_ar_dialects|0": {
"acc_norm": 0.3238183503243744,
"acc_norm_stderr": 0.006371283751353568
},
"community|alghafa:meta_ar_msa|0": {
"acc_norm": 0.39329608938547483,
"acc_norm_stderr": 0.016337268694270105
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"acc_norm": 0.52,
"acc_norm_stderr": 0.05807730170189531
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"acc_norm": 0.6266666666666667,
"acc_norm_stderr": 0.039625389762066365
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"acc_norm": 0.5066666666666667,
"acc_norm_stderr": 0.040957954833356194
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"acc_norm": 0.8213883677298311,
"acc_norm_stderr": 0.0042839764210925455
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"acc_norm": 0.49040867389491244,
"acc_norm_stderr": 0.006457013817394874
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"acc_norm": 0.42209302325581394,
"acc_norm_stderr": 0.011912286151621005
},
"community|arabic_exams|0": {
"acc_norm": 0.3240223463687151,
"acc_norm_stderr": 0.02021488411955933
},
"community|arabic_mmlu:abstract_algebra|0": {
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"community|arabic_mmlu:anatomy|0": {
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.038201699145179055
},
"community|arabic_mmlu:astronomy|0": {
"acc_norm": 0.3684210526315789,
"acc_norm_stderr": 0.03925523381052932
},
"community|arabic_mmlu:business_ethics|0": {
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956911
},
"community|arabic_mmlu:clinical_knowledge|0": {
"acc_norm": 0.4037735849056604,
"acc_norm_stderr": 0.030197611600197953
},
"community|arabic_mmlu:college_biology|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03942082639927214
},
"community|arabic_mmlu:college_chemistry|0": {
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"community|arabic_mmlu:college_computer_science|0": {
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768077
},
"community|arabic_mmlu:college_mathematics|0": {
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"community|arabic_mmlu:college_medicine|0": {
"acc_norm": 0.31213872832369943,
"acc_norm_stderr": 0.035331333893236574
},
"community|arabic_mmlu:college_physics|0": {
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.041583075330832865
},
"community|arabic_mmlu:computer_security|0": {
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"community|arabic_mmlu:conceptual_physics|0": {
"acc_norm": 0.3404255319148936,
"acc_norm_stderr": 0.030976692998534436
},
"community|arabic_mmlu:econometrics|0": {
"acc_norm": 0.2719298245614035,
"acc_norm_stderr": 0.04185774424022056
},
"community|arabic_mmlu:electrical_engineering|0": {
"acc_norm": 0.3931034482758621,
"acc_norm_stderr": 0.040703290137070705
},
"community|arabic_mmlu:elementary_mathematics|0": {
"acc_norm": 0.30423280423280424,
"acc_norm_stderr": 0.023695415009463087
},
"community|arabic_mmlu:formal_logic|0": {
"acc_norm": 0.30952380952380953,
"acc_norm_stderr": 0.041349130183033156
},
"community|arabic_mmlu:global_facts|0": {
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542129
},
"community|arabic_mmlu:high_school_biology|0": {
"acc_norm": 0.36451612903225805,
"acc_norm_stderr": 0.027379871229943235
},
"community|arabic_mmlu:high_school_chemistry|0": {
"acc_norm": 0.35467980295566504,
"acc_norm_stderr": 0.03366124489051448
},
"community|arabic_mmlu:high_school_computer_science|0": {
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001975
},
"community|arabic_mmlu:high_school_european_history|0": {
"acc_norm": 0.23030303030303031,
"acc_norm_stderr": 0.03287666758603489
},
"community|arabic_mmlu:high_school_geography|0": {
"acc_norm": 0.3434343434343434,
"acc_norm_stderr": 0.03383201223244441
},
"community|arabic_mmlu:high_school_government_and_politics|0": {
"acc_norm": 0.29015544041450775,
"acc_norm_stderr": 0.03275264467791516
},
"community|arabic_mmlu:high_school_macroeconomics|0": {
"acc_norm": 0.32051282051282054,
"acc_norm_stderr": 0.023661296393964273
},
"community|arabic_mmlu:high_school_mathematics|0": {
"acc_norm": 0.3148148148148148,
"acc_norm_stderr": 0.02831753349606648
},
"community|arabic_mmlu:high_school_microeconomics|0": {
"acc_norm": 0.2815126050420168,
"acc_norm_stderr": 0.029213549414372174
},
"community|arabic_mmlu:high_school_physics|0": {
"acc_norm": 0.271523178807947,
"acc_norm_stderr": 0.03631329803969653
},
"community|arabic_mmlu:high_school_psychology|0": {
"acc_norm": 0.28990825688073396,
"acc_norm_stderr": 0.0194530666092016
},
"community|arabic_mmlu:high_school_statistics|0": {
"acc_norm": 0.33796296296296297,
"acc_norm_stderr": 0.032259413526312945
},
"community|arabic_mmlu:high_school_us_history|0": {
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.03096451792692341
},
"community|arabic_mmlu:high_school_world_history|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.0306858205966108
},
"community|arabic_mmlu:human_aging|0": {
"acc_norm": 0.29596412556053814,
"acc_norm_stderr": 0.030636591348699782
},
"community|arabic_mmlu:human_sexuality|0": {
"acc_norm": 0.3435114503816794,
"acc_norm_stderr": 0.041649760719448786
},
"community|arabic_mmlu:international_law|0": {
"acc_norm": 0.45454545454545453,
"acc_norm_stderr": 0.045454545454545456
},
"community|arabic_mmlu:jurisprudence|0": {
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.04803752235190193
},
"community|arabic_mmlu:logical_fallacies|0": {
"acc_norm": 0.3987730061349693,
"acc_norm_stderr": 0.03847021420456026
},
"community|arabic_mmlu:machine_learning|0": {
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.04287858751340456
},
"community|arabic_mmlu:management|0": {
"acc_norm": 0.34951456310679613,
"acc_norm_stderr": 0.047211885060971716
},
"community|arabic_mmlu:marketing|0": {
"acc_norm": 0.47863247863247865,
"acc_norm_stderr": 0.032726164476349545
},
"community|arabic_mmlu:medical_genetics|0": {
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"community|arabic_mmlu:miscellaneous|0": {
"acc_norm": 0.3780332056194125,
"acc_norm_stderr": 0.017339844462104598
},
"community|arabic_mmlu:moral_disputes|0": {
"acc_norm": 0.42196531791907516,
"acc_norm_stderr": 0.02658923114217426
},
"community|arabic_mmlu:moral_scenarios|0": {
"acc_norm": 0.2569832402234637,
"acc_norm_stderr": 0.014614465821966335
},
"community|arabic_mmlu:nutrition|0": {
"acc_norm": 0.45098039215686275,
"acc_norm_stderr": 0.028491993586171563
},
"community|arabic_mmlu:philosophy|0": {
"acc_norm": 0.3954983922829582,
"acc_norm_stderr": 0.027770918531427834
},
"community|arabic_mmlu:prehistory|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.02622964917882116
},
"community|arabic_mmlu:professional_accounting|0": {
"acc_norm": 0.2695035460992908,
"acc_norm_stderr": 0.02646903681859063
},
"community|arabic_mmlu:professional_law|0": {
"acc_norm": 0.2953063885267275,
"acc_norm_stderr": 0.011651061936208823
},
"community|arabic_mmlu:professional_medicine|0": {
"acc_norm": 0.25735294117647056,
"acc_norm_stderr": 0.0265565194700415
},
"community|arabic_mmlu:professional_psychology|0": {
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.018249024411207668
},
"community|arabic_mmlu:public_relations|0": {
"acc_norm": 0.39090909090909093,
"acc_norm_stderr": 0.04673752333670237
},
"community|arabic_mmlu:security_studies|0": {
"acc_norm": 0.42448979591836733,
"acc_norm_stderr": 0.031642094879429414
},
"community|arabic_mmlu:sociology|0": {
"acc_norm": 0.46766169154228854,
"acc_norm_stderr": 0.035281314729336065
},
"community|arabic_mmlu:us_foreign_policy|0": {
"acc_norm": 0.45,
"acc_norm_stderr": 0.049999999999999996
},
"community|arabic_mmlu:virology|0": {
"acc_norm": 0.3373493975903614,
"acc_norm_stderr": 0.03680783690727581
},
"community|arabic_mmlu:world_religions|0": {
"acc_norm": 0.26900584795321636,
"acc_norm_stderr": 0.0340105262010409
},
"community|arc_challenge_okapi_ar|0": {
"acc_norm": 0.3620689655172414,
"acc_norm_stderr": 0.014116947443178733
},
"community|arc_easy_ar|0": {
"acc_norm": 0.3680203045685279,
"acc_norm_stderr": 0.009921003252755465
},
"community|boolq_ar|0": {
"acc_norm": 0.7039877300613497,
"acc_norm_stderr": 0.007996413087878369
},
"community|copa_ext_ar|0": {
"acc_norm": 0.4777777777777778,
"acc_norm_stderr": 0.05294752255076824
},
"community|hellaswag_okapi_ar|0": {
"acc_norm": 0.29702322538436376,
"acc_norm_stderr": 0.004771786733837027
},
"community|openbook_qa_ext_ar|0": {
"acc_norm": 0.4303030303030303,
"acc_norm_stderr": 0.0222764346182628
},
"community|piqa_ar|0": {
"acc_norm": 0.5739225313693399,
"acc_norm_stderr": 0.011553356838321862
},
"community|race_ar|0": {
"acc_norm": 0.41022519780888617,
"acc_norm_stderr": 0.007006787133034929
},
"community|sciq_ar|0": {
"acc_norm": 0.5195979899497487,
"acc_norm_stderr": 0.015846849993555182
},
"community|toxigen_ar|0": {
"acc_norm": 0.6042780748663101,
"acc_norm_stderr": 0.016000738844764214
},
"lighteval|xstory_cloze:ar|0": {
"acc": 0.5632031767041694,
"acc_stderr": 0.01276391225017363
},
"community|acva:_average|0": {
"acc_norm": 0.6599113108913827,
"acc_norm_stderr": 0.045715486109215185
},
"community|alghafa:_average|0": {
"acc_norm": 0.4919441802759871,
"acc_norm_stderr": 0.022650739897895362
},
"community|arabic_mmlu:_average|0": {
"acc_norm": 0.33719627537819813,
"acc_norm_stderr": 0.03504514733209233
},
"all": {
"acc_norm": 0.4962501372201233,
"acc_norm_stderr": 0.03730059982193123,
"acc": 0.5632031767041694,
"acc_stderr": 0.01276391225017363
}
},
"versions": {
"community|acva:Algeria|0": 0,
"community|acva:Ancient_Egypt|0": 0,
"community|acva:Arab_Empire|0": 0,
"community|acva:Arabic_Architecture|0": 0,
"community|acva:Arabic_Art|0": 0,
"community|acva:Arabic_Astronomy|0": 0,
"community|acva:Arabic_Calligraphy|0": 0,
"community|acva:Arabic_Ceremony|0": 0,
"community|acva:Arabic_Clothing|0": 0,
"community|acva:Arabic_Culture|0": 0,
"community|acva:Arabic_Food|0": 0,
"community|acva:Arabic_Funeral|0": 0,
"community|acva:Arabic_Geography|0": 0,
"community|acva:Arabic_History|0": 0,
"community|acva:Arabic_Language_Origin|0": 0,
"community|acva:Arabic_Literature|0": 0,
"community|acva:Arabic_Math|0": 0,
"community|acva:Arabic_Medicine|0": 0,
"community|acva:Arabic_Music|0": 0,
"community|acva:Arabic_Ornament|0": 0,
"community|acva:Arabic_Philosophy|0": 0,
"community|acva:Arabic_Physics_and_Chemistry|0": 0,
"community|acva:Arabic_Wedding|0": 0,
"community|acva:Bahrain|0": 0,
"community|acva:Comoros|0": 0,
"community|acva:Egypt_modern|0": 0,
"community|acva:InfluenceFromAncientEgypt|0": 0,
"community|acva:InfluenceFromByzantium|0": 0,
"community|acva:InfluenceFromChina|0": 0,
"community|acva:InfluenceFromGreece|0": 0,
"community|acva:InfluenceFromIslam|0": 0,
"community|acva:InfluenceFromPersia|0": 0,
"community|acva:InfluenceFromRome|0": 0,
"community|acva:Iraq|0": 0,
"community|acva:Islam_Education|0": 0,
"community|acva:Islam_branches_and_schools|0": 0,
"community|acva:Islamic_law_system|0": 0,
"community|acva:Jordan|0": 0,
"community|acva:Kuwait|0": 0,
"community|acva:Lebanon|0": 0,
"community|acva:Libya|0": 0,
"community|acva:Mauritania|0": 0,
"community|acva:Mesopotamia_civilization|0": 0,
"community|acva:Morocco|0": 0,
"community|acva:Oman|0": 0,
"community|acva:Palestine|0": 0,
"community|acva:Qatar|0": 0,
"community|acva:Saudi_Arabia|0": 0,
"community|acva:Somalia|0": 0,
"community|acva:Sudan|0": 0,
"community|acva:Syria|0": 0,
"community|acva:Tunisia|0": 0,
"community|acva:United_Arab_Emirates|0": 0,
"community|acva:Yemen|0": 0,
"community|acva:communication|0": 0,
"community|acva:computer_and_phone|0": 0,
"community|acva:daily_life|0": 0,
"community|acva:entertainment|0": 0,
"community|alghafa:mcq_exams_test_ar|0": 0,
"community|alghafa:meta_ar_dialects|0": 0,
"community|alghafa:meta_ar_msa|0": 0,
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0,
"community|alghafa:multiple_choice_sentiment_task|0": 0,
"community|arabic_exams|0": 0,
"community|arabic_mmlu:abstract_algebra|0": 0,
"community|arabic_mmlu:anatomy|0": 0,
"community|arabic_mmlu:astronomy|0": 0,
"community|arabic_mmlu:business_ethics|0": 0,
"community|arabic_mmlu:clinical_knowledge|0": 0,
"community|arabic_mmlu:college_biology|0": 0,
"community|arabic_mmlu:college_chemistry|0": 0,
"community|arabic_mmlu:college_computer_science|0": 0,
"community|arabic_mmlu:college_mathematics|0": 0,
"community|arabic_mmlu:college_medicine|0": 0,
"community|arabic_mmlu:college_physics|0": 0,
"community|arabic_mmlu:computer_security|0": 0,
"community|arabic_mmlu:conceptual_physics|0": 0,
"community|arabic_mmlu:econometrics|0": 0,
"community|arabic_mmlu:electrical_engineering|0": 0,
"community|arabic_mmlu:elementary_mathematics|0": 0,
"community|arabic_mmlu:formal_logic|0": 0,
"community|arabic_mmlu:global_facts|0": 0,
"community|arabic_mmlu:high_school_biology|0": 0,
"community|arabic_mmlu:high_school_chemistry|0": 0,
"community|arabic_mmlu:high_school_computer_science|0": 0,
"community|arabic_mmlu:high_school_european_history|0": 0,
"community|arabic_mmlu:high_school_geography|0": 0,
"community|arabic_mmlu:high_school_government_and_politics|0": 0,
"community|arabic_mmlu:high_school_macroeconomics|0": 0,
"community|arabic_mmlu:high_school_mathematics|0": 0,
"community|arabic_mmlu:high_school_microeconomics|0": 0,
"community|arabic_mmlu:high_school_physics|0": 0,
"community|arabic_mmlu:high_school_psychology|0": 0,
"community|arabic_mmlu:high_school_statistics|0": 0,
"community|arabic_mmlu:high_school_us_history|0": 0,
"community|arabic_mmlu:high_school_world_history|0": 0,
"community|arabic_mmlu:human_aging|0": 0,
"community|arabic_mmlu:human_sexuality|0": 0,
"community|arabic_mmlu:international_law|0": 0,
"community|arabic_mmlu:jurisprudence|0": 0,
"community|arabic_mmlu:logical_fallacies|0": 0,
"community|arabic_mmlu:machine_learning|0": 0,
"community|arabic_mmlu:management|0": 0,
"community|arabic_mmlu:marketing|0": 0,
"community|arabic_mmlu:medical_genetics|0": 0,
"community|arabic_mmlu:miscellaneous|0": 0,
"community|arabic_mmlu:moral_disputes|0": 0,
"community|arabic_mmlu:moral_scenarios|0": 0,
"community|arabic_mmlu:nutrition|0": 0,
"community|arabic_mmlu:philosophy|0": 0,
"community|arabic_mmlu:prehistory|0": 0,
"community|arabic_mmlu:professional_accounting|0": 0,
"community|arabic_mmlu:professional_law|0": 0,
"community|arabic_mmlu:professional_medicine|0": 0,
"community|arabic_mmlu:professional_psychology|0": 0,
"community|arabic_mmlu:public_relations|0": 0,
"community|arabic_mmlu:security_studies|0": 0,
"community|arabic_mmlu:sociology|0": 0,
"community|arabic_mmlu:us_foreign_policy|0": 0,
"community|arabic_mmlu:virology|0": 0,
"community|arabic_mmlu:world_religions|0": 0,
"community|arc_challenge_okapi_ar|0": 0,
"community|arc_easy_ar|0": 0,
"community|boolq_ar|0": 0,
"community|copa_ext_ar|0": 0,
"community|hellaswag_okapi_ar|0": 0,
"community|openbook_qa_ext_ar|0": 0,
"community|piqa_ar|0": 0,
"community|race_ar|0": 0,
"community|sciq_ar|0": 0,
"community|toxigen_ar|0": 0,
"lighteval|xstory_cloze:ar|0": 0
},
"config_tasks": {
"community|acva:Algeria": {
"name": "acva:Algeria",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Algeria",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Ancient_Egypt": {
"name": "acva:Ancient_Egypt",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Ancient_Egypt",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 315,
"effective_num_docs": 315,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arab_Empire": {
"name": "acva:Arab_Empire",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arab_Empire",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Architecture": {
"name": "acva:Arabic_Architecture",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Architecture",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Art": {
"name": "acva:Arabic_Art",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Art",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Astronomy": {
"name": "acva:Arabic_Astronomy",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Astronomy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Calligraphy": {
"name": "acva:Arabic_Calligraphy",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Calligraphy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 255,
"effective_num_docs": 255,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Ceremony": {
"name": "acva:Arabic_Ceremony",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Ceremony",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 185,
"effective_num_docs": 185,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Clothing": {
"name": "acva:Arabic_Clothing",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Clothing",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Culture": {
"name": "acva:Arabic_Culture",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Culture",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Food": {
"name": "acva:Arabic_Food",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Food",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Funeral": {
"name": "acva:Arabic_Funeral",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Funeral",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 95,
"effective_num_docs": 95,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Geography": {
"name": "acva:Arabic_Geography",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Geography",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_History": {
"name": "acva:Arabic_History",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_History",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Language_Origin": {
"name": "acva:Arabic_Language_Origin",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Language_Origin",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 95,
"effective_num_docs": 95,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Literature": {
"name": "acva:Arabic_Literature",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Literature",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Math": {
"name": "acva:Arabic_Math",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Math",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Medicine": {
"name": "acva:Arabic_Medicine",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Medicine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Music": {
"name": "acva:Arabic_Music",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Music",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 139,
"effective_num_docs": 139,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Ornament": {
"name": "acva:Arabic_Ornament",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Ornament",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Philosophy": {
"name": "acva:Arabic_Philosophy",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Philosophy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Physics_and_Chemistry": {
"name": "acva:Arabic_Physics_and_Chemistry",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Physics_and_Chemistry",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Arabic_Wedding": {
"name": "acva:Arabic_Wedding",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Arabic_Wedding",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Bahrain": {
"name": "acva:Bahrain",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Bahrain",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Comoros": {
"name": "acva:Comoros",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Comoros",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Egypt_modern": {
"name": "acva:Egypt_modern",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Egypt_modern",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 95,
"effective_num_docs": 95,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromAncientEgypt": {
"name": "acva:InfluenceFromAncientEgypt",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromAncientEgypt",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromByzantium": {
"name": "acva:InfluenceFromByzantium",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromByzantium",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromChina": {
"name": "acva:InfluenceFromChina",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromChina",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromGreece": {
"name": "acva:InfluenceFromGreece",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromGreece",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromIslam": {
"name": "acva:InfluenceFromIslam",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromIslam",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromPersia": {
"name": "acva:InfluenceFromPersia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromPersia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 175,
"effective_num_docs": 175,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:InfluenceFromRome": {
"name": "acva:InfluenceFromRome",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "InfluenceFromRome",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Iraq": {
"name": "acva:Iraq",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Iraq",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 85,
"effective_num_docs": 85,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Islam_Education": {
"name": "acva:Islam_Education",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Islam_Education",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Islam_branches_and_schools": {
"name": "acva:Islam_branches_and_schools",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Islam_branches_and_schools",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 175,
"effective_num_docs": 175,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Islamic_law_system": {
"name": "acva:Islamic_law_system",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Islamic_law_system",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Jordan": {
"name": "acva:Jordan",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Jordan",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Kuwait": {
"name": "acva:Kuwait",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Kuwait",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Lebanon": {
"name": "acva:Lebanon",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Lebanon",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Libya": {
"name": "acva:Libya",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Libya",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Mauritania": {
"name": "acva:Mauritania",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Mauritania",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Mesopotamia_civilization": {
"name": "acva:Mesopotamia_civilization",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Mesopotamia_civilization",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 155,
"effective_num_docs": 155,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Morocco": {
"name": "acva:Morocco",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Morocco",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Oman": {
"name": "acva:Oman",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Oman",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Palestine": {
"name": "acva:Palestine",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Palestine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 85,
"effective_num_docs": 85,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Qatar": {
"name": "acva:Qatar",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Qatar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Saudi_Arabia": {
"name": "acva:Saudi_Arabia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Saudi_Arabia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 195,
"effective_num_docs": 195,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Somalia": {
"name": "acva:Somalia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Somalia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Sudan": {
"name": "acva:Sudan",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Sudan",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Syria": {
"name": "acva:Syria",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Syria",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Tunisia": {
"name": "acva:Tunisia",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Tunisia",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 45,
"effective_num_docs": 45,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:United_Arab_Emirates": {
"name": "acva:United_Arab_Emirates",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "United_Arab_Emirates",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 85,
"effective_num_docs": 85,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:Yemen": {
"name": "acva:Yemen",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "Yemen",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 10,
"effective_num_docs": 10,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:communication": {
"name": "acva:communication",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "communication",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 364,
"effective_num_docs": 364,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:computer_and_phone": {
"name": "acva:computer_and_phone",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "computer_and_phone",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 295,
"effective_num_docs": 295,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:daily_life": {
"name": "acva:daily_life",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "daily_life",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 337,
"effective_num_docs": 337,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|acva:entertainment": {
"name": "acva:entertainment",
"prompt_function": "acva",
"hf_repo": "OALL/ACVA",
"hf_subset": "entertainment",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 295,
"effective_num_docs": 295,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:mcq_exams_test_ar": {
"name": "alghafa:mcq_exams_test_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "mcq_exams_test_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 557,
"effective_num_docs": 557,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:meta_ar_dialects": {
"name": "alghafa:meta_ar_dialects",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_dialects",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 5395,
"effective_num_docs": 5395,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:meta_ar_msa": {
"name": "alghafa:meta_ar_msa",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_msa",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": {
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_facts_truefalse_balanced_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 75,
"effective_num_docs": 75,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task": {
"name": "alghafa:multiple_choice_grounded_statement_soqal_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_soqal_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": {
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": {
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 7995,
"effective_num_docs": 7995,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task": {
"name": "alghafa:multiple_choice_rating_sentiment_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 5995,
"effective_num_docs": 5995,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|alghafa:multiple_choice_sentiment_task": {
"name": "alghafa:multiple_choice_sentiment_task",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_sentiment_task",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1720,
"effective_num_docs": 1720,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_exams": {
"name": "arabic_exams",
"prompt_function": "arabic_exams",
"hf_repo": "OALL/Arabic_EXAMS",
"hf_subset": "default",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 537,
"effective_num_docs": 537,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:abstract_algebra": {
"name": "arabic_mmlu:abstract_algebra",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "abstract_algebra",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:anatomy": {
"name": "arabic_mmlu:anatomy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "anatomy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 135,
"effective_num_docs": 135,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:astronomy": {
"name": "arabic_mmlu:astronomy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "astronomy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 152,
"effective_num_docs": 152,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:business_ethics": {
"name": "arabic_mmlu:business_ethics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "business_ethics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:clinical_knowledge": {
"name": "arabic_mmlu:clinical_knowledge",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "clinical_knowledge",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_biology": {
"name": "arabic_mmlu:college_biology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_biology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 144,
"effective_num_docs": 144,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_chemistry": {
"name": "arabic_mmlu:college_chemistry",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_chemistry",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_computer_science": {
"name": "arabic_mmlu:college_computer_science",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_computer_science",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_mathematics": {
"name": "arabic_mmlu:college_mathematics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_mathematics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_medicine": {
"name": "arabic_mmlu:college_medicine",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_medicine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 173,
"effective_num_docs": 173,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:college_physics": {
"name": "arabic_mmlu:college_physics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "college_physics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:computer_security": {
"name": "arabic_mmlu:computer_security",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "computer_security",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:conceptual_physics": {
"name": "arabic_mmlu:conceptual_physics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "conceptual_physics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 235,
"effective_num_docs": 235,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:econometrics": {
"name": "arabic_mmlu:econometrics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "econometrics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 114,
"effective_num_docs": 114,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:electrical_engineering": {
"name": "arabic_mmlu:electrical_engineering",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "electrical_engineering",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:elementary_mathematics": {
"name": "arabic_mmlu:elementary_mathematics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "elementary_mathematics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 378,
"effective_num_docs": 378,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:formal_logic": {
"name": "arabic_mmlu:formal_logic",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "formal_logic",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 126,
"effective_num_docs": 126,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:global_facts": {
"name": "arabic_mmlu:global_facts",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "global_facts",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_biology": {
"name": "arabic_mmlu:high_school_biology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_biology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 310,
"effective_num_docs": 310,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_chemistry": {
"name": "arabic_mmlu:high_school_chemistry",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_chemistry",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 203,
"effective_num_docs": 203,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_computer_science": {
"name": "arabic_mmlu:high_school_computer_science",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_computer_science",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_european_history": {
"name": "arabic_mmlu:high_school_european_history",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_european_history",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 165,
"effective_num_docs": 165,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_geography": {
"name": "arabic_mmlu:high_school_geography",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_geography",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 198,
"effective_num_docs": 198,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_government_and_politics": {
"name": "arabic_mmlu:high_school_government_and_politics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_government_and_politics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 193,
"effective_num_docs": 193,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_macroeconomics": {
"name": "arabic_mmlu:high_school_macroeconomics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_macroeconomics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 390,
"effective_num_docs": 390,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_mathematics": {
"name": "arabic_mmlu:high_school_mathematics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_mathematics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 270,
"effective_num_docs": 270,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_microeconomics": {
"name": "arabic_mmlu:high_school_microeconomics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_microeconomics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_physics": {
"name": "arabic_mmlu:high_school_physics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_physics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 151,
"effective_num_docs": 151,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_psychology": {
"name": "arabic_mmlu:high_school_psychology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_psychology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 545,
"effective_num_docs": 545,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_statistics": {
"name": "arabic_mmlu:high_school_statistics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_statistics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 216,
"effective_num_docs": 216,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_us_history": {
"name": "arabic_mmlu:high_school_us_history",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_us_history",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 204,
"effective_num_docs": 204,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:high_school_world_history": {
"name": "arabic_mmlu:high_school_world_history",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "high_school_world_history",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 237,
"effective_num_docs": 237,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:human_aging": {
"name": "arabic_mmlu:human_aging",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "human_aging",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 223,
"effective_num_docs": 223,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:human_sexuality": {
"name": "arabic_mmlu:human_sexuality",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "human_sexuality",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 131,
"effective_num_docs": 131,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:international_law": {
"name": "arabic_mmlu:international_law",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "international_law",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 121,
"effective_num_docs": 121,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:jurisprudence": {
"name": "arabic_mmlu:jurisprudence",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "jurisprudence",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 108,
"effective_num_docs": 108,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:logical_fallacies": {
"name": "arabic_mmlu:logical_fallacies",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "logical_fallacies",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 163,
"effective_num_docs": 163,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:machine_learning": {
"name": "arabic_mmlu:machine_learning",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "machine_learning",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 112,
"effective_num_docs": 112,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:management": {
"name": "arabic_mmlu:management",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "management",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 103,
"effective_num_docs": 103,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:marketing": {
"name": "arabic_mmlu:marketing",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "marketing",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 234,
"effective_num_docs": 234,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:medical_genetics": {
"name": "arabic_mmlu:medical_genetics",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "medical_genetics",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:miscellaneous": {
"name": "arabic_mmlu:miscellaneous",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "miscellaneous",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 783,
"effective_num_docs": 783,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:moral_disputes": {
"name": "arabic_mmlu:moral_disputes",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "moral_disputes",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 346,
"effective_num_docs": 346,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:moral_scenarios": {
"name": "arabic_mmlu:moral_scenarios",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "moral_scenarios",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:nutrition": {
"name": "arabic_mmlu:nutrition",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "nutrition",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 306,
"effective_num_docs": 306,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:philosophy": {
"name": "arabic_mmlu:philosophy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "philosophy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 311,
"effective_num_docs": 311,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:prehistory": {
"name": "arabic_mmlu:prehistory",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "prehistory",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 324,
"effective_num_docs": 324,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_accounting": {
"name": "arabic_mmlu:professional_accounting",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_accounting",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 282,
"effective_num_docs": 282,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_law": {
"name": "arabic_mmlu:professional_law",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_law",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1534,
"effective_num_docs": 1534,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_medicine": {
"name": "arabic_mmlu:professional_medicine",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_medicine",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 272,
"effective_num_docs": 272,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:professional_psychology": {
"name": "arabic_mmlu:professional_psychology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "professional_psychology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:public_relations": {
"name": "arabic_mmlu:public_relations",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "public_relations",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 110,
"effective_num_docs": 110,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:security_studies": {
"name": "arabic_mmlu:security_studies",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "security_studies",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 245,
"effective_num_docs": 245,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:sociology": {
"name": "arabic_mmlu:sociology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "sociology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 201,
"effective_num_docs": 201,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:us_foreign_policy": {
"name": "arabic_mmlu:us_foreign_policy",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "us_foreign_policy",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:virology": {
"name": "arabic_mmlu:virology",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "virology",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 166,
"effective_num_docs": 166,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arabic_mmlu:world_religions": {
"name": "arabic_mmlu:world_religions",
"prompt_function": "mmlu_arabic",
"hf_repo": "OALL/Arabic_MMLU",
"hf_subset": "world_religions",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 171,
"effective_num_docs": 171,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arc_challenge_okapi_ar": {
"name": "arc_challenge_okapi_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "arc_challenge_okapi_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1160,
"effective_num_docs": 1160,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|arc_easy_ar": {
"name": "arc_easy_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "arc_easy_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 2364,
"effective_num_docs": 2364,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|boolq_ar": {
"name": "boolq_ar",
"prompt_function": "boolq_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "boolq_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 3260,
"effective_num_docs": 3260,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|copa_ext_ar": {
"name": "copa_ext_ar",
"prompt_function": "copa_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "copa_ext_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 90,
"effective_num_docs": 90,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|hellaswag_okapi_ar": {
"name": "hellaswag_okapi_ar",
"prompt_function": "hellaswag_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "hellaswag_okapi_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 9171,
"effective_num_docs": 9171,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|openbook_qa_ext_ar": {
"name": "openbook_qa_ext_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "openbook_qa_ext_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 495,
"effective_num_docs": 495,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|piqa_ar": {
"name": "piqa_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "piqa_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 1833,
"effective_num_docs": 1833,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|race_ar": {
"name": "race_ar",
"prompt_function": "alghafa_prompt",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "race_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 4929,
"effective_num_docs": 4929,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|sciq_ar": {
"name": "sciq_ar",
"prompt_function": "sciq_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "sciq_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 995,
"effective_num_docs": 995,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"community|toxigen_ar": {
"name": "toxigen_ar",
"prompt_function": "toxigen_prompt_arabic",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated",
"hf_subset": "toxigen_ar",
"metric": [
"loglikelihood_acc_norm"
],
"hf_avail_splits": [
"test",
"validation"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"community"
],
"original_num_docs": 935,
"effective_num_docs": 935,
"trust_dataset": null,
"must_remove_duplicate_docs": null,
"version": 0
},
"lighteval|xstory_cloze:ar": {
"name": "xstory_cloze:ar",
"prompt_function": "storycloze",
"hf_repo": "juletxara/xstory_cloze",
"hf_subset": "ar",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"training",
"eval"
],
"evaluation_splits": [
"eval"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 1511,
"effective_num_docs": 1511,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
}
},
"summary_tasks": {
"community|acva:Algeria|0": {
"hashes": {
"hash_examples": "da5a3003cd46f6f9",
"hash_full_prompts": "da5a3003cd46f6f9",
"hash_input_tokens": "a468f82f5d9c6854",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Ancient_Egypt|0": {
"hashes": {
"hash_examples": "52d6f767fede195b",
"hash_full_prompts": "52d6f767fede195b",
"hash_input_tokens": "ab68d5711db9b9ae",
"hash_cont_tokens": "faf8ec77d06d1bfe"
},
"truncated": 0,
"non_truncated": 315,
"padded": 630,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arab_Empire|0": {
"hashes": {
"hash_examples": "8dacff6a79804a75",
"hash_full_prompts": "8dacff6a79804a75",
"hash_input_tokens": "a0cfc0a31ecb81b1",
"hash_cont_tokens": "f33c197aeef98c47"
},
"truncated": 0,
"non_truncated": 265,
"padded": 530,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Architecture|0": {
"hashes": {
"hash_examples": "df286cd862d9f6bb",
"hash_full_prompts": "df286cd862d9f6bb",
"hash_input_tokens": "bda15fd7a84af094",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Art|0": {
"hashes": {
"hash_examples": "112883d764118a49",
"hash_full_prompts": "112883d764118a49",
"hash_input_tokens": "94f5dc97af51afcc",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Astronomy|0": {
"hashes": {
"hash_examples": "20dcdf2454bf8671",
"hash_full_prompts": "20dcdf2454bf8671",
"hash_input_tokens": "c51b4ba152e28548",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Calligraphy|0": {
"hashes": {
"hash_examples": "3a9f9d1ebe868a15",
"hash_full_prompts": "3a9f9d1ebe868a15",
"hash_input_tokens": "6d0d833e28b72e5b",
"hash_cont_tokens": "9dee4b8a4f039fe7"
},
"truncated": 0,
"non_truncated": 255,
"padded": 510,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Ceremony|0": {
"hashes": {
"hash_examples": "c927630f8d2f44da",
"hash_full_prompts": "c927630f8d2f44da",
"hash_input_tokens": "096dbc4773da1ef6",
"hash_cont_tokens": "334d4f35b8161677"
},
"truncated": 0,
"non_truncated": 185,
"padded": 370,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Clothing|0": {
"hashes": {
"hash_examples": "6ad0740c2ac6ac92",
"hash_full_prompts": "6ad0740c2ac6ac92",
"hash_input_tokens": "7663885a3787ac53",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Culture|0": {
"hashes": {
"hash_examples": "2177bd857ad872ae",
"hash_full_prompts": "2177bd857ad872ae",
"hash_input_tokens": "16cc711636c3c0cb",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Food|0": {
"hashes": {
"hash_examples": "a6ada65b71d7c9c5",
"hash_full_prompts": "a6ada65b71d7c9c5",
"hash_input_tokens": "39c9c91d777e876b",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Funeral|0": {
"hashes": {
"hash_examples": "fcee39dc29eaae91",
"hash_full_prompts": "fcee39dc29eaae91",
"hash_input_tokens": "f208927fafa46368",
"hash_cont_tokens": "369e2f2e07e4b988"
},
"truncated": 0,
"non_truncated": 95,
"padded": 190,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Geography|0": {
"hashes": {
"hash_examples": "d36eda7c89231c02",
"hash_full_prompts": "d36eda7c89231c02",
"hash_input_tokens": "f1dca912dfd9e048",
"hash_cont_tokens": "640ab5ff98ba7f94"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_History|0": {
"hashes": {
"hash_examples": "6354ac0d6db6a5fc",
"hash_full_prompts": "6354ac0d6db6a5fc",
"hash_input_tokens": "40e86a514925b382",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Language_Origin|0": {
"hashes": {
"hash_examples": "ddc967c8aca34402",
"hash_full_prompts": "ddc967c8aca34402",
"hash_input_tokens": "41fb781ea47278e8",
"hash_cont_tokens": "369e2f2e07e4b988"
},
"truncated": 0,
"non_truncated": 95,
"padded": 190,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Literature|0": {
"hashes": {
"hash_examples": "4305379fd46be5d8",
"hash_full_prompts": "4305379fd46be5d8",
"hash_input_tokens": "80a3bae2fdeae26d",
"hash_cont_tokens": "640ab5ff98ba7f94"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Math|0": {
"hashes": {
"hash_examples": "dec621144f4d28be",
"hash_full_prompts": "dec621144f4d28be",
"hash_input_tokens": "8f90756790ad2368",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Medicine|0": {
"hashes": {
"hash_examples": "2b344cdae9495ff2",
"hash_full_prompts": "2b344cdae9495ff2",
"hash_input_tokens": "7ffc5f7e10959770",
"hash_cont_tokens": "640ab5ff98ba7f94"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Music|0": {
"hashes": {
"hash_examples": "0c54624d881944ce",
"hash_full_prompts": "0c54624d881944ce",
"hash_input_tokens": "1074fb2fede7fc26",
"hash_cont_tokens": "10523c1c9ae4d1e9"
},
"truncated": 0,
"non_truncated": 139,
"padded": 278,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Ornament|0": {
"hashes": {
"hash_examples": "251a4a84289d8bc1",
"hash_full_prompts": "251a4a84289d8bc1",
"hash_input_tokens": "be2d9d51a0215562",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Philosophy|0": {
"hashes": {
"hash_examples": "3f86fb9c94c13d22",
"hash_full_prompts": "3f86fb9c94c13d22",
"hash_input_tokens": "57b5170986e4d92d",
"hash_cont_tokens": "640ab5ff98ba7f94"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Physics_and_Chemistry|0": {
"hashes": {
"hash_examples": "8fec65af3695b62a",
"hash_full_prompts": "8fec65af3695b62a",
"hash_input_tokens": "79b041a4862091cb",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Arabic_Wedding|0": {
"hashes": {
"hash_examples": "9cc3477184d7a4b8",
"hash_full_prompts": "9cc3477184d7a4b8",
"hash_input_tokens": "cd74c23ddcf86c41",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Bahrain|0": {
"hashes": {
"hash_examples": "c92e803a0fa8b9e2",
"hash_full_prompts": "c92e803a0fa8b9e2",
"hash_input_tokens": "daf7ce5fcb286be4",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Comoros|0": {
"hashes": {
"hash_examples": "06e5d4bba8e54cae",
"hash_full_prompts": "06e5d4bba8e54cae",
"hash_input_tokens": "5a8d9ddbe96b44a6",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Egypt_modern|0": {
"hashes": {
"hash_examples": "c6ec369164f93446",
"hash_full_prompts": "c6ec369164f93446",
"hash_input_tokens": "e3bcbb196115a057",
"hash_cont_tokens": "369e2f2e07e4b988"
},
"truncated": 0,
"non_truncated": 95,
"padded": 190,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromAncientEgypt|0": {
"hashes": {
"hash_examples": "b9d56d74818b9bd4",
"hash_full_prompts": "b9d56d74818b9bd4",
"hash_input_tokens": "04f08b4873879cfa",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromByzantium|0": {
"hashes": {
"hash_examples": "5316c9624e7e59b8",
"hash_full_prompts": "5316c9624e7e59b8",
"hash_input_tokens": "640280f2413ec463",
"hash_cont_tokens": "640ab5ff98ba7f94"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromChina|0": {
"hashes": {
"hash_examples": "87894bce95a56411",
"hash_full_prompts": "87894bce95a56411",
"hash_input_tokens": "0328ffb7527e04b6",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromGreece|0": {
"hashes": {
"hash_examples": "0baa78a27e469312",
"hash_full_prompts": "0baa78a27e469312",
"hash_input_tokens": "2441e042445615e3",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromIslam|0": {
"hashes": {
"hash_examples": "0c2532cde6541ff2",
"hash_full_prompts": "0c2532cde6541ff2",
"hash_input_tokens": "54fffa3990a5ac8d",
"hash_cont_tokens": "640ab5ff98ba7f94"
},
"truncated": 0,
"non_truncated": 145,
"padded": 290,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromPersia|0": {
"hashes": {
"hash_examples": "efcd8112dc53c6e5",
"hash_full_prompts": "efcd8112dc53c6e5",
"hash_input_tokens": "00f3571bc77a055b",
"hash_cont_tokens": "ff491831b4391c7b"
},
"truncated": 0,
"non_truncated": 175,
"padded": 350,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:InfluenceFromRome|0": {
"hashes": {
"hash_examples": "9db61480e2e85fd3",
"hash_full_prompts": "9db61480e2e85fd3",
"hash_input_tokens": "d65117c163cc8fa8",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Iraq|0": {
"hashes": {
"hash_examples": "96dac3dfa8d2f41f",
"hash_full_prompts": "96dac3dfa8d2f41f",
"hash_input_tokens": "c29332876f686c1f",
"hash_cont_tokens": "b323f8ff8aeb401e"
},
"truncated": 0,
"non_truncated": 85,
"padded": 170,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Islam_Education|0": {
"hashes": {
"hash_examples": "0d80355f6a4cb51b",
"hash_full_prompts": "0d80355f6a4cb51b",
"hash_input_tokens": "038d92f5caa50d62",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Islam_branches_and_schools|0": {
"hashes": {
"hash_examples": "5cedce1be2c3ad50",
"hash_full_prompts": "5cedce1be2c3ad50",
"hash_input_tokens": "f15ae187d3f49e51",
"hash_cont_tokens": "ff491831b4391c7b"
},
"truncated": 0,
"non_truncated": 175,
"padded": 350,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Islamic_law_system|0": {
"hashes": {
"hash_examples": "c0e6db8bc84e105e",
"hash_full_prompts": "c0e6db8bc84e105e",
"hash_input_tokens": "546615b2ab3b58d4",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Jordan|0": {
"hashes": {
"hash_examples": "33deb5b4e5ddd6a1",
"hash_full_prompts": "33deb5b4e5ddd6a1",
"hash_input_tokens": "c05500addd30dccf",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Kuwait|0": {
"hashes": {
"hash_examples": "eb41773346d7c46c",
"hash_full_prompts": "eb41773346d7c46c",
"hash_input_tokens": "90beb5f421d38f23",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Lebanon|0": {
"hashes": {
"hash_examples": "25932dbf4c13d34f",
"hash_full_prompts": "25932dbf4c13d34f",
"hash_input_tokens": "e8e154330e1800c9",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Libya|0": {
"hashes": {
"hash_examples": "f2c4db63cd402926",
"hash_full_prompts": "f2c4db63cd402926",
"hash_input_tokens": "c4eaaedcf85500b6",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Mauritania|0": {
"hashes": {
"hash_examples": "8723ab5fdf286b54",
"hash_full_prompts": "8723ab5fdf286b54",
"hash_input_tokens": "4cc5a73a9b339ea7",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Mesopotamia_civilization|0": {
"hashes": {
"hash_examples": "c33f5502a6130ca9",
"hash_full_prompts": "c33f5502a6130ca9",
"hash_input_tokens": "84769d5824aef731",
"hash_cont_tokens": "7c4c99936f5360cc"
},
"truncated": 0,
"non_truncated": 155,
"padded": 310,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Morocco|0": {
"hashes": {
"hash_examples": "588a5ed27904b1ae",
"hash_full_prompts": "588a5ed27904b1ae",
"hash_input_tokens": "d07b06e741b9e9e9",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Oman|0": {
"hashes": {
"hash_examples": "d447c52b94248b69",
"hash_full_prompts": "d447c52b94248b69",
"hash_input_tokens": "2a85d5b3480f9a2f",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Palestine|0": {
"hashes": {
"hash_examples": "19197e076ad14ff5",
"hash_full_prompts": "19197e076ad14ff5",
"hash_input_tokens": "1a6440040d6276d6",
"hash_cont_tokens": "b323f8ff8aeb401e"
},
"truncated": 0,
"non_truncated": 85,
"padded": 170,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Qatar|0": {
"hashes": {
"hash_examples": "cf0736fa185b28f6",
"hash_full_prompts": "cf0736fa185b28f6",
"hash_input_tokens": "fea2463eb15d2080",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Saudi_Arabia|0": {
"hashes": {
"hash_examples": "69beda6e1b85a08d",
"hash_full_prompts": "69beda6e1b85a08d",
"hash_input_tokens": "7a691ce6c6354c06",
"hash_cont_tokens": "1129dee7109539f2"
},
"truncated": 0,
"non_truncated": 195,
"padded": 390,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Somalia|0": {
"hashes": {
"hash_examples": "b387940c65784fbf",
"hash_full_prompts": "b387940c65784fbf",
"hash_input_tokens": "84bb1575a58cc0bc",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Sudan|0": {
"hashes": {
"hash_examples": "e02c32b9d2dd0c3f",
"hash_full_prompts": "e02c32b9d2dd0c3f",
"hash_input_tokens": "4db9137b67697dac",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Syria|0": {
"hashes": {
"hash_examples": "60a6f8fe73bda4bb",
"hash_full_prompts": "60a6f8fe73bda4bb",
"hash_input_tokens": "c7ed9ada2e610085",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Tunisia|0": {
"hashes": {
"hash_examples": "34bb15d3830c5649",
"hash_full_prompts": "34bb15d3830c5649",
"hash_input_tokens": "f128cb115f72f5d5",
"hash_cont_tokens": "46d74ca33ef53415"
},
"truncated": 0,
"non_truncated": 45,
"padded": 90,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:United_Arab_Emirates|0": {
"hashes": {
"hash_examples": "98a0ba78172718ce",
"hash_full_prompts": "98a0ba78172718ce",
"hash_input_tokens": "dad06aee610b6e11",
"hash_cont_tokens": "b323f8ff8aeb401e"
},
"truncated": 0,
"non_truncated": 85,
"padded": 170,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:Yemen|0": {
"hashes": {
"hash_examples": "18e9bcccbb4ced7a",
"hash_full_prompts": "18e9bcccbb4ced7a",
"hash_input_tokens": "be0041a85e247401",
"hash_cont_tokens": "fa8ac073fd52ca3a"
},
"truncated": 0,
"non_truncated": 10,
"padded": 20,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:communication|0": {
"hashes": {
"hash_examples": "9ff28ab5eab5c97b",
"hash_full_prompts": "9ff28ab5eab5c97b",
"hash_input_tokens": "3350c883101c2e56",
"hash_cont_tokens": "c29bffa9827bd9ec"
},
"truncated": 0,
"non_truncated": 364,
"padded": 728,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:computer_and_phone|0": {
"hashes": {
"hash_examples": "37bac2f086aaf6c2",
"hash_full_prompts": "37bac2f086aaf6c2",
"hash_input_tokens": "e499c74fd65693b4",
"hash_cont_tokens": "66715be6f922eb68"
},
"truncated": 0,
"non_truncated": 295,
"padded": 590,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:daily_life|0": {
"hashes": {
"hash_examples": "bf07363c1c252e2f",
"hash_full_prompts": "bf07363c1c252e2f",
"hash_input_tokens": "887c73c27215f9a8",
"hash_cont_tokens": "cc11782ecc0590e2"
},
"truncated": 0,
"non_truncated": 337,
"padded": 674,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|acva:entertainment|0": {
"hashes": {
"hash_examples": "37077bc00f0ac56a",
"hash_full_prompts": "37077bc00f0ac56a",
"hash_input_tokens": "a45384e6a0d47dd8",
"hash_cont_tokens": "66715be6f922eb68"
},
"truncated": 0,
"non_truncated": 295,
"padded": 590,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:mcq_exams_test_ar|0": {
"hashes": {
"hash_examples": "c07a5e78c5c0b8fe",
"hash_full_prompts": "c07a5e78c5c0b8fe",
"hash_input_tokens": "7b7b1edc3238d18b",
"hash_cont_tokens": "5cfbb1bea9f8b5a6"
},
"truncated": 0,
"non_truncated": 557,
"padded": 2228,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_dialects|0": {
"hashes": {
"hash_examples": "c0b6081f83e14064",
"hash_full_prompts": "c0b6081f83e14064",
"hash_input_tokens": "c91d1e2ee877370c",
"hash_cont_tokens": "9d97824f41a263aa"
},
"truncated": 0,
"non_truncated": 5395,
"padded": 21533,
"non_padded": 47,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_msa|0": {
"hashes": {
"hash_examples": "64eb78a7c5b7484b",
"hash_full_prompts": "64eb78a7c5b7484b",
"hash_input_tokens": "defe0d16c14da56c",
"hash_cont_tokens": "a812bae7a78d4cb0"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3572,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"hashes": {
"hash_examples": "54fc3502c1c02c06",
"hash_full_prompts": "54fc3502c1c02c06",
"hash_input_tokens": "3c4ed8e65ec0d03c",
"hash_cont_tokens": "f1e737a4a73b78ac"
},
"truncated": 0,
"non_truncated": 75,
"padded": 150,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"hashes": {
"hash_examples": "46572d83696552ae",
"hash_full_prompts": "46572d83696552ae",
"hash_input_tokens": "acacdf1fcd2f19ad",
"hash_cont_tokens": "5ede1168c1d18b70"
},
"truncated": 0,
"non_truncated": 150,
"padded": 747,
"non_padded": 3,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"hashes": {
"hash_examples": "f430d97ff715bc1c",
"hash_full_prompts": "f430d97ff715bc1c",
"hash_input_tokens": "6bb53da3e402d9f1",
"hash_cont_tokens": "7146f3c408fa7be7"
},
"truncated": 0,
"non_truncated": 150,
"padded": 745,
"non_padded": 5,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"hashes": {
"hash_examples": "6b70a7416584f98c",
"hash_full_prompts": "6b70a7416584f98c",
"hash_input_tokens": "e457eff104f12b14",
"hash_cont_tokens": "80d31b7d3c4ff28a"
},
"truncated": 0,
"non_truncated": 7995,
"padded": 15990,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"hashes": {
"hash_examples": "bc2005cc9d2f436e",
"hash_full_prompts": "bc2005cc9d2f436e",
"hash_input_tokens": "b6fc9e51971bd17a",
"hash_cont_tokens": "2f1bb39d57cade63"
},
"truncated": 0,
"non_truncated": 5995,
"padded": 17911,
"non_padded": 74,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"hashes": {
"hash_examples": "6fb0e254ea5945d8",
"hash_full_prompts": "6fb0e254ea5945d8",
"hash_input_tokens": "af32a8508a505e69",
"hash_cont_tokens": "cb3e1cfc23461022"
},
"truncated": 0,
"non_truncated": 1720,
"padded": 5121,
"non_padded": 39,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_exams|0": {
"hashes": {
"hash_examples": "6d721df351722656",
"hash_full_prompts": "6d721df351722656",
"hash_input_tokens": "ed9fa7e0ee9834ea",
"hash_cont_tokens": "e3a374ff3dc59047"
},
"truncated": 0,
"non_truncated": 537,
"padded": 2112,
"non_padded": 36,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:abstract_algebra|0": {
"hashes": {
"hash_examples": "f2ddca8f45c0a511",
"hash_full_prompts": "f2ddca8f45c0a511",
"hash_input_tokens": "56e6d8b2e555717b",
"hash_cont_tokens": "01ab263bc1484312"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:anatomy|0": {
"hashes": {
"hash_examples": "dfdbc1b83107668d",
"hash_full_prompts": "dfdbc1b83107668d",
"hash_input_tokens": "178d6a807ea83566",
"hash_cont_tokens": "683cb1282f34e4e2"
},
"truncated": 0,
"non_truncated": 135,
"padded": 532,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:astronomy|0": {
"hashes": {
"hash_examples": "9736a606002a848e",
"hash_full_prompts": "9736a606002a848e",
"hash_input_tokens": "51c98261bc673e34",
"hash_cont_tokens": "81214c244918fe5b"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:business_ethics|0": {
"hashes": {
"hash_examples": "735e452fbb6dc63d",
"hash_full_prompts": "735e452fbb6dc63d",
"hash_input_tokens": "c754f74e7e8624c8",
"hash_cont_tokens": "01ab263bc1484312"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:clinical_knowledge|0": {
"hashes": {
"hash_examples": "6ab0ca4da98aedcf",
"hash_full_prompts": "6ab0ca4da98aedcf",
"hash_input_tokens": "9154608425adef8a",
"hash_cont_tokens": "916541bd54fae73d"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1052,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_biology|0": {
"hashes": {
"hash_examples": "17e4e390848018a4",
"hash_full_prompts": "17e4e390848018a4",
"hash_input_tokens": "308d6830bace8088",
"hash_cont_tokens": "a038a4390627f9a2"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_chemistry|0": {
"hashes": {
"hash_examples": "4abb169f6dfd234b",
"hash_full_prompts": "4abb169f6dfd234b",
"hash_input_tokens": "a72061fc399b169d",
"hash_cont_tokens": "01ab263bc1484312"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_computer_science|0": {
"hashes": {
"hash_examples": "a369e2e941358a1e",
"hash_full_prompts": "a369e2e941358a1e",
"hash_input_tokens": "1c7c7a1930fb6621",
"hash_cont_tokens": "01ab263bc1484312"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_mathematics|0": {
"hashes": {
"hash_examples": "d7be03b8b6020bff",
"hash_full_prompts": "d7be03b8b6020bff",
"hash_input_tokens": "adf6f4dad693c333",
"hash_cont_tokens": "01ab263bc1484312"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_medicine|0": {
"hashes": {
"hash_examples": "0518a00f097346bf",
"hash_full_prompts": "0518a00f097346bf",
"hash_input_tokens": "fc64803319d75ffa",
"hash_cont_tokens": "76b086f3f689fcc2"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:college_physics|0": {
"hashes": {
"hash_examples": "5d842cd49bc70e12",
"hash_full_prompts": "5d842cd49bc70e12",
"hash_input_tokens": "8125c5f60c83cfc5",
"hash_cont_tokens": "7c056fb9eeca1f91"
},
"truncated": 0,
"non_truncated": 102,
"padded": 404,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:computer_security|0": {
"hashes": {
"hash_examples": "8e85d9f85be9b32f",
"hash_full_prompts": "8e85d9f85be9b32f",
"hash_input_tokens": "8b2f52a922058a24",
"hash_cont_tokens": "01ab263bc1484312"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:conceptual_physics|0": {
"hashes": {
"hash_examples": "7964b55a0a49502b",
"hash_full_prompts": "7964b55a0a49502b",
"hash_input_tokens": "40a36ccb7b44690f",
"hash_cont_tokens": "3bfb3e6a24c21a93"
},
"truncated": 0,
"non_truncated": 235,
"padded": 916,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:econometrics|0": {
"hashes": {
"hash_examples": "1e192eae38347257",
"hash_full_prompts": "1e192eae38347257",
"hash_input_tokens": "fb9d57e682588379",
"hash_cont_tokens": "b3bcbca1386688ed"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:electrical_engineering|0": {
"hashes": {
"hash_examples": "cf97671d5c441da1",
"hash_full_prompts": "cf97671d5c441da1",
"hash_input_tokens": "f08c32184ec91446",
"hash_cont_tokens": "42e12784335b0008"
},
"truncated": 0,
"non_truncated": 145,
"padded": 572,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:elementary_mathematics|0": {
"hashes": {
"hash_examples": "6f49107ed43c40c5",
"hash_full_prompts": "6f49107ed43c40c5",
"hash_input_tokens": "cbe3245bb58eda10",
"hash_cont_tokens": "5f01ecb05cee8c6d"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1500,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:formal_logic|0": {
"hashes": {
"hash_examples": "7922c376008ba77b",
"hash_full_prompts": "7922c376008ba77b",
"hash_input_tokens": "f0d140224ae15b51",
"hash_cont_tokens": "b632f296bf1cb9f8"
},
"truncated": 0,
"non_truncated": 126,
"padded": 500,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:global_facts|0": {
"hashes": {
"hash_examples": "11f9813185047d5b",
"hash_full_prompts": "11f9813185047d5b",
"hash_input_tokens": "26be209ef9bb52c3",
"hash_cont_tokens": "01ab263bc1484312"
},
"truncated": 0,
"non_truncated": 100,
"padded": 384,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_biology|0": {
"hashes": {
"hash_examples": "2a804b1d90cbe66e",
"hash_full_prompts": "2a804b1d90cbe66e",
"hash_input_tokens": "a97efd583a387a0c",
"hash_cont_tokens": "baf168dded0f0841"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1232,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_chemistry|0": {
"hashes": {
"hash_examples": "0032168adabc53b4",
"hash_full_prompts": "0032168adabc53b4",
"hash_input_tokens": "da7c5af7de76e30c",
"hash_cont_tokens": "7de298abfe9522d5"
},
"truncated": 0,
"non_truncated": 203,
"padded": 804,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_computer_science|0": {
"hashes": {
"hash_examples": "f2fb8740f9df980f",
"hash_full_prompts": "f2fb8740f9df980f",
"hash_input_tokens": "d3736775694eb4fd",
"hash_cont_tokens": "01ab263bc1484312"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_european_history|0": {
"hashes": {
"hash_examples": "73509021e7e66435",
"hash_full_prompts": "73509021e7e66435",
"hash_input_tokens": "3e023dd2a3e11591",
"hash_cont_tokens": "cf73ff5b24b701df"
},
"truncated": 0,
"non_truncated": 165,
"padded": 660,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_geography|0": {
"hashes": {
"hash_examples": "9e08d1894940ff42",
"hash_full_prompts": "9e08d1894940ff42",
"hash_input_tokens": "6d7a678788f0148a",
"hash_cont_tokens": "1ce0ca1777207708"
},
"truncated": 0,
"non_truncated": 198,
"padded": 784,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_government_and_politics|0": {
"hashes": {
"hash_examples": "64b7e97817ca6c76",
"hash_full_prompts": "64b7e97817ca6c76",
"hash_input_tokens": "973952d9d908e655",
"hash_cont_tokens": "912816b6287f890f"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_macroeconomics|0": {
"hashes": {
"hash_examples": "9f582da8534bd2ef",
"hash_full_prompts": "9f582da8534bd2ef",
"hash_input_tokens": "351776c990a2b381",
"hash_cont_tokens": "f79f11260696a706"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_mathematics|0": {
"hashes": {
"hash_examples": "fd54f1c10d423c51",
"hash_full_prompts": "fd54f1c10d423c51",
"hash_input_tokens": "19e52b958a06dd81",
"hash_cont_tokens": "134a99fe2ef8db3d"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1068,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_microeconomics|0": {
"hashes": {
"hash_examples": "7037896925aaf42f",
"hash_full_prompts": "7037896925aaf42f",
"hash_input_tokens": "a6438709f3df1497",
"hash_cont_tokens": "0a5f4f452e615669"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_physics|0": {
"hashes": {
"hash_examples": "60c3776215167dae",
"hash_full_prompts": "60c3776215167dae",
"hash_input_tokens": "9511bbb34e923384",
"hash_cont_tokens": "b49b49d94fe62219"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_psychology|0": {
"hashes": {
"hash_examples": "61176bfd5da1298f",
"hash_full_prompts": "61176bfd5da1298f",
"hash_input_tokens": "d03192402e3c34f4",
"hash_cont_tokens": "5c5ec9186051282b"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2160,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_statistics|0": {
"hashes": {
"hash_examples": "40dfeebd1ea10f76",
"hash_full_prompts": "40dfeebd1ea10f76",
"hash_input_tokens": "64a95ae3a3a64549",
"hash_cont_tokens": "f2e1f020e308e417"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_us_history|0": {
"hashes": {
"hash_examples": "03daa510ba917f4d",
"hash_full_prompts": "03daa510ba917f4d",
"hash_input_tokens": "bab5f0db19a96301",
"hash_cont_tokens": "20cad576ce2ebcc8"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:high_school_world_history|0": {
"hashes": {
"hash_examples": "be075ffd579f43c2",
"hash_full_prompts": "be075ffd579f43c2",
"hash_input_tokens": "00373755e0989cab",
"hash_cont_tokens": "271be6c6fef2dc48"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:human_aging|0": {
"hashes": {
"hash_examples": "caa5b69f640bd1ef",
"hash_full_prompts": "caa5b69f640bd1ef",
"hash_input_tokens": "5895450a4d93ce77",
"hash_cont_tokens": "378ead94997d0a89"
},
"truncated": 0,
"non_truncated": 223,
"padded": 884,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:human_sexuality|0": {
"hashes": {
"hash_examples": "5ed2e38fb25a3767",
"hash_full_prompts": "5ed2e38fb25a3767",
"hash_input_tokens": "acf1eb81d99ec5d6",
"hash_cont_tokens": "dc2fc2caf8134dfc"
},
"truncated": 0,
"non_truncated": 131,
"padded": 520,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:international_law|0": {
"hashes": {
"hash_examples": "4e3e9e28d1b96484",
"hash_full_prompts": "4e3e9e28d1b96484",
"hash_input_tokens": "9f217295e2d6705f",
"hash_cont_tokens": "3f3dcceb9d3e534d"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:jurisprudence|0": {
"hashes": {
"hash_examples": "e264b755366310b3",
"hash_full_prompts": "e264b755366310b3",
"hash_input_tokens": "a844ae600360b560",
"hash_cont_tokens": "70ed7b7c9f0be497"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:logical_fallacies|0": {
"hashes": {
"hash_examples": "a4ab6965a3e38071",
"hash_full_prompts": "a4ab6965a3e38071",
"hash_input_tokens": "6932d7d77f3d7538",
"hash_cont_tokens": "1a0ecd9787cd6fbf"
},
"truncated": 0,
"non_truncated": 163,
"padded": 648,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:machine_learning|0": {
"hashes": {
"hash_examples": "b92320efa6636b40",
"hash_full_prompts": "b92320efa6636b40",
"hash_input_tokens": "7b1c93bb401b1887",
"hash_cont_tokens": "00f1d44e2614b8b1"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:management|0": {
"hashes": {
"hash_examples": "c9ee4872a850fe20",
"hash_full_prompts": "c9ee4872a850fe20",
"hash_input_tokens": "e39a925890bfaef1",
"hash_cont_tokens": "11fd7b793a137357"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:marketing|0": {
"hashes": {
"hash_examples": "0c151b70f6a047e3",
"hash_full_prompts": "0c151b70f6a047e3",
"hash_input_tokens": "4117108db9347c82",
"hash_cont_tokens": "c13e4c996b92a86d"
},
"truncated": 0,
"non_truncated": 234,
"padded": 928,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:medical_genetics|0": {
"hashes": {
"hash_examples": "513f6cb8fca3a24e",
"hash_full_prompts": "513f6cb8fca3a24e",
"hash_input_tokens": "ed549ec0c7811826",
"hash_cont_tokens": "01ab263bc1484312"
},
"truncated": 0,
"non_truncated": 100,
"padded": 392,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:miscellaneous|0": {
"hashes": {
"hash_examples": "259a190d635331db",
"hash_full_prompts": "259a190d635331db",
"hash_input_tokens": "aae1aec36250bceb",
"hash_cont_tokens": "bc64ce2ec24911da"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3108,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:moral_disputes|0": {
"hashes": {
"hash_examples": "b85052c48a0b7bc3",
"hash_full_prompts": "b85052c48a0b7bc3",
"hash_input_tokens": "3862864870d45652",
"hash_cont_tokens": "8e2791ee8c9bd155"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1376,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:moral_scenarios|0": {
"hashes": {
"hash_examples": "28d0b069ef00dd00",
"hash_full_prompts": "28d0b069ef00dd00",
"hash_input_tokens": "9ec57ef45ce205e4",
"hash_cont_tokens": "b4b6cb1fe4b797d1"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:nutrition|0": {
"hashes": {
"hash_examples": "00c9bc5f1d305b2f",
"hash_full_prompts": "00c9bc5f1d305b2f",
"hash_input_tokens": "bbd1f33fbcfc883c",
"hash_cont_tokens": "6774bca703358650"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1216,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:philosophy|0": {
"hashes": {
"hash_examples": "a458c08454a3fd5f",
"hash_full_prompts": "a458c08454a3fd5f",
"hash_input_tokens": "c1cd3dcaab95d596",
"hash_cont_tokens": "8135eb5749145739"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1236,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:prehistory|0": {
"hashes": {
"hash_examples": "d6a0ecbdbb670e9c",
"hash_full_prompts": "d6a0ecbdbb670e9c",
"hash_input_tokens": "0b2a55ccba88f7e6",
"hash_cont_tokens": "3997a58b4bc73e7d"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1292,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_accounting|0": {
"hashes": {
"hash_examples": "b4a95fe480b6540e",
"hash_full_prompts": "b4a95fe480b6540e",
"hash_input_tokens": "fc2cfa9b01e311c2",
"hash_cont_tokens": "b297db541d584b5f"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1128,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_law|0": {
"hashes": {
"hash_examples": "c2be9651cdbdde3b",
"hash_full_prompts": "c2be9651cdbdde3b",
"hash_input_tokens": "4cd43f4c5d961f90",
"hash_cont_tokens": "901653ce1e0d0f28"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6132,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_medicine|0": {
"hashes": {
"hash_examples": "26ce92416288f273",
"hash_full_prompts": "26ce92416288f273",
"hash_input_tokens": "4e51b79784d59cd4",
"hash_cont_tokens": "d3e4f3f9307e28f5"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:professional_psychology|0": {
"hashes": {
"hash_examples": "71ea5f182ea9a641",
"hash_full_prompts": "71ea5f182ea9a641",
"hash_input_tokens": "167c462e66c6a1c6",
"hash_cont_tokens": "5ecb39ca26f53f20"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2444,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:public_relations|0": {
"hashes": {
"hash_examples": "125adc21f91f8d77",
"hash_full_prompts": "125adc21f91f8d77",
"hash_input_tokens": "736325fec0f3654c",
"hash_cont_tokens": "c78a9a2477e10844"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:security_studies|0": {
"hashes": {
"hash_examples": "3c18b216c099fb26",
"hash_full_prompts": "3c18b216c099fb26",
"hash_input_tokens": "ee6c5043ef7554fb",
"hash_cont_tokens": "6c45ab42fb76e2b9"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:sociology|0": {
"hashes": {
"hash_examples": "3f2a9634cef7417d",
"hash_full_prompts": "3f2a9634cef7417d",
"hash_input_tokens": "b9d59a391a64b13f",
"hash_cont_tokens": "496dd23ecc21640f"
},
"truncated": 0,
"non_truncated": 201,
"padded": 804,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:us_foreign_policy|0": {
"hashes": {
"hash_examples": "22249da54056475e",
"hash_full_prompts": "22249da54056475e",
"hash_input_tokens": "b4f5a04a388a478f",
"hash_cont_tokens": "01ab263bc1484312"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:virology|0": {
"hashes": {
"hash_examples": "9d194b9471dc624e",
"hash_full_prompts": "9d194b9471dc624e",
"hash_input_tokens": "94591fc46ee7e5d3",
"hash_cont_tokens": "9456b76d39b0ce5e"
},
"truncated": 0,
"non_truncated": 166,
"padded": 660,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:world_religions|0": {
"hashes": {
"hash_examples": "229e5fe50082b064",
"hash_full_prompts": "229e5fe50082b064",
"hash_input_tokens": "fd22bb3bb1fcf492",
"hash_cont_tokens": "4f14606052a768e1"
},
"truncated": 0,
"non_truncated": 171,
"padded": 664,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arc_challenge_okapi_ar|0": {
"hashes": {
"hash_examples": "ab893807673bc355",
"hash_full_prompts": "ab893807673bc355",
"hash_input_tokens": "bc442d49b320f97d",
"hash_cont_tokens": "69b3e843642e9265"
},
"truncated": 0,
"non_truncated": 1160,
"padded": 4624,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arc_easy_ar|0": {
"hashes": {
"hash_examples": "acb688624acc3d04",
"hash_full_prompts": "acb688624acc3d04",
"hash_input_tokens": "833267d82c06fa8a",
"hash_cont_tokens": "e9e6739998f90674"
},
"truncated": 0,
"non_truncated": 2364,
"padded": 9411,
"non_padded": 45,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|boolq_ar|0": {
"hashes": {
"hash_examples": "48355a67867e0c32",
"hash_full_prompts": "48355a67867e0c32",
"hash_input_tokens": "62228cf4b0a88ee6",
"hash_cont_tokens": "483c6a1ad4741bd5"
},
"truncated": 0,
"non_truncated": 3260,
"padded": 6500,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|copa_ext_ar|0": {
"hashes": {
"hash_examples": "9bb83301bb72eecf",
"hash_full_prompts": "9bb83301bb72eecf",
"hash_input_tokens": "d6388eb1a319d94e",
"hash_cont_tokens": "040599378ad798ef"
},
"truncated": 0,
"non_truncated": 90,
"padded": 180,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|hellaswag_okapi_ar|0": {
"hashes": {
"hash_examples": "6e8cf57a322dfadd",
"hash_full_prompts": "6e8cf57a322dfadd",
"hash_input_tokens": "3c66ffb6cbe76f5a",
"hash_cont_tokens": "3ae86fa3a244f60d"
},
"truncated": 0,
"non_truncated": 9171,
"padded": 36674,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|openbook_qa_ext_ar|0": {
"hashes": {
"hash_examples": "923d41eb0aca93eb",
"hash_full_prompts": "923d41eb0aca93eb",
"hash_input_tokens": "a7f4c3c6636dd85a",
"hash_cont_tokens": "c39637966ace2440"
},
"truncated": 0,
"non_truncated": 495,
"padded": 1971,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|piqa_ar|0": {
"hashes": {
"hash_examples": "94bc205a520d3ea0",
"hash_full_prompts": "94bc205a520d3ea0",
"hash_input_tokens": "b151c1205c58a14e",
"hash_cont_tokens": "18e8820d3bcfb256"
},
"truncated": 0,
"non_truncated": 1833,
"padded": 3644,
"non_padded": 22,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|race_ar|0": {
"hashes": {
"hash_examples": "de65130bae647516",
"hash_full_prompts": "de65130bae647516",
"hash_input_tokens": "eb42610f99f3814a",
"hash_cont_tokens": "ea56a2a375d7ec0e"
},
"truncated": 0,
"non_truncated": 4929,
"padded": 19709,
"non_padded": 7,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|sciq_ar|0": {
"hashes": {
"hash_examples": "40370d4afeb83e3b",
"hash_full_prompts": "40370d4afeb83e3b",
"hash_input_tokens": "a7925ee036d5f2ec",
"hash_cont_tokens": "73d45ad38520aff9"
},
"truncated": 0,
"non_truncated": 995,
"padded": 3972,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|toxigen_ar|0": {
"hashes": {
"hash_examples": "1e139513004a9a2e",
"hash_full_prompts": "1e139513004a9a2e",
"hash_input_tokens": "c8557c46a6f0557c",
"hash_cont_tokens": "603c210c4b703bf2"
},
"truncated": 0,
"non_truncated": 935,
"padded": 1854,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|xstory_cloze:ar|0": {
"hashes": {
"hash_examples": "865426a22c787481",
"hash_full_prompts": "865426a22c787481",
"hash_input_tokens": "1776e31ec5c48f41",
"hash_cont_tokens": "abeba579fe84aa21"
},
"truncated": 0,
"non_truncated": 1511,
"padded": 2984,
"non_padded": 38,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "af9b7a761e240f94",
"hash_full_prompts": "af9b7a761e240f94",
"hash_input_tokens": "922b427f0f0c8661",
"hash_cont_tokens": "7edd02ee9c901908"
},
"truncated": 0,
"non_truncated": 72964,
"padded": 234960,
"non_padded": 663,
"num_truncated_few_shots": 0
}
}