{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 507.513414872,
    "end_time": 58862.983485138,
    "total_evaluation_time_secondes": "58355.470070266",
    "model_name": "01-ai/Yi-1.5-34B-Chat",
    "model_sha": "aecc6b2b173e0dccc12a5612c2b9324171054406",
    "model_dtype": "torch.bfloat16",
    "model_size": "64.17 GB",
    "config": null
  },
"results": { |
|
"community|acva:Algeria|0": { |
|
"acc_norm": 0.6461538461538462, |
|
"acc_norm_stderr": 0.03433004254147036 |
|
}, |
|
"community|acva:Ancient_Egypt|0": { |
|
"acc_norm": 0.526984126984127, |
|
"acc_norm_stderr": 0.028175510942128692 |
|
}, |
|
"community|acva:Arab_Empire|0": { |
|
"acc_norm": 0.36981132075471695, |
|
"acc_norm_stderr": 0.02971142188010793 |
|
}, |
|
"community|acva:Arabic_Architecture|0": { |
|
"acc_norm": 0.6358974358974359, |
|
"acc_norm_stderr": 0.034546538677863885 |
|
}, |
|
"community|acva:Arabic_Art|0": { |
|
"acc_norm": 0.38461538461538464, |
|
"acc_norm_stderr": 0.03492896993742303 |
|
}, |
|
"community|acva:Arabic_Astronomy|0": { |
|
"acc_norm": 0.517948717948718, |
|
"acc_norm_stderr": 0.03587477098773825 |
|
}, |
|
"community|acva:Arabic_Calligraphy|0": { |
|
"acc_norm": 0.7333333333333333, |
|
"acc_norm_stderr": 0.0277471397089816 |
|
}, |
|
"community|acva:Arabic_Ceremony|0": { |
|
"acc_norm": 0.6216216216216216, |
|
"acc_norm_stderr": 0.03575339609546739 |
|
}, |
|
"community|acva:Arabic_Clothing|0": { |
|
"acc_norm": 0.5282051282051282, |
|
"acc_norm_stderr": 0.035840746749208334 |
|
}, |
|
"community|acva:Arabic_Culture|0": { |
|
"acc_norm": 0.8666666666666667, |
|
"acc_norm_stderr": 0.024405884404899894 |
|
}, |
|
"community|acva:Arabic_Food|0": { |
|
"acc_norm": 0.6153846153846154, |
|
"acc_norm_stderr": 0.03492896993742304 |
|
}, |
|
"community|acva:Arabic_Funeral|0": { |
|
"acc_norm": 0.6947368421052632, |
|
"acc_norm_stderr": 0.047498887145627784 |
|
}, |
|
"community|acva:Arabic_Geography|0": { |
|
"acc_norm": 0.5103448275862069, |
|
"acc_norm_stderr": 0.04165774775728763 |
|
}, |
|
"community|acva:Arabic_History|0": { |
|
"acc_norm": 0.4358974358974359, |
|
"acc_norm_stderr": 0.03560166662346635 |
|
}, |
|
"community|acva:Arabic_Language_Origin|0": { |
|
"acc_norm": 0.6526315789473685, |
|
"acc_norm_stderr": 0.049109474007766586 |
|
}, |
|
"community|acva:Arabic_Literature|0": { |
|
"acc_norm": 0.5310344827586206, |
|
"acc_norm_stderr": 0.04158632762097828 |
|
}, |
|
"community|acva:Arabic_Math|0": { |
|
"acc_norm": 0.7846153846153846, |
|
"acc_norm_stderr": 0.02951446289186094 |
|
}, |
|
"community|acva:Arabic_Medicine|0": { |
|
"acc_norm": 0.6896551724137931, |
|
"acc_norm_stderr": 0.038552896163789464 |
|
}, |
|
"community|acva:Arabic_Music|0": { |
|
"acc_norm": 0.2517985611510791, |
|
"acc_norm_stderr": 0.03694846055443904 |
|
}, |
|
"community|acva:Arabic_Ornament|0": { |
|
"acc_norm": 0.7743589743589744, |
|
"acc_norm_stderr": 0.030010921825357008 |
|
}, |
|
"community|acva:Arabic_Philosophy|0": { |
|
"acc_norm": 0.5172413793103449, |
|
"acc_norm_stderr": 0.04164188720169375 |
|
}, |
|
"community|acva:Arabic_Physics_and_Chemistry|0": { |
|
"acc_norm": 0.7538461538461538, |
|
"acc_norm_stderr": 0.03092742837122567 |
|
}, |
|
"community|acva:Arabic_Wedding|0": { |
|
"acc_norm": 0.7435897435897436, |
|
"acc_norm_stderr": 0.03134970994274492 |
|
}, |
|
"community|acva:Bahrain|0": { |
|
"acc_norm": 0.7555555555555555, |
|
"acc_norm_stderr": 0.06478835438717001 |
|
}, |
|
"community|acva:Comoros|0": { |
|
"acc_norm": 0.5777777777777777, |
|
"acc_norm_stderr": 0.07446027270295806 |
|
}, |
|
"community|acva:Egypt_modern|0": { |
|
"acc_norm": 0.6526315789473685, |
|
"acc_norm_stderr": 0.04910947400776659 |
|
}, |
|
"community|acva:InfluenceFromAncientEgypt|0": { |
|
"acc_norm": 0.7692307692307693, |
|
"acc_norm_stderr": 0.030249375293831295 |
|
}, |
|
"community|acva:InfluenceFromByzantium|0": { |
|
"acc_norm": 0.6896551724137931, |
|
"acc_norm_stderr": 0.03855289616378949 |
|
}, |
|
"community|acva:InfluenceFromChina|0": { |
|
"acc_norm": 0.6974358974358974, |
|
"acc_norm_stderr": 0.03298070870085619 |
|
}, |
|
"community|acva:InfluenceFromGreece|0": { |
|
"acc_norm": 0.8666666666666667, |
|
"acc_norm_stderr": 0.024405884404899894 |
|
}, |
|
"community|acva:InfluenceFromIslam|0": { |
|
"acc_norm": 0.8275862068965517, |
|
"acc_norm_stderr": 0.03147830790259574 |
|
}, |
|
"community|acva:InfluenceFromPersia|0": { |
|
"acc_norm": 0.9257142857142857, |
|
"acc_norm_stderr": 0.019880005975330343 |
|
}, |
|
"community|acva:InfluenceFromRome|0": { |
|
"acc_norm": 0.6564102564102564, |
|
"acc_norm_stderr": 0.034096273014098545 |
|
}, |
|
"community|acva:Iraq|0": { |
|
"acc_norm": 0.5529411764705883, |
|
"acc_norm_stderr": 0.054247803536170265 |
|
}, |
|
"community|acva:Islam_Education|0": { |
|
"acc_norm": 0.7025641025641025, |
|
"acc_norm_stderr": 0.032820017178388096 |
|
}, |
|
"community|acva:Islam_branches_and_schools|0": { |
|
"acc_norm": 0.49142857142857144, |
|
"acc_norm_stderr": 0.0378993320697706 |
|
}, |
|
"community|acva:Islamic_law_system|0": { |
|
"acc_norm": 0.7128205128205128, |
|
"acc_norm_stderr": 0.03248373338539886 |
|
}, |
|
"community|acva:Jordan|0": { |
|
"acc_norm": 0.5777777777777777, |
|
"acc_norm_stderr": 0.07446027270295806 |
|
}, |
|
"community|acva:Kuwait|0": { |
|
"acc_norm": 0.7333333333333333, |
|
"acc_norm_stderr": 0.06666666666666668 |
|
}, |
|
"community|acva:Lebanon|0": { |
|
"acc_norm": 0.6888888888888889, |
|
"acc_norm_stderr": 0.06979205927323111 |
|
}, |
|
"community|acva:Libya|0": { |
|
"acc_norm": 0.5777777777777777, |
|
"acc_norm_stderr": 0.07446027270295805 |
|
}, |
|
"community|acva:Mauritania|0": { |
|
"acc_norm": 0.4666666666666667, |
|
"acc_norm_stderr": 0.0752101433090355 |
|
}, |
|
"community|acva:Mesopotamia_civilization|0": { |
|
"acc_norm": 0.6, |
|
"acc_norm_stderr": 0.03947710169758612 |
|
}, |
|
"community|acva:Morocco|0": { |
|
"acc_norm": 0.7333333333333333, |
|
"acc_norm_stderr": 0.0666666666666667 |
|
}, |
|
"community|acva:Oman|0": { |
|
"acc_norm": 0.8222222222222222, |
|
"acc_norm_stderr": 0.05763774795025094 |
|
}, |
|
"community|acva:Palestine|0": { |
|
"acc_norm": 0.5647058823529412, |
|
"acc_norm_stderr": 0.054095720804810316 |
|
}, |
|
"community|acva:Qatar|0": { |
|
"acc_norm": 0.5777777777777777, |
|
"acc_norm_stderr": 0.07446027270295805 |
|
}, |
|
"community|acva:Saudi_Arabia|0": { |
|
"acc_norm": 0.7589743589743589, |
|
"acc_norm_stderr": 0.030707489381124196 |
|
}, |
|
"community|acva:Somalia|0": { |
|
"acc_norm": 0.6222222222222222, |
|
"acc_norm_stderr": 0.07309112127323451 |
|
}, |
|
"community|acva:Sudan|0": { |
|
"acc_norm": 0.7111111111111111, |
|
"acc_norm_stderr": 0.06832943242540507 |
|
}, |
|
"community|acva:Syria|0": { |
|
"acc_norm": 0.7777777777777778, |
|
"acc_norm_stderr": 0.06267511942419626 |
|
}, |
|
"community|acva:Tunisia|0": { |
|
"acc_norm": 0.7111111111111111, |
|
"acc_norm_stderr": 0.06832943242540508 |
|
}, |
|
"community|acva:United_Arab_Emirates|0": { |
|
"acc_norm": 0.7764705882352941, |
|
"acc_norm_stderr": 0.045455893567389986 |
|
}, |
|
"community|acva:Yemen|0": { |
|
"acc_norm": 0.7, |
|
"acc_norm_stderr": 0.15275252316519466 |
|
}, |
|
"community|acva:communication|0": { |
|
"acc_norm": 0.5824175824175825, |
|
"acc_norm_stderr": 0.02588421677363805 |
|
}, |
|
"community|acva:computer_and_phone|0": { |
|
"acc_norm": 0.6711864406779661, |
|
"acc_norm_stderr": 0.027398247282935856 |
|
}, |
|
"community|acva:daily_life|0": { |
|
"acc_norm": 0.7002967359050445, |
|
"acc_norm_stderr": 0.024992928618112607 |
|
}, |
|
"community|acva:entertainment|0": { |
|
"acc_norm": 0.8101694915254237, |
|
"acc_norm_stderr": 0.022871638746161122 |
|
}, |
|
"community|alghafa:mcq_exams_test_ar|0": { |
|
"acc_norm": 0.2800718132854578, |
|
"acc_norm_stderr": 0.01904328620379534 |
|
}, |
|
"community|alghafa:meta_ar_dialects|0": { |
|
"acc_norm": 0.2861909175162187, |
|
"acc_norm_stderr": 0.00615408495495672 |
|
}, |
|
"community|alghafa:meta_ar_msa|0": { |
|
"acc_norm": 0.3229050279329609, |
|
"acc_norm_stderr": 0.015638440380241488 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { |
|
"acc_norm": 0.5466666666666666, |
|
"acc_norm_stderr": 0.05787010410349153 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { |
|
"acc_norm": 0.52, |
|
"acc_norm_stderr": 0.04092881363092387 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { |
|
"acc_norm": 0.38, |
|
"acc_norm_stderr": 0.03976440686960231 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { |
|
"acc_norm": 0.6127579737335835, |
|
"acc_norm_stderr": 0.005448207700271403 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": { |
|
"acc_norm": 0.41100917431192663, |
|
"acc_norm_stderr": 0.006355089245364129 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task|0": { |
|
"acc_norm": 0.375, |
|
"acc_norm_stderr": 0.011676634983483975 |
|
}, |
|
"community|arabic_exams|0": { |
|
"acc_norm": 0.2737430167597765, |
|
"acc_norm_stderr": 0.01925902470829234 |
|
}, |
|
"community|arabic_mmlu:abstract_algebra|0": { |
|
"acc_norm": 0.32, |
|
"acc_norm_stderr": 0.046882617226215034 |
|
}, |
|
"community|arabic_mmlu:anatomy|0": { |
|
"acc_norm": 0.31851851851851853, |
|
"acc_norm_stderr": 0.04024778401977109 |
|
}, |
|
"community|arabic_mmlu:astronomy|0": { |
|
"acc_norm": 0.375, |
|
"acc_norm_stderr": 0.039397364351956274 |
|
}, |
|
"community|arabic_mmlu:business_ethics|0": { |
|
"acc_norm": 0.36, |
|
"acc_norm_stderr": 0.04824181513244218 |
|
}, |
|
"community|arabic_mmlu:clinical_knowledge|0": { |
|
"acc_norm": 0.3132075471698113, |
|
"acc_norm_stderr": 0.02854479331905533 |
|
}, |
|
"community|arabic_mmlu:college_biology|0": { |
|
"acc_norm": 0.2986111111111111, |
|
"acc_norm_stderr": 0.038270523579507554 |
|
}, |
|
"community|arabic_mmlu:college_chemistry|0": { |
|
"acc_norm": 0.3, |
|
"acc_norm_stderr": 0.046056618647183814 |
|
}, |
|
"community|arabic_mmlu:college_computer_science|0": { |
|
"acc_norm": 0.31, |
|
"acc_norm_stderr": 0.04648231987117316 |
|
}, |
|
"community|arabic_mmlu:college_mathematics|0": { |
|
"acc_norm": 0.36, |
|
"acc_norm_stderr": 0.04824181513244218 |
|
}, |
|
"community|arabic_mmlu:college_medicine|0": { |
|
"acc_norm": 0.2658959537572254, |
|
"acc_norm_stderr": 0.0336876293225943 |
|
}, |
|
"community|arabic_mmlu:college_physics|0": { |
|
"acc_norm": 0.2549019607843137, |
|
"acc_norm_stderr": 0.043364327079931785 |
|
}, |
|
"community|arabic_mmlu:computer_security|0": { |
|
"acc_norm": 0.38, |
|
"acc_norm_stderr": 0.04878317312145632 |
|
}, |
|
"community|arabic_mmlu:conceptual_physics|0": { |
|
"acc_norm": 0.30638297872340425, |
|
"acc_norm_stderr": 0.03013590647851756 |
|
}, |
|
"community|arabic_mmlu:econometrics|0": { |
|
"acc_norm": 0.2543859649122807, |
|
"acc_norm_stderr": 0.040969851398436716 |
|
}, |
|
"community|arabic_mmlu:electrical_engineering|0": { |
|
"acc_norm": 0.43448275862068964, |
|
"acc_norm_stderr": 0.04130740879555498 |
|
}, |
|
"community|arabic_mmlu:elementary_mathematics|0": { |
|
"acc_norm": 0.37037037037037035, |
|
"acc_norm_stderr": 0.02487081525105709 |
|
}, |
|
"community|arabic_mmlu:formal_logic|0": { |
|
"acc_norm": 0.2222222222222222, |
|
"acc_norm_stderr": 0.037184890068181146 |
|
}, |
|
"community|arabic_mmlu:global_facts|0": { |
|
"acc_norm": 0.32, |
|
"acc_norm_stderr": 0.046882617226215034 |
|
}, |
|
"community|arabic_mmlu:high_school_biology|0": { |
|
"acc_norm": 0.3064516129032258, |
|
"acc_norm_stderr": 0.026226485652553883 |
|
}, |
|
"community|arabic_mmlu:high_school_chemistry|0": { |
|
"acc_norm": 0.3645320197044335, |
|
"acc_norm_stderr": 0.0338640574606209 |
|
}, |
|
"community|arabic_mmlu:high_school_computer_science|0": { |
|
"acc_norm": 0.49, |
|
"acc_norm_stderr": 0.05024183937956912 |
|
}, |
|
"community|arabic_mmlu:high_school_european_history|0": { |
|
"acc_norm": 0.24242424242424243, |
|
"acc_norm_stderr": 0.03346409881055953 |
|
}, |
|
"community|arabic_mmlu:high_school_geography|0": { |
|
"acc_norm": 0.3434343434343434, |
|
"acc_norm_stderr": 0.03383201223244442 |
|
}, |
|
"community|arabic_mmlu:high_school_government_and_politics|0": { |
|
"acc_norm": 0.29015544041450775, |
|
"acc_norm_stderr": 0.032752644677915166 |
|
}, |
|
"community|arabic_mmlu:high_school_macroeconomics|0": { |
|
"acc_norm": 0.31025641025641026, |
|
"acc_norm_stderr": 0.02345467488940429 |
|
}, |
|
"community|arabic_mmlu:high_school_mathematics|0": { |
|
"acc_norm": 0.3074074074074074, |
|
"acc_norm_stderr": 0.028133252578815632 |
|
}, |
|
"community|arabic_mmlu:high_school_microeconomics|0": { |
|
"acc_norm": 0.25210084033613445, |
|
"acc_norm_stderr": 0.028205545033277726 |
|
}, |
|
"community|arabic_mmlu:high_school_physics|0": { |
|
"acc_norm": 0.26490066225165565, |
|
"acc_norm_stderr": 0.03603038545360385 |
|
}, |
|
"community|arabic_mmlu:high_school_psychology|0": { |
|
"acc_norm": 0.27155963302752295, |
|
"acc_norm_stderr": 0.01906909836319144 |
|
}, |
|
"community|arabic_mmlu:high_school_statistics|0": { |
|
"acc_norm": 0.3101851851851852, |
|
"acc_norm_stderr": 0.03154696285656628 |
|
}, |
|
"community|arabic_mmlu:high_school_us_history|0": { |
|
"acc_norm": 0.23529411764705882, |
|
"acc_norm_stderr": 0.02977177522814565 |
|
}, |
|
"community|arabic_mmlu:high_school_world_history|0": { |
|
"acc_norm": 0.2911392405063291, |
|
"acc_norm_stderr": 0.02957160106575337 |
|
}, |
|
"community|arabic_mmlu:human_aging|0": { |
|
"acc_norm": 0.2600896860986547, |
|
"acc_norm_stderr": 0.029442495585857473 |
|
}, |
|
"community|arabic_mmlu:human_sexuality|0": { |
|
"acc_norm": 0.31297709923664124, |
|
"acc_norm_stderr": 0.04066962905677697 |
|
}, |
|
"community|arabic_mmlu:international_law|0": { |
|
"acc_norm": 0.4297520661157025, |
|
"acc_norm_stderr": 0.04519082021319772 |
|
}, |
|
"community|arabic_mmlu:jurisprudence|0": { |
|
"acc_norm": 0.35185185185185186, |
|
"acc_norm_stderr": 0.04616631111801714 |
|
}, |
|
"community|arabic_mmlu:logical_fallacies|0": { |
|
"acc_norm": 0.3803680981595092, |
|
"acc_norm_stderr": 0.03814269893261837 |
|
}, |
|
"community|arabic_mmlu:machine_learning|0": { |
|
"acc_norm": 0.24107142857142858, |
|
"acc_norm_stderr": 0.04059867246952687 |
|
}, |
|
"community|arabic_mmlu:management|0": { |
|
"acc_norm": 0.3592233009708738, |
|
"acc_norm_stderr": 0.04750458399041692 |
|
}, |
|
"community|arabic_mmlu:marketing|0": { |
|
"acc_norm": 0.3547008547008547, |
|
"acc_norm_stderr": 0.03134250486245402 |
|
}, |
|
"community|arabic_mmlu:medical_genetics|0": { |
|
"acc_norm": 0.35, |
|
"acc_norm_stderr": 0.04793724854411019 |
|
}, |
|
"community|arabic_mmlu:miscellaneous|0": { |
|
"acc_norm": 0.33588761174968074, |
|
"acc_norm_stderr": 0.016889407235171686 |
|
}, |
|
"community|arabic_mmlu:moral_disputes|0": { |
|
"acc_norm": 0.3583815028901734, |
|
"acc_norm_stderr": 0.025816756791584204 |
|
}, |
|
"community|arabic_mmlu:moral_scenarios|0": { |
|
"acc_norm": 0.24916201117318434, |
|
"acc_norm_stderr": 0.014465893829859926 |
|
}, |
|
"community|arabic_mmlu:nutrition|0": { |
|
"acc_norm": 0.3660130718954248, |
|
"acc_norm_stderr": 0.027582811415159624 |
|
}, |
|
"community|arabic_mmlu:philosophy|0": { |
|
"acc_norm": 0.3279742765273312, |
|
"acc_norm_stderr": 0.026664410886937606 |
|
}, |
|
"community|arabic_mmlu:prehistory|0": { |
|
"acc_norm": 0.3425925925925926, |
|
"acc_norm_stderr": 0.026406145973625676 |
|
}, |
|
"community|arabic_mmlu:professional_accounting|0": { |
|
"acc_norm": 0.2624113475177305, |
|
"acc_norm_stderr": 0.026244920349843028 |
|
}, |
|
"community|arabic_mmlu:professional_law|0": { |
|
"acc_norm": 0.27249022164276404, |
|
"acc_norm_stderr": 0.011371658294311538 |
|
}, |
|
"community|arabic_mmlu:professional_medicine|0": { |
|
"acc_norm": 0.1801470588235294, |
|
"acc_norm_stderr": 0.02334516361654487 |
|
}, |
|
"community|arabic_mmlu:professional_psychology|0": { |
|
"acc_norm": 0.29901960784313725, |
|
"acc_norm_stderr": 0.018521756215423024 |
|
}, |
|
"community|arabic_mmlu:public_relations|0": { |
|
"acc_norm": 0.2545454545454545, |
|
"acc_norm_stderr": 0.041723430387053825 |
|
}, |
|
"community|arabic_mmlu:security_studies|0": { |
|
"acc_norm": 0.33877551020408164, |
|
"acc_norm_stderr": 0.030299506562154185 |
|
}, |
|
"community|arabic_mmlu:sociology|0": { |
|
"acc_norm": 0.35323383084577115, |
|
"acc_norm_stderr": 0.03379790611796777 |
|
}, |
|
"community|arabic_mmlu:us_foreign_policy|0": { |
|
"acc_norm": 0.51, |
|
"acc_norm_stderr": 0.05024183937956912 |
|
}, |
|
"community|arabic_mmlu:virology|0": { |
|
"acc_norm": 0.25301204819277107, |
|
"acc_norm_stderr": 0.03384429155233137 |
|
}, |
|
"community|arabic_mmlu:world_religions|0": { |
|
"acc_norm": 0.23976608187134502, |
|
"acc_norm_stderr": 0.03274485211946956 |
|
}, |
|
"community|arc_challenge_okapi_ar|0": { |
|
"acc_norm": 0.3163793103448276, |
|
"acc_norm_stderr": 0.01366060452184034 |
|
}, |
|
"community|arc_easy_ar|0": { |
|
"acc_norm": 0.30583756345177665, |
|
"acc_norm_stderr": 0.009478598032723799 |
|
}, |
|
"community|boolq_ar|0": { |
|
"acc_norm": 0.7027607361963191, |
|
"acc_norm_stderr": 0.008005982801564051 |
|
}, |
|
"community|copa_ext_ar|0": { |
|
"acc_norm": 0.4777777777777778, |
|
"acc_norm_stderr": 0.05294752255076824 |
|
}, |
|
"community|hellaswag_okapi_ar|0": { |
|
"acc_norm": 0.26049503870897395, |
|
"acc_norm_stderr": 0.0045833772664397735 |
|
}, |
|
"community|openbook_qa_ext_ar|0": { |
|
"acc_norm": 0.3696969696969697, |
|
"acc_norm_stderr": 0.02171871710436895 |
|
}, |
|
"community|piqa_ar|0": { |
|
"acc_norm": 0.5177304964539007, |
|
"acc_norm_stderr": 0.011674385742971971 |
|
}, |
|
"community|race_ar|0": { |
|
"acc_norm": 0.33515926151349157, |
|
"acc_norm_stderr": 0.0067243308660135 |
|
}, |
|
"community|sciq_ar|0": { |
|
"acc_norm": 0.5115577889447236, |
|
"acc_norm_stderr": 0.015854799447099773 |
|
}, |
|
"community|toxigen_ar|0": { |
|
"acc_norm": 0.4459893048128342, |
|
"acc_norm_stderr": 0.016264767455798974 |
|
}, |
|
"lighteval|xstory_cloze:ar|0": { |
|
"acc": 0.5545996029119789, |
|
"acc_stderr": 0.01279017843808481 |
|
}, |
|
"community|acva:_average|0": { |
|
"acc_norm": 0.6522243507684015, |
|
"acc_norm_stderr": 0.04523294255606712 |
|
}, |
|
"community|alghafa:_average|0": { |
|
"acc_norm": 0.41495573038297934, |
|
"acc_norm_stderr": 0.022542118674681196 |
|
}, |
|
"community|arabic_mmlu:_average|0": { |
|
"acc_norm": 0.31626784483699816, |
|
"acc_norm_stderr": 0.034608288056212196 |
|
}, |
|
"all": { |
|
"acc_norm": 0.4748741358398842, |
|
"acc_norm_stderr": 0.0368832167853778, |
|
"acc": 0.5545996029119789, |
|
"acc_stderr": 0.01279017843808481 |
|
} |
|
}, |
|
"versions": { |
|
"community|acva:Algeria|0": 0, |
|
"community|acva:Ancient_Egypt|0": 0, |
|
"community|acva:Arab_Empire|0": 0, |
|
"community|acva:Arabic_Architecture|0": 0, |
|
"community|acva:Arabic_Art|0": 0, |
|
"community|acva:Arabic_Astronomy|0": 0, |
|
"community|acva:Arabic_Calligraphy|0": 0, |
|
"community|acva:Arabic_Ceremony|0": 0, |
|
"community|acva:Arabic_Clothing|0": 0, |
|
"community|acva:Arabic_Culture|0": 0, |
|
"community|acva:Arabic_Food|0": 0, |
|
"community|acva:Arabic_Funeral|0": 0, |
|
"community|acva:Arabic_Geography|0": 0, |
|
"community|acva:Arabic_History|0": 0, |
|
"community|acva:Arabic_Language_Origin|0": 0, |
|
"community|acva:Arabic_Literature|0": 0, |
|
"community|acva:Arabic_Math|0": 0, |
|
"community|acva:Arabic_Medicine|0": 0, |
|
"community|acva:Arabic_Music|0": 0, |
|
"community|acva:Arabic_Ornament|0": 0, |
|
"community|acva:Arabic_Philosophy|0": 0, |
|
"community|acva:Arabic_Physics_and_Chemistry|0": 0, |
|
"community|acva:Arabic_Wedding|0": 0, |
|
"community|acva:Bahrain|0": 0, |
|
"community|acva:Comoros|0": 0, |
|
"community|acva:Egypt_modern|0": 0, |
|
"community|acva:InfluenceFromAncientEgypt|0": 0, |
|
"community|acva:InfluenceFromByzantium|0": 0, |
|
"community|acva:InfluenceFromChina|0": 0, |
|
"community|acva:InfluenceFromGreece|0": 0, |
|
"community|acva:InfluenceFromIslam|0": 0, |
|
"community|acva:InfluenceFromPersia|0": 0, |
|
"community|acva:InfluenceFromRome|0": 0, |
|
"community|acva:Iraq|0": 0, |
|
"community|acva:Islam_Education|0": 0, |
|
"community|acva:Islam_branches_and_schools|0": 0, |
|
"community|acva:Islamic_law_system|0": 0, |
|
"community|acva:Jordan|0": 0, |
|
"community|acva:Kuwait|0": 0, |
|
"community|acva:Lebanon|0": 0, |
|
"community|acva:Libya|0": 0, |
|
"community|acva:Mauritania|0": 0, |
|
"community|acva:Mesopotamia_civilization|0": 0, |
|
"community|acva:Morocco|0": 0, |
|
"community|acva:Oman|0": 0, |
|
"community|acva:Palestine|0": 0, |
|
"community|acva:Qatar|0": 0, |
|
"community|acva:Saudi_Arabia|0": 0, |
|
"community|acva:Somalia|0": 0, |
|
"community|acva:Sudan|0": 0, |
|
"community|acva:Syria|0": 0, |
|
"community|acva:Tunisia|0": 0, |
|
"community|acva:United_Arab_Emirates|0": 0, |
|
"community|acva:Yemen|0": 0, |
|
"community|acva:communication|0": 0, |
|
"community|acva:computer_and_phone|0": 0, |
|
"community|acva:daily_life|0": 0, |
|
"community|acva:entertainment|0": 0, |
|
"community|alghafa:mcq_exams_test_ar|0": 0, |
|
"community|alghafa:meta_ar_dialects|0": 0, |
|
"community|alghafa:meta_ar_msa|0": 0, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0, |
|
"community|alghafa:multiple_choice_sentiment_task|0": 0, |
|
"community|arabic_exams|0": 0, |
|
"community|arabic_mmlu:abstract_algebra|0": 0, |
|
"community|arabic_mmlu:anatomy|0": 0, |
|
"community|arabic_mmlu:astronomy|0": 0, |
|
"community|arabic_mmlu:business_ethics|0": 0, |
|
"community|arabic_mmlu:clinical_knowledge|0": 0, |
|
"community|arabic_mmlu:college_biology|0": 0, |
|
"community|arabic_mmlu:college_chemistry|0": 0, |
|
"community|arabic_mmlu:college_computer_science|0": 0, |
|
"community|arabic_mmlu:college_mathematics|0": 0, |
|
"community|arabic_mmlu:college_medicine|0": 0, |
|
"community|arabic_mmlu:college_physics|0": 0, |
|
"community|arabic_mmlu:computer_security|0": 0, |
|
"community|arabic_mmlu:conceptual_physics|0": 0, |
|
"community|arabic_mmlu:econometrics|0": 0, |
|
"community|arabic_mmlu:electrical_engineering|0": 0, |
|
"community|arabic_mmlu:elementary_mathematics|0": 0, |
|
"community|arabic_mmlu:formal_logic|0": 0, |
|
"community|arabic_mmlu:global_facts|0": 0, |
|
"community|arabic_mmlu:high_school_biology|0": 0, |
|
"community|arabic_mmlu:high_school_chemistry|0": 0, |
|
"community|arabic_mmlu:high_school_computer_science|0": 0, |
|
"community|arabic_mmlu:high_school_european_history|0": 0, |
|
"community|arabic_mmlu:high_school_geography|0": 0, |
|
"community|arabic_mmlu:high_school_government_and_politics|0": 0, |
|
"community|arabic_mmlu:high_school_macroeconomics|0": 0, |
|
"community|arabic_mmlu:high_school_mathematics|0": 0, |
|
"community|arabic_mmlu:high_school_microeconomics|0": 0, |
|
"community|arabic_mmlu:high_school_physics|0": 0, |
|
"community|arabic_mmlu:high_school_psychology|0": 0, |
|
"community|arabic_mmlu:high_school_statistics|0": 0, |
|
"community|arabic_mmlu:high_school_us_history|0": 0, |
|
"community|arabic_mmlu:high_school_world_history|0": 0, |
|
"community|arabic_mmlu:human_aging|0": 0, |
|
"community|arabic_mmlu:human_sexuality|0": 0, |
|
"community|arabic_mmlu:international_law|0": 0, |
|
"community|arabic_mmlu:jurisprudence|0": 0, |
|
"community|arabic_mmlu:logical_fallacies|0": 0, |
|
"community|arabic_mmlu:machine_learning|0": 0, |
|
"community|arabic_mmlu:management|0": 0, |
|
"community|arabic_mmlu:marketing|0": 0, |
|
"community|arabic_mmlu:medical_genetics|0": 0, |
|
"community|arabic_mmlu:miscellaneous|0": 0, |
|
"community|arabic_mmlu:moral_disputes|0": 0, |
|
"community|arabic_mmlu:moral_scenarios|0": 0, |
|
"community|arabic_mmlu:nutrition|0": 0, |
|
"community|arabic_mmlu:philosophy|0": 0, |
|
"community|arabic_mmlu:prehistory|0": 0, |
|
"community|arabic_mmlu:professional_accounting|0": 0, |
|
"community|arabic_mmlu:professional_law|0": 0, |
|
"community|arabic_mmlu:professional_medicine|0": 0, |
|
"community|arabic_mmlu:professional_psychology|0": 0, |
|
"community|arabic_mmlu:public_relations|0": 0, |
|
"community|arabic_mmlu:security_studies|0": 0, |
|
"community|arabic_mmlu:sociology|0": 0, |
|
"community|arabic_mmlu:us_foreign_policy|0": 0, |
|
"community|arabic_mmlu:virology|0": 0, |
|
"community|arabic_mmlu:world_religions|0": 0, |
|
"community|arc_challenge_okapi_ar|0": 0, |
|
"community|arc_easy_ar|0": 0, |
|
"community|boolq_ar|0": 0, |
|
"community|copa_ext_ar|0": 0, |
|
"community|hellaswag_okapi_ar|0": 0, |
|
"community|openbook_qa_ext_ar|0": 0, |
|
"community|piqa_ar|0": 0, |
|
"community|race_ar|0": 0, |
|
"community|sciq_ar|0": 0, |
|
"community|toxigen_ar|0": 0, |
|
"lighteval|xstory_cloze:ar|0": 0 |
|
}, |
|
"config_tasks": { |
|
"community|acva:Algeria": { |
|
"name": "acva:Algeria", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Algeria", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Ancient_Egypt": { |
|
"name": "acva:Ancient_Egypt", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Ancient_Egypt", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 315, |
|
"effective_num_docs": 315, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arab_Empire": { |
|
"name": "acva:Arab_Empire", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arab_Empire", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 265, |
|
"effective_num_docs": 265, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Architecture": { |
|
"name": "acva:Arabic_Architecture", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Architecture", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Art": { |
|
"name": "acva:Arabic_Art", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Art", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Astronomy": { |
|
"name": "acva:Arabic_Astronomy", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Astronomy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Calligraphy": { |
|
"name": "acva:Arabic_Calligraphy", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Calligraphy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 255, |
|
"effective_num_docs": 255, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Ceremony": { |
|
"name": "acva:Arabic_Ceremony", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Ceremony", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 185, |
|
"effective_num_docs": 185, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Clothing": { |
|
"name": "acva:Arabic_Clothing", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Clothing", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Culture": { |
|
"name": "acva:Arabic_Culture", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Culture", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Food": { |
|
"name": "acva:Arabic_Food", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Food", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Funeral": { |
|
"name": "acva:Arabic_Funeral", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Funeral", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 95, |
|
"effective_num_docs": 95, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Geography": { |
|
"name": "acva:Arabic_Geography", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Geography", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_History": { |
|
"name": "acva:Arabic_History", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_History", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Language_Origin": { |
|
"name": "acva:Arabic_Language_Origin", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Language_Origin", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 95, |
|
"effective_num_docs": 95, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Literature": { |
|
"name": "acva:Arabic_Literature", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Literature", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Math": { |
|
"name": "acva:Arabic_Math", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Math", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Medicine": { |
|
"name": "acva:Arabic_Medicine", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Medicine", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Music": { |
|
"name": "acva:Arabic_Music", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Music", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 139, |
|
"effective_num_docs": 139, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Ornament": { |
|
"name": "acva:Arabic_Ornament", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Ornament", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Philosophy": { |
|
"name": "acva:Arabic_Philosophy", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Philosophy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Physics_and_Chemistry": { |
|
"name": "acva:Arabic_Physics_and_Chemistry", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Physics_and_Chemistry", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Arabic_Wedding": { |
|
"name": "acva:Arabic_Wedding", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Arabic_Wedding", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Bahrain": { |
|
"name": "acva:Bahrain", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Bahrain", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Comoros": { |
|
"name": "acva:Comoros", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Comoros", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Egypt_modern": { |
|
"name": "acva:Egypt_modern", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Egypt_modern", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 95, |
|
"effective_num_docs": 95, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromAncientEgypt": { |
|
"name": "acva:InfluenceFromAncientEgypt", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromAncientEgypt", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromByzantium": { |
|
"name": "acva:InfluenceFromByzantium", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromByzantium", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromChina": { |
|
"name": "acva:InfluenceFromChina", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromChina", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromGreece": { |
|
"name": "acva:InfluenceFromGreece", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromGreece", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromIslam": { |
|
"name": "acva:InfluenceFromIslam", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromIslam", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromPersia": { |
|
"name": "acva:InfluenceFromPersia", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromPersia", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 175, |
|
"effective_num_docs": 175, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:InfluenceFromRome": { |
|
"name": "acva:InfluenceFromRome", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "InfluenceFromRome", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Iraq": { |
|
"name": "acva:Iraq", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Iraq", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 85, |
|
"effective_num_docs": 85, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Islam_Education": { |
|
"name": "acva:Islam_Education", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Islam_Education", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Islam_branches_and_schools": { |
|
"name": "acva:Islam_branches_and_schools", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Islam_branches_and_schools", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 175, |
|
"effective_num_docs": 175, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Islamic_law_system": { |
|
"name": "acva:Islamic_law_system", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Islamic_law_system", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Jordan": { |
|
"name": "acva:Jordan", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Jordan", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Kuwait": { |
|
"name": "acva:Kuwait", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Kuwait", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Lebanon": { |
|
"name": "acva:Lebanon", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Lebanon", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Libya": { |
|
"name": "acva:Libya", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Libya", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Mauritania": { |
|
"name": "acva:Mauritania", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Mauritania", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Mesopotamia_civilization": { |
|
"name": "acva:Mesopotamia_civilization", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Mesopotamia_civilization", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 155, |
|
"effective_num_docs": 155, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Morocco": { |
|
"name": "acva:Morocco", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Morocco", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Oman": { |
|
"name": "acva:Oman", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Oman", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Palestine": { |
|
"name": "acva:Palestine", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Palestine", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 85, |
|
"effective_num_docs": 85, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Qatar": { |
|
"name": "acva:Qatar", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Qatar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Saudi_Arabia": { |
|
"name": "acva:Saudi_Arabia", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Saudi_Arabia", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 195, |
|
"effective_num_docs": 195, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Somalia": { |
|
"name": "acva:Somalia", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Somalia", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Sudan": { |
|
"name": "acva:Sudan", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Sudan", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Syria": { |
|
"name": "acva:Syria", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Syria", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Tunisia": { |
|
"name": "acva:Tunisia", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Tunisia", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 45, |
|
"effective_num_docs": 45, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:United_Arab_Emirates": { |
|
"name": "acva:United_Arab_Emirates", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "United_Arab_Emirates", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 85, |
|
"effective_num_docs": 85, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:Yemen": { |
|
"name": "acva:Yemen", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "Yemen", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 10, |
|
"effective_num_docs": 10, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:communication": { |
|
"name": "acva:communication", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "communication", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 364, |
|
"effective_num_docs": 364, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:computer_and_phone": { |
|
"name": "acva:computer_and_phone", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "computer_and_phone", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 295, |
|
"effective_num_docs": 295, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:daily_life": { |
|
"name": "acva:daily_life", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "daily_life", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 337, |
|
"effective_num_docs": 337, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|acva:entertainment": { |
|
"name": "acva:entertainment", |
|
"prompt_function": "acva", |
|
"hf_repo": "OALL/ACVA", |
|
"hf_subset": "entertainment", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 295, |
|
"effective_num_docs": 295, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:mcq_exams_test_ar": { |
|
"name": "alghafa:mcq_exams_test_ar", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "mcq_exams_test_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 557, |
|
"effective_num_docs": 557, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:meta_ar_dialects": { |
|
"name": "alghafa:meta_ar_dialects", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "meta_ar_dialects", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 5395, |
|
"effective_num_docs": 5395, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:meta_ar_msa": { |
|
"name": "alghafa:meta_ar_msa", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "meta_ar_msa", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": { |
|
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_facts_truefalse_balanced_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 75, |
|
"effective_num_docs": 75, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task": { |
|
"name": "alghafa:multiple_choice_grounded_statement_soqal_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_grounded_statement_soqal_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 150, |
|
"effective_num_docs": 150, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { |
|
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 150, |
|
"effective_num_docs": 150, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { |
|
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 7995, |
|
"effective_num_docs": 7995, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task": { |
|
"name": "alghafa:multiple_choice_rating_sentiment_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_rating_sentiment_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 5995, |
|
"effective_num_docs": 5995, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task": { |
|
"name": "alghafa:multiple_choice_sentiment_task", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_sentiment_task", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1720, |
|
"effective_num_docs": 1720, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_exams": { |
|
"name": "arabic_exams", |
|
"prompt_function": "arabic_exams", |
|
"hf_repo": "OALL/Arabic_EXAMS", |
|
"hf_subset": "default", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 537, |
|
"effective_num_docs": 537, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:abstract_algebra": { |
|
"name": "arabic_mmlu:abstract_algebra", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "abstract_algebra", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:anatomy": { |
|
"name": "arabic_mmlu:anatomy", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "anatomy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 135, |
|
"effective_num_docs": 135, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:astronomy": { |
|
"name": "arabic_mmlu:astronomy", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "astronomy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 152, |
|
"effective_num_docs": 152, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:business_ethics": { |
|
"name": "arabic_mmlu:business_ethics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "business_ethics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:clinical_knowledge": { |
|
"name": "arabic_mmlu:clinical_knowledge", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "clinical_knowledge", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 265, |
|
"effective_num_docs": 265, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_biology": { |
|
"name": "arabic_mmlu:college_biology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_biology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 144, |
|
"effective_num_docs": 144, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_chemistry": { |
|
"name": "arabic_mmlu:college_chemistry", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_chemistry", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_computer_science": { |
|
"name": "arabic_mmlu:college_computer_science", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_computer_science", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_mathematics": { |
|
"name": "arabic_mmlu:college_mathematics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_medicine": { |
|
"name": "arabic_mmlu:college_medicine", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_medicine", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 173, |
|
"effective_num_docs": 173, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:college_physics": { |
|
"name": "arabic_mmlu:college_physics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "college_physics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 102, |
|
"effective_num_docs": 102, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:computer_security": { |
|
"name": "arabic_mmlu:computer_security", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "computer_security", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:conceptual_physics": { |
|
"name": "arabic_mmlu:conceptual_physics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "conceptual_physics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 235, |
|
"effective_num_docs": 235, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:econometrics": { |
|
"name": "arabic_mmlu:econometrics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "econometrics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 114, |
|
"effective_num_docs": 114, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:electrical_engineering": { |
|
"name": "arabic_mmlu:electrical_engineering", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "electrical_engineering", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:elementary_mathematics": { |
|
"name": "arabic_mmlu:elementary_mathematics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "elementary_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 378, |
|
"effective_num_docs": 378, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:formal_logic": { |
|
"name": "arabic_mmlu:formal_logic", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "formal_logic", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 126, |
|
"effective_num_docs": 126, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:global_facts": { |
|
"name": "arabic_mmlu:global_facts", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "global_facts", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_biology": { |
|
"name": "arabic_mmlu:high_school_biology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_biology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 310, |
|
"effective_num_docs": 310, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_chemistry": { |
|
"name": "arabic_mmlu:high_school_chemistry", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_chemistry", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 203, |
|
"effective_num_docs": 203, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_computer_science": { |
|
"name": "arabic_mmlu:high_school_computer_science", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_computer_science", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_european_history": { |
|
"name": "arabic_mmlu:high_school_european_history", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_european_history", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 165, |
|
"effective_num_docs": 165, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_geography": { |
|
"name": "arabic_mmlu:high_school_geography", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_geography", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 198, |
|
"effective_num_docs": 198, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_government_and_politics": { |
|
"name": "arabic_mmlu:high_school_government_and_politics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_government_and_politics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 193, |
|
"effective_num_docs": 193, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_macroeconomics": { |
|
"name": "arabic_mmlu:high_school_macroeconomics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_macroeconomics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 390, |
|
"effective_num_docs": 390, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_mathematics": { |
|
"name": "arabic_mmlu:high_school_mathematics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 270, |
|
"effective_num_docs": 270, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_microeconomics": { |
|
"name": "arabic_mmlu:high_school_microeconomics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_microeconomics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 238, |
|
"effective_num_docs": 238, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_physics": { |
|
"name": "arabic_mmlu:high_school_physics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_physics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 151, |
|
"effective_num_docs": 151, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_psychology": { |
|
"name": "arabic_mmlu:high_school_psychology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_psychology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 545, |
|
"effective_num_docs": 545, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_statistics": { |
|
"name": "arabic_mmlu:high_school_statistics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_statistics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 216, |
|
"effective_num_docs": 216, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_us_history": { |
|
"name": "arabic_mmlu:high_school_us_history", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_us_history", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 204, |
|
"effective_num_docs": 204, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_world_history": { |
|
"name": "arabic_mmlu:high_school_world_history", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "high_school_world_history", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 237, |
|
"effective_num_docs": 237, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:human_aging": { |
|
"name": "arabic_mmlu:human_aging", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "human_aging", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 223, |
|
"effective_num_docs": 223, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:human_sexuality": { |
|
"name": "arabic_mmlu:human_sexuality", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "human_sexuality", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 131, |
|
"effective_num_docs": 131, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:international_law": { |
|
"name": "arabic_mmlu:international_law", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "international_law", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 121, |
|
"effective_num_docs": 121, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:jurisprudence": { |
|
"name": "arabic_mmlu:jurisprudence", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "jurisprudence", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 108, |
|
"effective_num_docs": 108, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:logical_fallacies": { |
|
"name": "arabic_mmlu:logical_fallacies", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "logical_fallacies", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 163, |
|
"effective_num_docs": 163, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:machine_learning": { |
|
"name": "arabic_mmlu:machine_learning", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "machine_learning", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 112, |
|
"effective_num_docs": 112, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:management": { |
|
"name": "arabic_mmlu:management", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "management", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 103, |
|
"effective_num_docs": 103, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:marketing": { |
|
"name": "arabic_mmlu:marketing", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "marketing", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 234, |
|
"effective_num_docs": 234, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:medical_genetics": { |
|
"name": "arabic_mmlu:medical_genetics", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "medical_genetics", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:miscellaneous": { |
|
"name": "arabic_mmlu:miscellaneous", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "miscellaneous", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 783, |
|
"effective_num_docs": 783, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:moral_disputes": { |
|
"name": "arabic_mmlu:moral_disputes", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "moral_disputes", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 346, |
|
"effective_num_docs": 346, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:moral_scenarios": { |
|
"name": "arabic_mmlu:moral_scenarios", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "moral_scenarios", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:nutrition": { |
|
"name": "arabic_mmlu:nutrition", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "nutrition", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 306, |
|
"effective_num_docs": 306, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:philosophy": { |
|
"name": "arabic_mmlu:philosophy", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "philosophy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 311, |
|
"effective_num_docs": 311, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:prehistory": { |
|
"name": "arabic_mmlu:prehistory", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "prehistory", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 324, |
|
"effective_num_docs": 324, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:professional_accounting": { |
|
"name": "arabic_mmlu:professional_accounting", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "professional_accounting", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 282, |
|
"effective_num_docs": 282, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:professional_law": { |
|
"name": "arabic_mmlu:professional_law", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "professional_law", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1534, |
|
"effective_num_docs": 1534, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:professional_medicine": { |
|
"name": "arabic_mmlu:professional_medicine", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "professional_medicine", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 272, |
|
"effective_num_docs": 272, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:professional_psychology": { |
|
"name": "arabic_mmlu:professional_psychology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "professional_psychology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 612, |
|
"effective_num_docs": 612, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:public_relations": { |
|
"name": "arabic_mmlu:public_relations", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "public_relations", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 110, |
|
"effective_num_docs": 110, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:security_studies": { |
|
"name": "arabic_mmlu:security_studies", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "security_studies", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 245, |
|
"effective_num_docs": 245, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:sociology": { |
|
"name": "arabic_mmlu:sociology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "sociology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 201, |
|
"effective_num_docs": 201, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:us_foreign_policy": { |
|
"name": "arabic_mmlu:us_foreign_policy", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "us_foreign_policy", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:virology": { |
|
"name": "arabic_mmlu:virology", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "virology", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 166, |
|
"effective_num_docs": 166, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:world_religions": { |
|
"name": "arabic_mmlu:world_religions", |
|
"prompt_function": "mmlu_arabic", |
|
"hf_repo": "OALL/Arabic_MMLU", |
|
"hf_subset": "world_religions", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"dev" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 171, |
|
"effective_num_docs": 171, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arc_challenge_okapi_ar": { |
|
"name": "arc_challenge_okapi_ar", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "arc_challenge_okapi_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1160, |
|
"effective_num_docs": 1160, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|arc_easy_ar": { |
|
"name": "arc_easy_ar", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "arc_easy_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2364, |
|
"effective_num_docs": 2364, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|boolq_ar": { |
|
"name": "boolq_ar", |
|
"prompt_function": "boolq_prompt_arabic", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "boolq_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 3260, |
|
"effective_num_docs": 3260, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|copa_ext_ar": { |
|
"name": "copa_ext_ar", |
|
"prompt_function": "copa_prompt_arabic", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "copa_ext_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 90, |
|
"effective_num_docs": 90, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|hellaswag_okapi_ar": { |
|
"name": "hellaswag_okapi_ar", |
|
"prompt_function": "hellaswag_prompt_arabic", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "hellaswag_okapi_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 9171, |
|
"effective_num_docs": 9171, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|openbook_qa_ext_ar": { |
|
"name": "openbook_qa_ext_ar", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "openbook_qa_ext_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 495, |
|
"effective_num_docs": 495, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|piqa_ar": { |
|
"name": "piqa_ar", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "piqa_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1833, |
|
"effective_num_docs": 1833, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|race_ar": { |
|
"name": "race_ar", |
|
"prompt_function": "alghafa_prompt", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "race_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4929, |
|
"effective_num_docs": 4929, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|sciq_ar": { |
|
"name": "sciq_ar", |
|
"prompt_function": "sciq_prompt_arabic", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "sciq_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 995, |
|
"effective_num_docs": 995, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"community|toxigen_ar": { |
|
"name": "toxigen_ar", |
|
"prompt_function": "toxigen_prompt_arabic", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Translated", |
|
"hf_subset": "toxigen_ar", |
|
"metric": [ |
|
"loglikelihood_acc_norm" |
|
], |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 935, |
|
"effective_num_docs": 935, |
|
"trust_dataset": null, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"lighteval|xstory_cloze:ar": { |
|
"name": "xstory_cloze:ar", |
|
"prompt_function": "storycloze", |
|
"hf_repo": "juletxara/xstory_cloze", |
|
"hf_subset": "ar", |
|
"metric": [ |
|
"loglikelihood_acc" |
|
], |
|
"hf_avail_splits": [ |
|
"training", |
|
"eval" |
|
], |
|
"evaluation_splits": [ |
|
"eval" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": [ |
|
"\n" |
|
], |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"lighteval" |
|
], |
|
"original_num_docs": 1511, |
|
"effective_num_docs": 1511, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
} |
|
}, |
|
"summary_tasks": { |
|
"community|acva:Algeria|0": { |
|
"hashes": { |
|
"hash_examples": "da5a3003cd46f6f9", |
|
"hash_full_prompts": "da5a3003cd46f6f9", |
|
"hash_input_tokens": "1c19028ba0ae3d4e", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Ancient_Egypt|0": { |
|
"hashes": { |
|
"hash_examples": "52d6f767fede195b", |
|
"hash_full_prompts": "52d6f767fede195b", |
|
"hash_input_tokens": "b757194a85ab7a18", |
|
"hash_cont_tokens": "5f7d1751e6ad9399" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 315, |
|
"padded": 630, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arab_Empire|0": { |
|
"hashes": { |
|
"hash_examples": "8dacff6a79804a75", |
|
"hash_full_prompts": "8dacff6a79804a75", |
|
"hash_input_tokens": "fbd0e6f13bb014fe", |
|
"hash_cont_tokens": "8783a9653c6992cc" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 265, |
|
"padded": 530, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Architecture|0": { |
|
"hashes": { |
|
"hash_examples": "df286cd862d9f6bb", |
|
"hash_full_prompts": "df286cd862d9f6bb", |
|
"hash_input_tokens": "4a53cb654ed5b732", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Art|0": { |
|
"hashes": { |
|
"hash_examples": "112883d764118a49", |
|
"hash_full_prompts": "112883d764118a49", |
|
"hash_input_tokens": "e15986914cf33547", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Astronomy|0": { |
|
"hashes": { |
|
"hash_examples": "20dcdf2454bf8671", |
|
"hash_full_prompts": "20dcdf2454bf8671", |
|
"hash_input_tokens": "a40e10c8b1133e87", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Calligraphy|0": { |
|
"hashes": { |
|
"hash_examples": "3a9f9d1ebe868a15", |
|
"hash_full_prompts": "3a9f9d1ebe868a15", |
|
"hash_input_tokens": "7ba7c0ab446b5b0b", |
|
"hash_cont_tokens": "1e63d5b9bb8d45b9" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 255, |
|
"padded": 510, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Ceremony|0": { |
|
"hashes": { |
|
"hash_examples": "c927630f8d2f44da", |
|
"hash_full_prompts": "c927630f8d2f44da", |
|
"hash_input_tokens": "54e9aca34da1ae3d", |
|
"hash_cont_tokens": "587187a4b9ec6b9f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 185, |
|
"padded": 370, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Clothing|0": { |
|
"hashes": { |
|
"hash_examples": "6ad0740c2ac6ac92", |
|
"hash_full_prompts": "6ad0740c2ac6ac92", |
|
"hash_input_tokens": "8946e008315a59a1", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Culture|0": { |
|
"hashes": { |
|
"hash_examples": "2177bd857ad872ae", |
|
"hash_full_prompts": "2177bd857ad872ae", |
|
"hash_input_tokens": "2ec7106e4e241413", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Food|0": { |
|
"hashes": { |
|
"hash_examples": "a6ada65b71d7c9c5", |
|
"hash_full_prompts": "a6ada65b71d7c9c5", |
|
"hash_input_tokens": "6e83eafb88147dfe", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Funeral|0": { |
|
"hashes": { |
|
"hash_examples": "fcee39dc29eaae91", |
|
"hash_full_prompts": "fcee39dc29eaae91", |
|
"hash_input_tokens": "4e2c065a1651fd3f", |
|
"hash_cont_tokens": "a53062899e4fc8e9" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 95, |
|
"padded": 190, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Geography|0": { |
|
"hashes": { |
|
"hash_examples": "d36eda7c89231c02", |
|
"hash_full_prompts": "d36eda7c89231c02", |
|
"hash_input_tokens": "8cf3dfb5f1d97817", |
|
"hash_cont_tokens": "c15aaf8e70b82ff0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 290, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_History|0": { |
|
"hashes": { |
|
"hash_examples": "6354ac0d6db6a5fc", |
|
"hash_full_prompts": "6354ac0d6db6a5fc", |
|
"hash_input_tokens": "dfd1349be420dad9", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Language_Origin|0": { |
|
"hashes": { |
|
"hash_examples": "ddc967c8aca34402", |
|
"hash_full_prompts": "ddc967c8aca34402", |
|
"hash_input_tokens": "2ca3d78293624a9c", |
|
"hash_cont_tokens": "a53062899e4fc8e9" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 95, |
|
"padded": 190, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Literature|0": { |
|
"hashes": { |
|
"hash_examples": "4305379fd46be5d8", |
|
"hash_full_prompts": "4305379fd46be5d8", |
|
"hash_input_tokens": "177ca7ca0a64f733", |
|
"hash_cont_tokens": "c15aaf8e70b82ff0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 290, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Math|0": { |
|
"hashes": { |
|
"hash_examples": "dec621144f4d28be", |
|
"hash_full_prompts": "dec621144f4d28be", |
|
"hash_input_tokens": "04bab064157cd22d", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Medicine|0": { |
|
"hashes": { |
|
"hash_examples": "2b344cdae9495ff2", |
|
"hash_full_prompts": "2b344cdae9495ff2", |
|
"hash_input_tokens": "c8349b471ba7ee2b", |
|
"hash_cont_tokens": "c15aaf8e70b82ff0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 290, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Music|0": { |
|
"hashes": { |
|
"hash_examples": "0c54624d881944ce", |
|
"hash_full_prompts": "0c54624d881944ce", |
|
"hash_input_tokens": "ddc86c5d02ee6008", |
|
"hash_cont_tokens": "1859e6278efcea1b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 139, |
|
"padded": 278, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Ornament|0": { |
|
"hashes": { |
|
"hash_examples": "251a4a84289d8bc1", |
|
"hash_full_prompts": "251a4a84289d8bc1", |
|
"hash_input_tokens": "303cd32276205eea", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Philosophy|0": { |
|
"hashes": { |
|
"hash_examples": "3f86fb9c94c13d22", |
|
"hash_full_prompts": "3f86fb9c94c13d22", |
|
"hash_input_tokens": "5ef332a135a2844d", |
|
"hash_cont_tokens": "c15aaf8e70b82ff0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 290, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Physics_and_Chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "8fec65af3695b62a", |
|
"hash_full_prompts": "8fec65af3695b62a", |
|
"hash_input_tokens": "f3f528c0270759df", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Arabic_Wedding|0": { |
|
"hashes": { |
|
"hash_examples": "9cc3477184d7a4b8", |
|
"hash_full_prompts": "9cc3477184d7a4b8", |
|
"hash_input_tokens": "26276fc3b213ec7a", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Bahrain|0": { |
|
"hashes": { |
|
"hash_examples": "c92e803a0fa8b9e2", |
|
"hash_full_prompts": "c92e803a0fa8b9e2", |
|
"hash_input_tokens": "b370fdfeb6dd3db9", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Comoros|0": { |
|
"hashes": { |
|
"hash_examples": "06e5d4bba8e54cae", |
|
"hash_full_prompts": "06e5d4bba8e54cae", |
|
"hash_input_tokens": "8569395657b23eef", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Egypt_modern|0": { |
|
"hashes": { |
|
"hash_examples": "c6ec369164f93446", |
|
"hash_full_prompts": "c6ec369164f93446", |
|
"hash_input_tokens": "76b84713cb53ae16", |
|
"hash_cont_tokens": "a53062899e4fc8e9" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 95, |
|
"padded": 190, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromAncientEgypt|0": { |
|
"hashes": { |
|
"hash_examples": "b9d56d74818b9bd4", |
|
"hash_full_prompts": "b9d56d74818b9bd4", |
|
"hash_input_tokens": "40f8cc70da0b36cf", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromByzantium|0": { |
|
"hashes": { |
|
"hash_examples": "5316c9624e7e59b8", |
|
"hash_full_prompts": "5316c9624e7e59b8", |
|
"hash_input_tokens": "b2c1173b9f1ad066", |
|
"hash_cont_tokens": "c15aaf8e70b82ff0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 290, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromChina|0": { |
|
"hashes": { |
|
"hash_examples": "87894bce95a56411", |
|
"hash_full_prompts": "87894bce95a56411", |
|
"hash_input_tokens": "e42e20b94d2e8a28", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromGreece|0": { |
|
"hashes": { |
|
"hash_examples": "0baa78a27e469312", |
|
"hash_full_prompts": "0baa78a27e469312", |
|
"hash_input_tokens": "e80678551eb1e84a", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromIslam|0": { |
|
"hashes": { |
|
"hash_examples": "0c2532cde6541ff2", |
|
"hash_full_prompts": "0c2532cde6541ff2", |
|
"hash_input_tokens": "4939b377b42738fc", |
|
"hash_cont_tokens": "c15aaf8e70b82ff0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 290, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromPersia|0": { |
|
"hashes": { |
|
"hash_examples": "efcd8112dc53c6e5", |
|
"hash_full_prompts": "efcd8112dc53c6e5", |
|
"hash_input_tokens": "d02776b4948419ed", |
|
"hash_cont_tokens": "7305f8930f27cd4e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 175, |
|
"padded": 350, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:InfluenceFromRome|0": { |
|
"hashes": { |
|
"hash_examples": "9db61480e2e85fd3", |
|
"hash_full_prompts": "9db61480e2e85fd3", |
|
"hash_input_tokens": "8d1991d7fc194b08", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Iraq|0": { |
|
"hashes": { |
|
"hash_examples": "96dac3dfa8d2f41f", |
|
"hash_full_prompts": "96dac3dfa8d2f41f", |
|
"hash_input_tokens": "2cf3a34c1d055be1", |
|
"hash_cont_tokens": "79b22c57a64b25bf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 85, |
|
"padded": 170, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Islam_Education|0": { |
|
"hashes": { |
|
"hash_examples": "0d80355f6a4cb51b", |
|
"hash_full_prompts": "0d80355f6a4cb51b", |
|
"hash_input_tokens": "712d3282227c9f42", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Islam_branches_and_schools|0": { |
|
"hashes": { |
|
"hash_examples": "5cedce1be2c3ad50", |
|
"hash_full_prompts": "5cedce1be2c3ad50", |
|
"hash_input_tokens": "4a8b01f8fbf1ab1f", |
|
"hash_cont_tokens": "7305f8930f27cd4e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 175, |
|
"padded": 350, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Islamic_law_system|0": { |
|
"hashes": { |
|
"hash_examples": "c0e6db8bc84e105e", |
|
"hash_full_prompts": "c0e6db8bc84e105e", |
|
"hash_input_tokens": "4a6727893d06a57c", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Jordan|0": { |
|
"hashes": { |
|
"hash_examples": "33deb5b4e5ddd6a1", |
|
"hash_full_prompts": "33deb5b4e5ddd6a1", |
|
"hash_input_tokens": "93d22337d0642619", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Kuwait|0": { |
|
"hashes": { |
|
"hash_examples": "eb41773346d7c46c", |
|
"hash_full_prompts": "eb41773346d7c46c", |
|
"hash_input_tokens": "33435199ae99a205", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Lebanon|0": { |
|
"hashes": { |
|
"hash_examples": "25932dbf4c13d34f", |
|
"hash_full_prompts": "25932dbf4c13d34f", |
|
"hash_input_tokens": "b2f6d0b7637a9fbc", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Libya|0": { |
|
"hashes": { |
|
"hash_examples": "f2c4db63cd402926", |
|
"hash_full_prompts": "f2c4db63cd402926", |
|
"hash_input_tokens": "863af05ecf28d69e", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Mauritania|0": { |
|
"hashes": { |
|
"hash_examples": "8723ab5fdf286b54", |
|
"hash_full_prompts": "8723ab5fdf286b54", |
|
"hash_input_tokens": "057c3b5378a7900c", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Mesopotamia_civilization|0": { |
|
"hashes": { |
|
"hash_examples": "c33f5502a6130ca9", |
|
"hash_full_prompts": "c33f5502a6130ca9", |
|
"hash_input_tokens": "b4350173216a50b6", |
|
"hash_cont_tokens": "d6ed79461fc11fb7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 155, |
|
"padded": 310, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Morocco|0": { |
|
"hashes": { |
|
"hash_examples": "588a5ed27904b1ae", |
|
"hash_full_prompts": "588a5ed27904b1ae", |
|
"hash_input_tokens": "6e3de590d782e438", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Oman|0": { |
|
"hashes": { |
|
"hash_examples": "d447c52b94248b69", |
|
"hash_full_prompts": "d447c52b94248b69", |
|
"hash_input_tokens": "2c019dc2fdc3093b", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Palestine|0": { |
|
"hashes": { |
|
"hash_examples": "19197e076ad14ff5", |
|
"hash_full_prompts": "19197e076ad14ff5", |
|
"hash_input_tokens": "90854e016c03cf1b", |
|
"hash_cont_tokens": "79b22c57a64b25bf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 85, |
|
"padded": 170, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Qatar|0": { |
|
"hashes": { |
|
"hash_examples": "cf0736fa185b28f6", |
|
"hash_full_prompts": "cf0736fa185b28f6", |
|
"hash_input_tokens": "3c148edf45bfba37", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Saudi_Arabia|0": { |
|
"hashes": { |
|
"hash_examples": "69beda6e1b85a08d", |
|
"hash_full_prompts": "69beda6e1b85a08d", |
|
"hash_input_tokens": "3084f035930f11a6", |
|
"hash_cont_tokens": "b3ed374c07f6a1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 195, |
|
"padded": 390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Somalia|0": { |
|
"hashes": { |
|
"hash_examples": "b387940c65784fbf", |
|
"hash_full_prompts": "b387940c65784fbf", |
|
"hash_input_tokens": "41e0252ba3505efd", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Sudan|0": { |
|
"hashes": { |
|
"hash_examples": "e02c32b9d2dd0c3f", |
|
"hash_full_prompts": "e02c32b9d2dd0c3f", |
|
"hash_input_tokens": "243854639755237c", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Syria|0": { |
|
"hashes": { |
|
"hash_examples": "60a6f8fe73bda4bb", |
|
"hash_full_prompts": "60a6f8fe73bda4bb", |
|
"hash_input_tokens": "00d38794e57ffae9", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Tunisia|0": { |
|
"hashes": { |
|
"hash_examples": "34bb15d3830c5649", |
|
"hash_full_prompts": "34bb15d3830c5649", |
|
"hash_input_tokens": "1a0dd52844578aa7", |
|
"hash_cont_tokens": "e32fa30311c5caac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 45, |
|
"padded": 90, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:United_Arab_Emirates|0": { |
|
"hashes": { |
|
"hash_examples": "98a0ba78172718ce", |
|
"hash_full_prompts": "98a0ba78172718ce", |
|
"hash_input_tokens": "fbcb4595ee909eb1", |
|
"hash_cont_tokens": "79b22c57a64b25bf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 85, |
|
"padded": 170, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:Yemen|0": { |
|
"hashes": { |
|
"hash_examples": "18e9bcccbb4ced7a", |
|
"hash_full_prompts": "18e9bcccbb4ced7a", |
|
"hash_input_tokens": "878d75114814eb28", |
|
"hash_cont_tokens": "546600309db314db" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 10, |
|
"padded": 20, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:communication|0": { |
|
"hashes": { |
|
"hash_examples": "9ff28ab5eab5c97b", |
|
"hash_full_prompts": "9ff28ab5eab5c97b", |
|
"hash_input_tokens": "8133f565b946881d", |
|
"hash_cont_tokens": "922348efeac48904" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 364, |
|
"padded": 728, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:computer_and_phone|0": { |
|
"hashes": { |
|
"hash_examples": "37bac2f086aaf6c2", |
|
"hash_full_prompts": "37bac2f086aaf6c2", |
|
"hash_input_tokens": "e8aa23fc4d3c76df", |
|
"hash_cont_tokens": "21c566d247b85282" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 295, |
|
"padded": 590, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:daily_life|0": { |
|
"hashes": { |
|
"hash_examples": "bf07363c1c252e2f", |
|
"hash_full_prompts": "bf07363c1c252e2f", |
|
"hash_input_tokens": "03fa2a5e6d1417bc", |
|
"hash_cont_tokens": "35a4aa65a4889d29" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 337, |
|
"padded": 674, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|acva:entertainment|0": { |
|
"hashes": { |
|
"hash_examples": "37077bc00f0ac56a", |
|
"hash_full_prompts": "37077bc00f0ac56a", |
|
"hash_input_tokens": "f6abb817e65929b3", |
|
"hash_cont_tokens": "21c566d247b85282" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 295, |
|
"padded": 590, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:mcq_exams_test_ar|0": { |
|
"hashes": { |
|
"hash_examples": "c07a5e78c5c0b8fe", |
|
"hash_full_prompts": "c07a5e78c5c0b8fe", |
|
"hash_input_tokens": "2f297e98e69e9dd9", |
|
"hash_cont_tokens": "d625c55cecf56c98" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 557, |
|
"padded": 2228, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:meta_ar_dialects|0": { |
|
"hashes": { |
|
"hash_examples": "c0b6081f83e14064", |
|
"hash_full_prompts": "c0b6081f83e14064", |
|
"hash_input_tokens": "d6c95d00c25f8111", |
|
"hash_cont_tokens": "dc457b275d6de4e2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 5395, |
|
"padded": 21580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:meta_ar_msa|0": { |
|
"hashes": { |
|
"hash_examples": "64eb78a7c5b7484b", |
|
"hash_full_prompts": "64eb78a7c5b7484b", |
|
"hash_input_tokens": "3749d441a107fc96", |
|
"hash_cont_tokens": "046b55645def694d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 895, |
|
"padded": 3580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { |
|
"hashes": { |
|
"hash_examples": "54fc3502c1c02c06", |
|
"hash_full_prompts": "54fc3502c1c02c06", |
|
"hash_input_tokens": "9c8eb46cfd51884a", |
|
"hash_cont_tokens": "21a564f3caaf138d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 75, |
|
"padded": 150, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { |
|
"hashes": { |
|
"hash_examples": "46572d83696552ae", |
|
"hash_full_prompts": "46572d83696552ae", |
|
"hash_input_tokens": "d60aadd23ab3bb22", |
|
"hash_cont_tokens": "fb98c023b6c1db18" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 150, |
|
"padded": 750, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { |
|
"hashes": { |
|
"hash_examples": "f430d97ff715bc1c", |
|
"hash_full_prompts": "f430d97ff715bc1c", |
|
"hash_input_tokens": "c52e06fe8fea7db3", |
|
"hash_cont_tokens": "0f0bc76437c61af7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 150, |
|
"padded": 750, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { |
|
"hashes": { |
|
"hash_examples": "6b70a7416584f98c", |
|
"hash_full_prompts": "6b70a7416584f98c", |
|
"hash_input_tokens": "023b0948b30f95a9", |
|
"hash_cont_tokens": "b0cc8cf11c102265" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 7995, |
|
"padded": 15990, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": { |
|
"hashes": { |
|
"hash_examples": "bc2005cc9d2f436e", |
|
"hash_full_prompts": "bc2005cc9d2f436e", |
|
"hash_input_tokens": "acd12e362c97c170", |
|
"hash_cont_tokens": "7c4c9000eafebe43" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 5995, |
|
"padded": 17985, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task|0": { |
|
"hashes": { |
|
"hash_examples": "6fb0e254ea5945d8", |
|
"hash_full_prompts": "6fb0e254ea5945d8", |
|
"hash_input_tokens": "2de20609ac65cbda", |
|
"hash_cont_tokens": "02aed8bf71d25cfb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1720, |
|
"padded": 5160, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_exams|0": { |
|
"hashes": { |
|
"hash_examples": "6d721df351722656", |
|
"hash_full_prompts": "6d721df351722656", |
|
"hash_input_tokens": "65d48a7031d92cf7", |
|
"hash_cont_tokens": "75d35d1d02a02179" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 537, |
|
"padded": 2148, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:abstract_algebra|0": { |
|
"hashes": { |
|
"hash_examples": "f2ddca8f45c0a511", |
|
"hash_full_prompts": "f2ddca8f45c0a511", |
|
"hash_input_tokens": "1a8dddc6133c955c", |
|
"hash_cont_tokens": "00a223315c15a9ce" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:anatomy|0": { |
|
"hashes": { |
|
"hash_examples": "dfdbc1b83107668d", |
|
"hash_full_prompts": "dfdbc1b83107668d", |
|
"hash_input_tokens": "5d962662d33ec648", |
|
"hash_cont_tokens": "e7cfbda8199e7611" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 135, |
|
"padded": 540, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:astronomy|0": { |
|
"hashes": { |
|
"hash_examples": "9736a606002a848e", |
|
"hash_full_prompts": "9736a606002a848e", |
|
"hash_input_tokens": "9ab2e5c1e04075eb", |
|
"hash_cont_tokens": "d5464986c8d97559" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 152, |
|
"padded": 608, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:business_ethics|0": { |
|
"hashes": { |
|
"hash_examples": "735e452fbb6dc63d", |
|
"hash_full_prompts": "735e452fbb6dc63d", |
|
"hash_input_tokens": "2dbdf40984e408cd", |
|
"hash_cont_tokens": "00a223315c15a9ce" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:clinical_knowledge|0": { |
|
"hashes": { |
|
"hash_examples": "6ab0ca4da98aedcf", |
|
"hash_full_prompts": "6ab0ca4da98aedcf", |
|
"hash_input_tokens": "1ad150f1c8b107f9", |
|
"hash_cont_tokens": "048c2bb287cd81e3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 265, |
|
"padded": 1060, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:college_biology|0": { |
|
"hashes": { |
|
"hash_examples": "17e4e390848018a4", |
|
"hash_full_prompts": "17e4e390848018a4", |
|
"hash_input_tokens": "12017f6c266bbff6", |
|
"hash_cont_tokens": "f77b26b202ef9b2b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 144, |
|
"padded": 576, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:college_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "4abb169f6dfd234b", |
|
"hash_full_prompts": "4abb169f6dfd234b", |
|
"hash_input_tokens": "5b9480ac4e234e1e", |
|
"hash_cont_tokens": "00a223315c15a9ce" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:college_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "a369e2e941358a1e", |
|
"hash_full_prompts": "a369e2e941358a1e", |
|
"hash_input_tokens": "840a40e045d3dc9c", |
|
"hash_cont_tokens": "00a223315c15a9ce" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:college_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "d7be03b8b6020bff", |
|
"hash_full_prompts": "d7be03b8b6020bff", |
|
"hash_input_tokens": "a808ecc977305cb0", |
|
"hash_cont_tokens": "00a223315c15a9ce" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:college_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "0518a00f097346bf", |
|
"hash_full_prompts": "0518a00f097346bf", |
|
"hash_input_tokens": "be30229bf1476c1c", |
|
"hash_cont_tokens": "76fa38a751ac9bc2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 173, |
|
"padded": 692, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:college_physics|0": { |
|
"hashes": { |
|
"hash_examples": "5d842cd49bc70e12", |
|
"hash_full_prompts": "5d842cd49bc70e12", |
|
"hash_input_tokens": "234b865ce85b1fec", |
|
"hash_cont_tokens": "c786ae78224e9572" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 102, |
|
"padded": 408, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:computer_security|0": { |
|
"hashes": { |
|
"hash_examples": "8e85d9f85be9b32f", |
|
"hash_full_prompts": "8e85d9f85be9b32f", |
|
"hash_input_tokens": "b7f02e9f44cb544f", |
|
"hash_cont_tokens": "00a223315c15a9ce" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:conceptual_physics|0": { |
|
"hashes": { |
|
"hash_examples": "7964b55a0a49502b", |
|
"hash_full_prompts": "7964b55a0a49502b", |
|
"hash_input_tokens": "3ee529be05ca435a", |
|
"hash_cont_tokens": "cdabd8b4dc5070a7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 235, |
|
"padded": 940, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:econometrics|0": { |
|
"hashes": { |
|
"hash_examples": "1e192eae38347257", |
|
"hash_full_prompts": "1e192eae38347257", |
|
"hash_input_tokens": "821aae6999eda0df", |
|
"hash_cont_tokens": "79f7b7012bc5cae3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 114, |
|
"padded": 456, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:electrical_engineering|0": { |
|
"hashes": { |
|
"hash_examples": "cf97671d5c441da1", |
|
"hash_full_prompts": "cf97671d5c441da1", |
|
"hash_input_tokens": "e046c08e37d529b2", |
|
"hash_cont_tokens": "98c8c04cfecc2e3a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:elementary_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "6f49107ed43c40c5", |
|
"hash_full_prompts": "6f49107ed43c40c5", |
|
"hash_input_tokens": "1002c8f7d757a326", |
|
"hash_cont_tokens": "55f0c3be194d2e8f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 378, |
|
"padded": 1512, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:formal_logic|0": { |
|
"hashes": { |
|
"hash_examples": "7922c376008ba77b", |
|
"hash_full_prompts": "7922c376008ba77b", |
|
"hash_input_tokens": "7b58c8d21408130e", |
|
"hash_cont_tokens": "e715b1eaed2453d1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 126, |
|
"padded": 504, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:global_facts|0": { |
|
"hashes": { |
|
"hash_examples": "11f9813185047d5b", |
|
"hash_full_prompts": "11f9813185047d5b", |
|
"hash_input_tokens": "4bf7d893bca659dd", |
|
"hash_cont_tokens": "00a223315c15a9ce" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_biology|0": { |
|
"hashes": { |
|
"hash_examples": "2a804b1d90cbe66e", |
|
"hash_full_prompts": "2a804b1d90cbe66e", |
|
"hash_input_tokens": "50f07619c4b56283", |
|
"hash_cont_tokens": "b5a22be1545a5885" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 310, |
|
"padded": 1240, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "0032168adabc53b4", |
|
"hash_full_prompts": "0032168adabc53b4", |
|
"hash_input_tokens": "5f55244a4982df74", |
|
"hash_cont_tokens": "41f59f267986db24" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 203, |
|
"padded": 812, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "f2fb8740f9df980f", |
|
"hash_full_prompts": "f2fb8740f9df980f", |
|
"hash_input_tokens": "5b5a8857e8029fe6", |
|
"hash_cont_tokens": "00a223315c15a9ce" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_european_history|0": { |
|
"hashes": { |
|
"hash_examples": "73509021e7e66435", |
|
"hash_full_prompts": "73509021e7e66435", |
|
"hash_input_tokens": "830d3e7cd5b8c279", |
|
"hash_cont_tokens": "ba925766deaa3c15" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 165, |
|
"padded": 660, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_geography|0": { |
|
"hashes": { |
|
"hash_examples": "9e08d1894940ff42", |
|
"hash_full_prompts": "9e08d1894940ff42", |
|
"hash_input_tokens": "46d2f31bd3e891f3", |
|
"hash_cont_tokens": "ba892eb0674f32f0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 198, |
|
"padded": 792, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_government_and_politics|0": { |
|
"hashes": { |
|
"hash_examples": "64b7e97817ca6c76", |
|
"hash_full_prompts": "64b7e97817ca6c76", |
|
"hash_input_tokens": "5f084df02b040abb", |
|
"hash_cont_tokens": "da6ef64a42a0438e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 193, |
|
"padded": 772, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_macroeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "9f582da8534bd2ef", |
|
"hash_full_prompts": "9f582da8534bd2ef", |
|
"hash_input_tokens": "8a1889a0b1175c11", |
|
"hash_cont_tokens": "6660aeaa8368b295" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 390, |
|
"padded": 1560, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "fd54f1c10d423c51", |
|
"hash_full_prompts": "fd54f1c10d423c51", |
|
"hash_input_tokens": "d3f84c81f1a554bd", |
|
"hash_cont_tokens": "07575f7140327432" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 270, |
|
"padded": 1080, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_microeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "7037896925aaf42f", |
|
"hash_full_prompts": "7037896925aaf42f", |
|
"hash_input_tokens": "a1216906bb761c32", |
|
"hash_cont_tokens": "19bfd23905b3bec3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 238, |
|
"padded": 952, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_physics|0": { |
|
"hashes": { |
|
"hash_examples": "60c3776215167dae", |
|
"hash_full_prompts": "60c3776215167dae", |
|
"hash_input_tokens": "9558f5d1462bcfbf", |
|
"hash_cont_tokens": "0562b521128fbe6b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 151, |
|
"padded": 604, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "61176bfd5da1298f", |
|
"hash_full_prompts": "61176bfd5da1298f", |
|
"hash_input_tokens": "4890f882555e69df", |
|
"hash_cont_tokens": "cfa877943c39a466" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 545, |
|
"padded": 2180, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_statistics|0": { |
|
"hashes": { |
|
"hash_examples": "40dfeebd1ea10f76", |
|
"hash_full_prompts": "40dfeebd1ea10f76", |
|
"hash_input_tokens": "3e4c64a8c5d79aed", |
|
"hash_cont_tokens": "e58c5a09e16fdd84" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 216, |
|
"padded": 864, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_us_history|0": { |
|
"hashes": { |
|
"hash_examples": "03daa510ba917f4d", |
|
"hash_full_prompts": "03daa510ba917f4d", |
|
"hash_input_tokens": "81067a44ae65c486", |
|
"hash_cont_tokens": "b93a8c4f2a2af9f0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 204, |
|
"padded": 816, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:high_school_world_history|0": { |
|
"hashes": { |
|
"hash_examples": "be075ffd579f43c2", |
|
"hash_full_prompts": "be075ffd579f43c2", |
|
"hash_input_tokens": "fb11251c27775e6f", |
|
"hash_cont_tokens": "54a49424057f2e6b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 237, |
|
"padded": 948, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:human_aging|0": { |
|
"hashes": { |
|
"hash_examples": "caa5b69f640bd1ef", |
|
"hash_full_prompts": "caa5b69f640bd1ef", |
|
"hash_input_tokens": "ab4c85d5ffea76a7", |
|
"hash_cont_tokens": "eec7417389927586" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 223, |
|
"padded": 892, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:human_sexuality|0": { |
|
"hashes": { |
|
"hash_examples": "5ed2e38fb25a3767", |
|
"hash_full_prompts": "5ed2e38fb25a3767", |
|
"hash_input_tokens": "42e64c2677dcb620", |
|
"hash_cont_tokens": "9dc6bb3c4ecb3178" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 131, |
|
"padded": 524, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:international_law|0": { |
|
"hashes": { |
|
"hash_examples": "4e3e9e28d1b96484", |
|
"hash_full_prompts": "4e3e9e28d1b96484", |
|
"hash_input_tokens": "4ae677688ac2df1d", |
|
"hash_cont_tokens": "192d0de6ed8ebc9b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 121, |
|
"padded": 484, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:jurisprudence|0": { |
|
"hashes": { |
|
"hash_examples": "e264b755366310b3", |
|
"hash_full_prompts": "e264b755366310b3", |
|
"hash_input_tokens": "fd99a2ae93a65ae0", |
|
"hash_cont_tokens": "75dcdf05908542a5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 108, |
|
"padded": 432, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:logical_fallacies|0": { |
|
"hashes": { |
|
"hash_examples": "a4ab6965a3e38071", |
|
"hash_full_prompts": "a4ab6965a3e38071", |
|
"hash_input_tokens": "8ae8c4efa94f64ff", |
|
"hash_cont_tokens": "ffd60b4e1fa30f63" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 163, |
|
"padded": 652, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:machine_learning|0": { |
|
"hashes": { |
|
"hash_examples": "b92320efa6636b40", |
|
"hash_full_prompts": "b92320efa6636b40", |
|
"hash_input_tokens": "8f92b17628aa9dbe", |
|
"hash_cont_tokens": "4f0e7389d8fa87d9" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 112, |
|
"padded": 448, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:management|0": { |
|
"hashes": { |
|
"hash_examples": "c9ee4872a850fe20", |
|
"hash_full_prompts": "c9ee4872a850fe20", |
|
"hash_input_tokens": "5b98008da739b3d5", |
|
"hash_cont_tokens": "75a8c1e4c452cb1d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 103, |
|
"padded": 412, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:marketing|0": { |
|
"hashes": { |
|
"hash_examples": "0c151b70f6a047e3", |
|
"hash_full_prompts": "0c151b70f6a047e3", |
|
"hash_input_tokens": "63d91f815be55833", |
|
"hash_cont_tokens": "727f87587fa3a95b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 234, |
|
"padded": 936, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:medical_genetics|0": { |
|
"hashes": { |
|
"hash_examples": "513f6cb8fca3a24e", |
|
"hash_full_prompts": "513f6cb8fca3a24e", |
|
"hash_input_tokens": "a6edbf2448d32ce3", |
|
"hash_cont_tokens": "00a223315c15a9ce" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:miscellaneous|0": { |
|
"hashes": { |
|
"hash_examples": "259a190d635331db", |
|
"hash_full_prompts": "259a190d635331db", |
|
"hash_input_tokens": "1ce711888d699b26", |
|
"hash_cont_tokens": "73f36e25a6fef508" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 783, |
|
"padded": 3124, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:moral_disputes|0": { |
|
"hashes": { |
|
"hash_examples": "b85052c48a0b7bc3", |
|
"hash_full_prompts": "b85052c48a0b7bc3", |
|
"hash_input_tokens": "9faebaf4e7c471c8", |
|
"hash_cont_tokens": "6b2c2a581ad40b69" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 346, |
|
"padded": 1384, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:moral_scenarios|0": { |
|
"hashes": { |
|
"hash_examples": "28d0b069ef00dd00", |
|
"hash_full_prompts": "28d0b069ef00dd00", |
|
"hash_input_tokens": "f7c3b7b8889abdce", |
|
"hash_cont_tokens": "af64a7a018654c30" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 895, |
|
"padded": 3580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:nutrition|0": { |
|
"hashes": { |
|
"hash_examples": "00c9bc5f1d305b2f", |
|
"hash_full_prompts": "00c9bc5f1d305b2f", |
|
"hash_input_tokens": "64e2ede846ad5d62", |
|
"hash_cont_tokens": "6e20cc0d504ecac3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 306, |
|
"padded": 1218, |
|
"non_padded": 6, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:philosophy|0": { |
|
"hashes": { |
|
"hash_examples": "a458c08454a3fd5f", |
|
"hash_full_prompts": "a458c08454a3fd5f", |
|
"hash_input_tokens": "23b31139e893c178", |
|
"hash_cont_tokens": "aa4ac747c265c6ee" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 311, |
|
"padded": 1234, |
|
"non_padded": 10, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:prehistory|0": { |
|
"hashes": { |
|
"hash_examples": "d6a0ecbdbb670e9c", |
|
"hash_full_prompts": "d6a0ecbdbb670e9c", |
|
"hash_input_tokens": "4a771ee134473980", |
|
"hash_cont_tokens": "77d8e266e29e78db" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 324, |
|
"padded": 1288, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:professional_accounting|0": { |
|
"hashes": { |
|
"hash_examples": "b4a95fe480b6540e", |
|
"hash_full_prompts": "b4a95fe480b6540e", |
|
"hash_input_tokens": "16dc60cf48d89a11", |
|
"hash_cont_tokens": "e25888a5ba75c974" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 282, |
|
"padded": 1128, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:professional_law|0": { |
|
"hashes": { |
|
"hash_examples": "c2be9651cdbdde3b", |
|
"hash_full_prompts": "c2be9651cdbdde3b", |
|
"hash_input_tokens": "b891efbc1c0b3b44", |
|
"hash_cont_tokens": "d1bfd61f4ba42a8b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1534, |
|
"padded": 6132, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:professional_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "26ce92416288f273", |
|
"hash_full_prompts": "26ce92416288f273", |
|
"hash_input_tokens": "7ebdc5e840946925", |
|
"hash_cont_tokens": "6606c8af95c09c78" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 272, |
|
"padded": 1088, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:professional_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "71ea5f182ea9a641", |
|
"hash_full_prompts": "71ea5f182ea9a641", |
|
"hash_input_tokens": "3e30728a3d56366a", |
|
"hash_cont_tokens": "b061897bffe334ec" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 612, |
|
"padded": 2440, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:public_relations|0": { |
|
"hashes": { |
|
"hash_examples": "125adc21f91f8d77", |
|
"hash_full_prompts": "125adc21f91f8d77", |
|
"hash_input_tokens": "3485feef9e062732", |
|
"hash_cont_tokens": "5c3107c12bceb18e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 110, |
|
"padded": 436, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:security_studies|0": { |
|
"hashes": { |
|
"hash_examples": "3c18b216c099fb26", |
|
"hash_full_prompts": "3c18b216c099fb26", |
|
"hash_input_tokens": "ed9bcdf8542ec5f0", |
|
"hash_cont_tokens": "ce4361df75a6e6a7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 245, |
|
"padded": 980, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:sociology|0": { |
|
"hashes": { |
|
"hash_examples": "3f2a9634cef7417d", |
|
"hash_full_prompts": "3f2a9634cef7417d", |
|
"hash_input_tokens": "03ff81748101997e", |
|
"hash_cont_tokens": "cbbeed1c4c5a128e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 201, |
|
"padded": 802, |
|
"non_padded": 2, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:us_foreign_policy|0": { |
|
"hashes": { |
|
"hash_examples": "22249da54056475e", |
|
"hash_full_prompts": "22249da54056475e", |
|
"hash_input_tokens": "d0b5f78648ba93b9", |
|
"hash_cont_tokens": "00a223315c15a9ce" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:virology|0": { |
|
"hashes": { |
|
"hash_examples": "9d194b9471dc624e", |
|
"hash_full_prompts": "9d194b9471dc624e", |
|
"hash_input_tokens": "2ebce402deeddf9a", |
|
"hash_cont_tokens": "84e4fe7f10383133" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 166, |
|
"padded": 660, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:world_religions|0": { |
|
"hashes": { |
|
"hash_examples": "229e5fe50082b064", |
|
"hash_full_prompts": "229e5fe50082b064", |
|
"hash_input_tokens": "c12e29073be7db38", |
|
"hash_cont_tokens": "a0fac287dd015964" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 171, |
|
"padded": 678, |
|
"non_padded": 6, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arc_challenge_okapi_ar|0": { |
|
"hashes": { |
|
"hash_examples": "ab893807673bc355", |
|
"hash_full_prompts": "ab893807673bc355", |
|
"hash_input_tokens": "b8e694e1073bb8f2", |
|
"hash_cont_tokens": "726609a9298bbada" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1160, |
|
"padded": 4630, |
|
"non_padded": 10, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arc_easy_ar|0": { |
|
"hashes": { |
|
"hash_examples": "acb688624acc3d04", |
|
"hash_full_prompts": "acb688624acc3d04", |
|
"hash_input_tokens": "e425f329969b99ac", |
|
"hash_cont_tokens": "f3c33db663397968" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 2364, |
|
"padded": 9418, |
|
"non_padded": 38, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|boolq_ar|0": { |
|
"hashes": { |
|
"hash_examples": "48355a67867e0c32", |
|
"hash_full_prompts": "48355a67867e0c32", |
|
"hash_input_tokens": "3ea7c179566ad40a", |
|
"hash_cont_tokens": "912f7226532bcd5b" |
|
}, |
|
"truncated": 2, |
|
"non_truncated": 3258, |
|
"padded": 6517, |
|
"non_padded": 3, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|copa_ext_ar|0": { |
|
"hashes": { |
|
"hash_examples": "9bb83301bb72eecf", |
|
"hash_full_prompts": "9bb83301bb72eecf", |
|
"hash_input_tokens": "29d6ee6513c8d7da", |
|
"hash_cont_tokens": "6240ddc895d662c5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 90, |
|
"padded": 180, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|hellaswag_okapi_ar|0": { |
|
"hashes": { |
|
"hash_examples": "6e8cf57a322dfadd", |
|
"hash_full_prompts": "6e8cf57a322dfadd", |
|
"hash_input_tokens": "cdedd5c4ba1fada0", |
|
"hash_cont_tokens": "7b105a1c159805da" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 9171, |
|
"padded": 36632, |
|
"non_padded": 52, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|openbook_qa_ext_ar|0": { |
|
"hashes": { |
|
"hash_examples": "923d41eb0aca93eb", |
|
"hash_full_prompts": "923d41eb0aca93eb", |
|
"hash_input_tokens": "71f7bf26b6852800", |
|
"hash_cont_tokens": "d1357c9db83cc945" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 495, |
|
"padded": 1970, |
|
"non_padded": 10, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|piqa_ar|0": { |
|
"hashes": { |
|
"hash_examples": "94bc205a520d3ea0", |
|
"hash_full_prompts": "94bc205a520d3ea0", |
|
"hash_input_tokens": "a31d33aa2dfdc604", |
|
"hash_cont_tokens": "661c43850095d871" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1833, |
|
"padded": 3651, |
|
"non_padded": 15, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|race_ar|0": { |
|
"hashes": { |
|
"hash_examples": "de65130bae647516", |
|
"hash_full_prompts": "de65130bae647516", |
|
"hash_input_tokens": "c3fe0de6729d9bce", |
|
"hash_cont_tokens": "fd2244c08d29bb74" |
|
}, |
|
"truncated": 124, |
|
"non_truncated": 4805, |
|
"padded": 19589, |
|
"non_padded": 127, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|sciq_ar|0": { |
|
"hashes": { |
|
"hash_examples": "5f752356244660cf", |
|
"hash_full_prompts": "5f752356244660cf", |
|
"hash_input_tokens": "142c38486db52370", |
|
"hash_cont_tokens": "5cb3304c8b68fd03" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 995, |
|
"padded": 3970, |
|
"non_padded": 10, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|toxigen_ar|0": { |
|
"hashes": { |
|
"hash_examples": "1e139513004a9a2e", |
|
"hash_full_prompts": "1e139513004a9a2e", |
|
"hash_input_tokens": "22fab16b1cb71976", |
|
"hash_cont_tokens": "95916816b9079ef8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 935, |
|
"padded": 1858, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"lighteval|xstory_cloze:ar|0": { |
|
"hashes": { |
|
"hash_examples": "865426a22c787481", |
|
"hash_full_prompts": "865426a22c787481", |
|
"hash_input_tokens": "9437ed4fbbd44141", |
|
"hash_cont_tokens": "0ec4f03b9b94acd4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1511, |
|
"padded": 2991, |
|
"non_padded": 31, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
} |
|
}, |
|
"summary_general": { |
|
"hashes": { |
|
"hash_examples": "ca0205af914ade69", |
|
"hash_full_prompts": "ca0205af914ade69", |
|
"hash_input_tokens": "4d3e84bba01d96d8", |
|
"hash_cont_tokens": "ecec10f146c55a0f" |
|
}, |
|
"truncated": 126, |
|
"non_truncated": 72838, |
|
"padded": 235255, |
|
"non_padded": 368, |
|
"num_truncated_few_shots": 0 |
|
} |
|
} |