{ "config_general": { "lighteval_sha": "no", "num_few_shot_default": null, "num_fewshot_seeds": null, "override_batch_size": null, "max_samples": null, "job_id": -1, "start_time": null, "end_time": "2024-05-25-03-11-56", "total_evaluation_time_secondes": "", "model_name": "QuantFactory/Phi-3-mini-4k-instruct-GGUF", "model_sha": "", "model_dtype": "4bit", "model_size": 2.175438336, "model_params": 3.821079552, "quant_type": "llama.cpp", "precision": "4bit" }, "results": { "harness|mmlu|0": { "acc,none": 0.654037886340977, "acc_stderr,none": 0.0038135827737680933, "alias": "mmlu" }, "harness|mmlu_humanities|0": { "alias": " - humanities", "acc,none": 0.612327311370882, "acc_stderr,none": 0.00681296950147169 }, "harness|mmlu_formal_logic|0": { "alias": " - formal_logic", "acc,none": 0.5555555555555556, "acc_stderr,none": 0.044444444444444495 }, "harness|mmlu_high_school_european_history|0": { "alias": " - high_school_european_history", "acc,none": 0.806060606060606, "acc_stderr,none": 0.03087414513656208 }, "harness|mmlu_high_school_us_history|0": { "alias": " - high_school_us_history", "acc,none": 0.7745098039215687, "acc_stderr,none": 0.029331162294251728 }, "harness|mmlu_high_school_world_history|0": { "alias": " - high_school_world_history", "acc,none": 0.7890295358649789, "acc_stderr,none": 0.02655837250266192 }, "harness|mmlu_international_law|0": { "alias": " - international_law", "acc,none": 0.7768595041322314, "acc_stderr,none": 0.03800754475228733 }, "harness|mmlu_jurisprudence|0": { "alias": " - jurisprudence", "acc,none": 0.7407407407407407, "acc_stderr,none": 0.042365112580946315 }, "harness|mmlu_logical_fallacies|0": { "alias": " - logical_fallacies", "acc,none": 0.8098159509202454, "acc_stderr,none": 0.030833491146281214 }, "harness|mmlu_moral_disputes|0": { "alias": " - moral_disputes", "acc,none": 0.7109826589595376, "acc_stderr,none": 0.02440517393578323 }, "harness|mmlu_moral_scenarios|0": { "alias": " - moral_scenarios", "acc,none": 0.48268156424581005, "acc_stderr,none": 0.01671246744170252 }, "harness|mmlu_philosophy|0": { "alias": " - philosophy", "acc,none": 0.7009646302250804, "acc_stderr,none": 0.02600330111788514 }, "harness|mmlu_prehistory|0": { "alias": " - prehistory", "acc,none": 0.7685185185185185, "acc_stderr,none": 0.023468429832451152 }, "harness|mmlu_professional_law|0": { "alias": " - professional_law", "acc,none": 0.48370273794002605, "acc_stderr,none": 0.012763450734699816 }, "harness|mmlu_world_religions|0": { "alias": " - world_religions", "acc,none": 0.8187134502923976, "acc_stderr,none": 0.029547741687640038 }, "harness|mmlu_other|0": { "alias": " - other", "acc,none": 0.6971355004827808, "acc_stderr,none": 0.007949711839349649 }, "harness|mmlu_business_ethics|0": { "alias": " - business_ethics", "acc,none": 0.69, "acc_stderr,none": 0.04648231987117316 }, "harness|mmlu_clinical_knowledge|0": { "alias": " - clinical_knowledge", "acc,none": 0.7283018867924528, "acc_stderr,none": 0.027377706624670713 }, "harness|mmlu_college_medicine|0": { "alias": " - college_medicine", "acc,none": 0.6473988439306358, "acc_stderr,none": 0.03643037168958548 }, "harness|mmlu_global_facts|0": { "alias": " - global_facts", "acc,none": 0.31, "acc_stderr,none": 0.04648231987117316 }, "harness|mmlu_human_aging|0": { "alias": " - human_aging", "acc,none": 0.6681614349775785, "acc_stderr,none": 0.03160295143776679 }, "harness|mmlu_management|0": { "alias": " - management", "acc,none": 0.8349514563106796, "acc_stderr,none": 0.03675668832233189 }, "harness|mmlu_marketing|0": { 
"alias": " - marketing", "acc,none": 0.8717948717948718, "acc_stderr,none": 0.021901905115073318 }, "harness|mmlu_medical_genetics|0": { "alias": " - medical_genetics", "acc,none": 0.7, "acc_stderr,none": 0.046056618647183814 }, "harness|mmlu_miscellaneous|0": { "alias": " - miscellaneous", "acc,none": 0.8033205619412516, "acc_stderr,none": 0.01421413855691391 }, "harness|mmlu_nutrition|0": { "alias": " - nutrition", "acc,none": 0.6993464052287581, "acc_stderr,none": 0.026256053835718964 }, "harness|mmlu_professional_accounting|0": { "alias": " - professional_accounting", "acc,none": 0.5390070921985816, "acc_stderr,none": 0.029736592526424434 }, "harness|mmlu_professional_medicine|0": { "alias": " - professional_medicine", "acc,none": 0.6433823529411765, "acc_stderr,none": 0.02909720956841196 }, "harness|mmlu_virology|0": { "alias": " - virology", "acc,none": 0.4939759036144578, "acc_stderr,none": 0.03892212195333045 }, "harness|mmlu_social_sciences|0": { "alias": " - social_sciences", "acc,none": 0.7689307767305817, "acc_stderr,none": 0.0074364870765378744 }, "harness|mmlu_econometrics|0": { "alias": " - econometrics", "acc,none": 0.5263157894736842, "acc_stderr,none": 0.046970851366478626 }, "harness|mmlu_high_school_geography|0": { "alias": " - high_school_geography", "acc,none": 0.803030303030303, "acc_stderr,none": 0.02833560973246336 }, "harness|mmlu_high_school_government_and_politics|0": { "alias": " - high_school_government_and_politics", "acc,none": 0.8860103626943006, "acc_stderr,none": 0.022935144053919432 }, "harness|mmlu_high_school_macroeconomics|0": { "alias": " - high_school_macroeconomics", "acc,none": 0.7, "acc_stderr,none": 0.023234581088428487 }, "harness|mmlu_high_school_microeconomics|0": { "alias": " - high_school_microeconomics", "acc,none": 0.8109243697478992, "acc_stderr,none": 0.025435119438105357 }, "harness|mmlu_high_school_psychology|0": { "alias": " - high_school_psychology", "acc,none": 0.8715596330275229, "acc_stderr,none": 0.014344977542914307 }, "harness|mmlu_human_sexuality|0": { "alias": " - human_sexuality", "acc,none": 0.7099236641221374, "acc_stderr,none": 0.03980066246467765 }, "harness|mmlu_professional_psychology|0": { "alias": " - professional_psychology", "acc,none": 0.6944444444444444, "acc_stderr,none": 0.018635594034423976 }, "harness|mmlu_public_relations|0": { "alias": " - public_relations", "acc,none": 0.6818181818181818, "acc_stderr,none": 0.04461272175910508 }, "harness|mmlu_security_studies|0": { "alias": " - security_studies", "acc,none": 0.7428571428571429, "acc_stderr,none": 0.027979823538744543 }, "harness|mmlu_sociology|0": { "alias": " - sociology", "acc,none": 0.8656716417910447, "acc_stderr,none": 0.024112678240900826 }, "harness|mmlu_us_foreign_policy|0": { "alias": " - us_foreign_policy", "acc,none": 0.86, "acc_stderr,none": 0.03487350880197768 }, "harness|mmlu_stem|0": { "alias": " - stem", "acc,none": 0.5616872819536949, "acc_stderr,none": 0.00842972061711791 }, "harness|mmlu_abstract_algebra|0": { "alias": " - abstract_algebra", "acc,none": 0.36, "acc_stderr,none": 0.04824181513244218 }, "harness|mmlu_anatomy|0": { "alias": " - anatomy", "acc,none": 0.6296296296296297, "acc_stderr,none": 0.041716541613545426 }, "harness|mmlu_astronomy|0": { "alias": " - astronomy", "acc,none": 0.7368421052631579, "acc_stderr,none": 0.03583496176361072 }, "harness|mmlu_college_biology|0": { "alias": " - college_biology", "acc,none": 0.8125, "acc_stderr,none": 0.032639560491693344 }, "harness|mmlu_college_chemistry|0": { "alias": " - 
college_chemistry", "acc,none": 0.46, "acc_stderr,none": 0.05009082659620332 }, "harness|mmlu_college_computer_science|0": { "alias": " - college_computer_science", "acc,none": 0.47, "acc_stderr,none": 0.05016135580465919 }, "harness|mmlu_college_mathematics|0": { "alias": " - college_mathematics", "acc,none": 0.34, "acc_stderr,none": 0.04760952285695235 }, "harness|mmlu_college_physics|0": { "alias": " - college_physics", "acc,none": 0.37254901960784315, "acc_stderr,none": 0.04810840148082634 }, "harness|mmlu_computer_security|0": { "alias": " - computer_security", "acc,none": 0.75, "acc_stderr,none": 0.04351941398892446 }, "harness|mmlu_conceptual_physics|0": { "alias": " - conceptual_physics", "acc,none": 0.6212765957446809, "acc_stderr,none": 0.03170995606040655 }, "harness|mmlu_electrical_engineering|0": { "alias": " - electrical_engineering", "acc,none": 0.5517241379310345, "acc_stderr,none": 0.04144311810878152 }, "harness|mmlu_elementary_mathematics|0": { "alias": " - elementary_mathematics", "acc,none": 0.48677248677248675, "acc_stderr,none": 0.025742297289575142 }, "harness|mmlu_high_school_biology|0": { "alias": " - high_school_biology", "acc,none": 0.8258064516129032, "acc_stderr,none": 0.021576248184514566 }, "harness|mmlu_high_school_chemistry|0": { "alias": " - high_school_chemistry", "acc,none": 0.5566502463054187, "acc_stderr,none": 0.03495334582162934 }, "harness|mmlu_high_school_computer_science|0": { "alias": " - high_school_computer_science", "acc,none": 0.64, "acc_stderr,none": 0.04824181513244218 }, "harness|mmlu_high_school_mathematics|0": { "alias": " - high_school_mathematics", "acc,none": 0.337037037037037, "acc_stderr,none": 0.028820884666253252 }, "harness|mmlu_high_school_physics|0": { "alias": " - high_school_physics", "acc,none": 0.4370860927152318, "acc_stderr,none": 0.040500357222306355 }, "harness|mmlu_high_school_statistics|0": { "alias": " - high_school_statistics", "acc,none": 0.5787037037037037, "acc_stderr,none": 0.03367462138896078 }, "harness|mmlu_machine_learning|0": { "alias": " - machine_learning", "acc,none": 0.5, "acc_stderr,none": 0.04745789978762494 }, "harness|hellaswag|0": { "acc,none": 0.5984863572993427, "acc_stderr,none": 0.004892026457294725, "acc_norm,none": 0.7739494124676359, "acc_norm_stderr,none": 0.0041741747242881, "alias": "hellaswag" }, "harness|openbookqa|0": { "acc,none": 0.338, "acc_stderr,none": 0.02117566569520941, "acc_norm,none": 0.436, "acc_norm_stderr,none": 0.0221989546414768, "alias": "openbookqa" }, "harness|arc:easy|0": { "acc,none": 0.8051346801346801, "acc_stderr,none": 0.008127738779969257, "acc_norm,none": 0.7857744107744108, "acc_norm_stderr,none": 0.008418850681568162, "alias": "arc_easy" }, "harness|boolq|0": { "acc,none": 0.8642201834862385, "acc_stderr,none": 0.005991317719933094, "alias": "boolq" }, "harness|lambada:openai|0": { "perplexity,none": 5.9716028864360515, "perplexity_stderr,none": 0.17600170081638883, "acc,none": 0.33844362507277315, "acc_stderr,none": 0.006592325932741157, "alias": "lambada_openai" }, "harness|truthfulqa:mc2|0": { "acc,none": 0.606890311170753, "acc_stderr,none": 0.015406862286089968, "alias": "truthfulqa_mc2" }, "harness|winogrande|0": { "acc,none": 0.696921862667719, "acc_stderr,none": 0.012916727462634463, "alias": "winogrande" }, "harness|arc:challenge|0": { "acc,none": 0.5196245733788396, "acc_stderr,none": 0.014600132075947085, "acc_norm,none": 0.5469283276450512, "acc_norm_stderr,none": 0.01454689205200563, "alias": "arc_challenge" }, "harness|truthfulqa:mc1|0": { 
"acc,none": 0.4149326805385557, "acc_stderr,none": 0.017248314465805978, "alias": "truthfulqa_mc1" }, "harness|piqa|0": { "acc,none": 0.778563656147987, "acc_stderr,none": 0.009687616456840253, "acc_norm,none": 0.7752992383025027, "acc_norm_stderr,none": 0.009738282586548361, "alias": "piqa" } }, "task_info": { "model": "QuantFactory/Phi-3-mini-4k-instruct-GGUF", "revision": "main", "private": false, "params": 3.6, "architectures": "?", "quant_type": "llama.cpp", "precision": "4bit", "model_params": 3.6, "model_size": 2.18, "weight_dtype": "int4", "compute_dtype": "float16", "gguf_ftype": "*Q4_0.gguf", "hardware": "cpu", "status": "Pending", "submitted_time": "2024-04-29T07:28:31Z", "model_type": "quantization", "job_id": -1, "job_start_time": null, "scripts": "llama_cpp" }, "quantization_config": { "quant_method": "llama.cpp", "ftype": "*Q4_0.gguf" }, "versions": { "harness|mmlu|0": null, "harness|mmlu_humanities|0": null, "harness|mmlu_formal_logic|0": 0.0, "harness|mmlu_high_school_european_history|0": 0.0, "harness|mmlu_high_school_us_history|0": 0.0, "harness|mmlu_high_school_world_history|0": 0.0, "harness|mmlu_international_law|0": 0.0, "harness|mmlu_jurisprudence|0": 0.0, "harness|mmlu_logical_fallacies|0": 0.0, "harness|mmlu_moral_disputes|0": 0.0, "harness|mmlu_moral_scenarios|0": 0.0, "harness|mmlu_philosophy|0": 0.0, "harness|mmlu_prehistory|0": 0.0, "harness|mmlu_professional_law|0": 0.0, "harness|mmlu_world_religions|0": 0.0, "harness|mmlu_other|0": null, "harness|mmlu_business_ethics|0": 0.0, "harness|mmlu_clinical_knowledge|0": 0.0, "harness|mmlu_college_medicine|0": 0.0, "harness|mmlu_global_facts|0": 0.0, "harness|mmlu_human_aging|0": 0.0, "harness|mmlu_management|0": 0.0, "harness|mmlu_marketing|0": 0.0, "harness|mmlu_medical_genetics|0": 0.0, "harness|mmlu_miscellaneous|0": 0.0, "harness|mmlu_nutrition|0": 0.0, "harness|mmlu_professional_accounting|0": 0.0, "harness|mmlu_professional_medicine|0": 0.0, "harness|mmlu_virology|0": 0.0, "harness|mmlu_social_sciences|0": null, "harness|mmlu_econometrics|0": 0.0, "harness|mmlu_high_school_geography|0": 0.0, "harness|mmlu_high_school_government_and_politics|0": 0.0, "harness|mmlu_high_school_macroeconomics|0": 0.0, "harness|mmlu_high_school_microeconomics|0": 0.0, "harness|mmlu_high_school_psychology|0": 0.0, "harness|mmlu_human_sexuality|0": 0.0, "harness|mmlu_professional_psychology|0": 0.0, "harness|mmlu_public_relations|0": 0.0, "harness|mmlu_security_studies|0": 0.0, "harness|mmlu_sociology|0": 0.0, "harness|mmlu_us_foreign_policy|0": 0.0, "harness|mmlu_stem|0": null, "harness|mmlu_abstract_algebra|0": 0.0, "harness|mmlu_anatomy|0": 0.0, "harness|mmlu_astronomy|0": 0.0, "harness|mmlu_college_biology|0": 0.0, "harness|mmlu_college_chemistry|0": 0.0, "harness|mmlu_college_computer_science|0": 0.0, "harness|mmlu_college_mathematics|0": 0.0, "harness|mmlu_college_physics|0": 0.0, "harness|mmlu_computer_security|0": 0.0, "harness|mmlu_conceptual_physics|0": 0.0, "harness|mmlu_electrical_engineering|0": 0.0, "harness|mmlu_elementary_mathematics|0": 0.0, "harness|mmlu_high_school_biology|0": 0.0, "harness|mmlu_high_school_chemistry|0": 0.0, "harness|mmlu_high_school_computer_science|0": 0.0, "harness|mmlu_high_school_mathematics|0": 0.0, "harness|mmlu_high_school_physics|0": 0.0, "harness|mmlu_high_school_statistics|0": 0.0, "harness|mmlu_machine_learning|0": 0.0, "harness|hellaswag|0": 1.0, "harness|openbookqa|0": 1.0, "harness|arc:easy|0": 1.0, "harness|boolq|0": 2.0, "harness|lambada:openai|0": 1.0, 
"harness|truthfulqa:mc2|0": 2.0, "harness|winogrande|0": 1.0, "harness|arc:challenge|0": 1.0, "harness|truthfulqa:mc1|0": 2.0, "harness|piqa|0": 1.0 }, "n-shot": { "arc_challenge": 0, "arc_easy": 0, "boolq": 0, "hellaswag": 0, "lambada_openai": 0, "mmlu": 0, "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0, "mmlu_business_ethics": 0, "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0, "mmlu_college_chemistry": 0, "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0, "mmlu_college_medicine": 0, "mmlu_college_physics": 0, "mmlu_computer_security": 0, "mmlu_conceptual_physics": 0, "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0, "mmlu_elementary_mathematics": 0, "mmlu_formal_logic": 0, "mmlu_global_facts": 0, "mmlu_high_school_biology": 0, "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0, "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0, "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0, "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0, "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0, "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0, "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0, "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0, "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0, "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0, "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0, "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0, "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0, "mmlu_sociology": 0, "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0, "mmlu_world_religions": 0, "openbookqa": 0, "piqa": 0, "truthfulqa_mc1": 0, "truthfulqa_mc2": 0, "winogrande": 0 }, "date": 1716562293.095405, "config": { "model": "WrapperGGUFLM", "model_args": "gguf_model=QuantFactory/Phi-3-mini-4k-instruct-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main", "batch_size": 1, "batch_sizes": [], "device": "cuda", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null } }