{ "config_general": { "lighteval_sha": "1.4", "num_few_shot_default": null, "num_fewshot_seeds": null, "override_batch_size": null, "max_samples": null, "job_id": -1, "start_time": null, "end_time": "2024-04-30-13-36-35", "total_evaluation_time_secondes": "", "model_name": "leliuga/Phi-3-mini-4k-instruct-bnb-4bit", "model_sha": "", "model_dtype": "4bit", "model_size": 2.26, "model_params": 3.74, "quant_type": "bitsandbytes", "precision": "4bit" }, "results": { "harness|openbookqa|0": { "acc,none": 0.38, "acc_stderr,none": 0.021728881438701716, "acc_norm,none": 0.454, "acc_norm_stderr,none": 0.022288147591176945, "alias": "openbookqa" }, "harness|truthfulqa:mc2|0": { "acc,none": 0.5580592365477883, "acc_stderr,none": 0.015331592955190172, "alias": "truthfulqa_mc2" }, "harness|hellaswag|0": { "acc,none": 0.5970922127066322, "acc_stderr,none": 0.004894801119898624, "acc_norm,none": 0.7801234813782115, "acc_norm_stderr,none": 0.0041331638053174495, "alias": "hellaswag" }, "harness|truthfulqa:mc1|0": { "acc,none": 0.37209302325581395, "acc_stderr,none": 0.016921090118814035, "alias": "truthfulqa_mc1" }, "harness|piqa|0": { "acc,none": 0.7889009793253536, "acc_stderr,none": 0.009521377378734167, "acc_norm,none": 0.7932535364526659, "acc_norm_stderr,none": 0.00944866551418327, "alias": "piqa" }, "harness|arc:easy|0": { "acc,none": 0.8308080808080808, "acc_stderr,none": 0.007693223639488826, "acc_norm,none": 0.8000841750841751, "acc_norm_stderr,none": 0.008206531105458863, "alias": "arc_easy" }, "harness|mmlu|0": { "acc,none": 0.6661444238712434, "acc_stderr,none": 0.0037896788441303046, "alias": "mmlu" }, "harness|mmlu_humanities|0": { "alias": " - humanities", "acc,none": 0.6289054197662062, "acc_stderr,none": 0.006790926904899143 }, "harness|mmlu_formal_logic|0": { "alias": " - formal_logic", "acc,none": 0.5634920634920635, "acc_stderr,none": 0.04435932892851466 }, "harness|mmlu_high_school_european_history|0": { "alias": " - high_school_european_history", "acc,none": 0.7878787878787878, "acc_stderr,none": 0.03192271569548301 }, "harness|mmlu_high_school_us_history|0": { "alias": " - high_school_us_history", "acc,none": 0.7745098039215687, "acc_stderr,none": 0.02933116229425172 }, "harness|mmlu_high_school_world_history|0": { "alias": " - high_school_world_history", "acc,none": 0.8059071729957806, "acc_stderr,none": 0.025744902532290934 }, "harness|mmlu_international_law|0": { "alias": " - international_law", "acc,none": 0.8264462809917356, "acc_stderr,none": 0.0345727283691767 }, "harness|mmlu_jurisprudence|0": { "alias": " - jurisprudence", "acc,none": 0.7222222222222222, "acc_stderr,none": 0.043300437496507416 }, "harness|mmlu_logical_fallacies|0": { "alias": " - logical_fallacies", "acc,none": 0.8220858895705522, "acc_stderr,none": 0.03004735765580663 }, "harness|mmlu_moral_disputes|0": { "alias": " - moral_disputes", "acc,none": 0.7254335260115607, "acc_stderr,none": 0.02402774515526502 }, "harness|mmlu_moral_scenarios|0": { "alias": " - moral_scenarios", "acc,none": 0.5094972067039106, "acc_stderr,none": 0.01671948464334877 }, "harness|mmlu_philosophy|0": { "alias": " - philosophy", "acc,none": 0.7170418006430869, "acc_stderr,none": 0.025583062489984838 }, "harness|mmlu_prehistory|0": { "alias": " - prehistory", "acc,none": 0.7685185185185185, "acc_stderr,none": 0.02346842983245116 }, "harness|mmlu_professional_law|0": { "alias": " - professional_law", "acc,none": 0.5091264667535854, "acc_stderr,none": 0.012768108601640016 }, "harness|mmlu_world_religions|0": { "alias": " - 
world_religions", "acc,none": 0.8011695906432749, "acc_stderr,none": 0.03061111655743253 }, "harness|mmlu_other|0": { "alias": " - other", "acc,none": 0.7093659478596717, "acc_stderr,none": 0.007845883201569273 }, "harness|mmlu_business_ethics|0": { "alias": " - business_ethics", "acc,none": 0.67, "acc_stderr,none": 0.04725815626252609 }, "harness|mmlu_clinical_knowledge|0": { "alias": " - clinical_knowledge", "acc,none": 0.7584905660377359, "acc_stderr,none": 0.026341480371118352 }, "harness|mmlu_college_medicine|0": { "alias": " - college_medicine", "acc,none": 0.6589595375722543, "acc_stderr,none": 0.036146654241808254 }, "harness|mmlu_global_facts|0": { "alias": " - global_facts", "acc,none": 0.36, "acc_stderr,none": 0.048241815132442176 }, "harness|mmlu_human_aging|0": { "alias": " - human_aging", "acc,none": 0.6771300448430493, "acc_stderr,none": 0.031381476375754995 }, "harness|mmlu_management|0": { "alias": " - management", "acc,none": 0.8252427184466019, "acc_stderr,none": 0.0376017800602662 }, "harness|mmlu_marketing|0": { "alias": " - marketing", "acc,none": 0.8803418803418803, "acc_stderr,none": 0.02126271940040694 }, "harness|mmlu_medical_genetics|0": { "alias": " - medical_genetics", "acc,none": 0.72, "acc_stderr,none": 0.04512608598542127 }, "harness|mmlu_miscellaneous|0": { "alias": " - miscellaneous", "acc,none": 0.8186462324393359, "acc_stderr,none": 0.013778693778464081 }, "harness|mmlu_nutrition|0": { "alias": " - nutrition", "acc,none": 0.7058823529411765, "acc_stderr,none": 0.02609016250427904 }, "harness|mmlu_professional_accounting|0": { "alias": " - professional_accounting", "acc,none": 0.524822695035461, "acc_stderr,none": 0.02979071924382972 }, "harness|mmlu_professional_medicine|0": { "alias": " - professional_medicine", "acc,none": 0.6801470588235294, "acc_stderr,none": 0.02833295951403123 }, "harness|mmlu_virology|0": { "alias": " - virology", "acc,none": 0.4939759036144578, "acc_stderr,none": 0.03892212195333045 }, "harness|mmlu_social_sciences|0": { "alias": " - social_sciences", "acc,none": 0.7757556061098473, "acc_stderr,none": 0.007371597574029847 }, "harness|mmlu_econometrics|0": { "alias": " - econometrics", "acc,none": 0.5087719298245614, "acc_stderr,none": 0.04702880432049615 }, "harness|mmlu_high_school_geography|0": { "alias": " - high_school_geography", "acc,none": 0.8383838383838383, "acc_stderr,none": 0.026225919863629293 }, "harness|mmlu_high_school_government_and_politics|0": { "alias": " - high_school_government_and_politics", "acc,none": 0.8652849740932642, "acc_stderr,none": 0.024639789097709443 }, "harness|mmlu_high_school_macroeconomics|0": { "alias": " - high_school_macroeconomics", "acc,none": 0.7, "acc_stderr,none": 0.023234581088428487 }, "harness|mmlu_high_school_microeconomics|0": { "alias": " - high_school_microeconomics", "acc,none": 0.7899159663865546, "acc_stderr,none": 0.026461398717471878 }, "harness|mmlu_high_school_psychology|0": { "alias": " - high_school_psychology", "acc,none": 0.8752293577981651, "acc_stderr,none": 0.014168298359156352 }, "harness|mmlu_human_sexuality|0": { "alias": " - human_sexuality", "acc,none": 0.7251908396946565, "acc_stderr,none": 0.039153454088478354 }, "harness|mmlu_professional_psychology|0": { "alias": " - professional_psychology", "acc,none": 0.7173202614379085, "acc_stderr,none": 0.018217269552053435 }, "harness|mmlu_public_relations|0": { "alias": " - public_relations", "acc,none": 0.7090909090909091, "acc_stderr,none": 0.04350271442923243 }, "harness|mmlu_security_studies|0": { "alias": " - 
security_studies", "acc,none": 0.7755102040816326, "acc_stderr,none": 0.026711430555538398 }, "harness|mmlu_sociology|0": { "alias": " - sociology", "acc,none": 0.8606965174129353, "acc_stderr,none": 0.02448448716291397 }, "harness|mmlu_us_foreign_policy|0": { "alias": " - us_foreign_policy", "acc,none": 0.83, "acc_stderr,none": 0.03775251680686371 }, "harness|mmlu_stem|0": { "alias": " - stem", "acc,none": 0.5721535045987948, "acc_stderr,none": 0.008404208733097255 }, "harness|mmlu_abstract_algebra|0": { "alias": " - abstract_algebra", "acc,none": 0.42, "acc_stderr,none": 0.049604496374885836 }, "harness|mmlu_anatomy|0": { "alias": " - anatomy", "acc,none": 0.6296296296296297, "acc_stderr,none": 0.041716541613545426 }, "harness|mmlu_astronomy|0": { "alias": " - astronomy", "acc,none": 0.7368421052631579, "acc_stderr,none": 0.03583496176361072 }, "harness|mmlu_college_biology|0": { "alias": " - college_biology", "acc,none": 0.8194444444444444, "acc_stderr,none": 0.032166008088022675 }, "harness|mmlu_college_chemistry|0": { "alias": " - college_chemistry", "acc,none": 0.47, "acc_stderr,none": 0.05016135580465919 }, "harness|mmlu_college_computer_science|0": { "alias": " - college_computer_science", "acc,none": 0.47, "acc_stderr,none": 0.050161355804659205 }, "harness|mmlu_college_mathematics|0": { "alias": " - college_mathematics", "acc,none": 0.34, "acc_stderr,none": 0.047609522856952344 }, "harness|mmlu_college_physics|0": { "alias": " - college_physics", "acc,none": 0.43137254901960786, "acc_stderr,none": 0.04928099597287534 }, "harness|mmlu_computer_security|0": { "alias": " - computer_security", "acc,none": 0.77, "acc_stderr,none": 0.04229525846816506 }, "harness|mmlu_conceptual_physics|0": { "alias": " - conceptual_physics", "acc,none": 0.6340425531914894, "acc_stderr,none": 0.0314895582974553 }, "harness|mmlu_electrical_engineering|0": { "alias": " - electrical_engineering", "acc,none": 0.5793103448275863, "acc_stderr,none": 0.0411391498118926 }, "harness|mmlu_elementary_mathematics|0": { "alias": " - elementary_mathematics", "acc,none": 0.4973544973544973, "acc_stderr,none": 0.02575094967813038 }, "harness|mmlu_high_school_biology|0": { "alias": " - high_school_biology", "acc,none": 0.832258064516129, "acc_stderr,none": 0.021255464065371335 }, "harness|mmlu_high_school_chemistry|0": { "alias": " - high_school_chemistry", "acc,none": 0.5862068965517241, "acc_stderr,none": 0.03465304488406795 }, "harness|mmlu_high_school_computer_science|0": { "alias": " - high_school_computer_science", "acc,none": 0.7, "acc_stderr,none": 0.046056618647183814 }, "harness|mmlu_high_school_mathematics|0": { "alias": " - high_school_mathematics", "acc,none": 0.3296296296296296, "acc_stderr,none": 0.028661201116524586 }, "harness|mmlu_high_school_physics|0": { "alias": " - high_school_physics", "acc,none": 0.4370860927152318, "acc_stderr,none": 0.04050035722230636 }, "harness|mmlu_high_school_statistics|0": { "alias": " - high_school_statistics", "acc,none": 0.5324074074074074, "acc_stderr,none": 0.034028015813589656 }, "harness|mmlu_machine_learning|0": { "alias": " - machine_learning", "acc,none": 0.5357142857142857, "acc_stderr,none": 0.04733667890053756 }, "harness|arc:challenge|0": { "acc,none": 0.5691126279863481, "acc_stderr,none": 0.014471133392642475, "acc_norm,none": 0.5836177474402731, "acc_norm_stderr,none": 0.014405618279436178, "alias": "arc_challenge" }, "harness|winogrande|0": { "acc,none": 0.7269139700078927, "acc_stderr,none": 0.012522020105869456, "alias": "winogrande" }, 
"harness|boolq|0": { "acc,none": 0.8602446483180428, "acc_stderr,none": 0.0060643988004343554, "alias": "boolq" }, "harness|lambada:openai|0": { "perplexity,none": 4.367293923780302, "perplexity_stderr,none": 0.10618058268537012, "acc,none": 0.6745585096060547, "acc_stderr,none": 0.006527672323541589, "alias": "lambada_openai" } }, "task_info": { "model": "leliuga/Phi-3-mini-4k-instruct-bnb-4bit", "revision": "main", "private": false, "params": 8.264, "architectures": "Phi3ForCausalLM", "quant_type": "bitsandbytes", "precision": "4bit", "model_params": 16.528, "model_size": 8.264, "weight_dtype": "int4", "compute_dtype": "float16", "gguf_ftype": "*Q4_0.gguf", "hardware": "gpu", "status": "Pending", "submitted_time": "2024-04-29T07:25:56Z", "model_type": "quantization", "job_id": -1, "job_start_time": null, "scripts": "ITREX" }, "quantization_config": { "_load_in_4bit": true, "_load_in_8bit": false, "bnb_4bit_compute_dtype": "bfloat16", "bnb_4bit_quant_storage": "uint8", "bnb_4bit_quant_type": "nf4", "bnb_4bit_use_double_quant": true, "llm_int8_enable_fp32_cpu_offload": false, "llm_int8_has_fp16_weight": false, "llm_int8_skip_modules": null, "llm_int8_threshold": 6.0, "load_in_4bit": true, "load_in_8bit": false, "quant_method": "bitsandbytes" }, "versions": { "harness|openbookqa|0": 1.0, "harness|truthfulqa:mc2|0": 2.0, "harness|hellaswag|0": 1.0, "harness|truthfulqa:mc1|0": 2.0, "harness|piqa|0": 1.0, "harness|arc:easy|0": 1.0, "harness|mmlu|0": null, "harness|mmlu_humanities|0": null, "harness|mmlu_formal_logic|0": 0.0, "harness|mmlu_high_school_european_history|0": 0.0, "harness|mmlu_high_school_us_history|0": 0.0, "harness|mmlu_high_school_world_history|0": 0.0, "harness|mmlu_international_law|0": 0.0, "harness|mmlu_jurisprudence|0": 0.0, "harness|mmlu_logical_fallacies|0": 0.0, "harness|mmlu_moral_disputes|0": 0.0, "harness|mmlu_moral_scenarios|0": 0.0, "harness|mmlu_philosophy|0": 0.0, "harness|mmlu_prehistory|0": 0.0, "harness|mmlu_professional_law|0": 0.0, "harness|mmlu_world_religions|0": 0.0, "harness|mmlu_other|0": null, "harness|mmlu_business_ethics|0": 0.0, "harness|mmlu_clinical_knowledge|0": 0.0, "harness|mmlu_college_medicine|0": 0.0, "harness|mmlu_global_facts|0": 0.0, "harness|mmlu_human_aging|0": 0.0, "harness|mmlu_management|0": 0.0, "harness|mmlu_marketing|0": 0.0, "harness|mmlu_medical_genetics|0": 0.0, "harness|mmlu_miscellaneous|0": 0.0, "harness|mmlu_nutrition|0": 0.0, "harness|mmlu_professional_accounting|0": 0.0, "harness|mmlu_professional_medicine|0": 0.0, "harness|mmlu_virology|0": 0.0, "harness|mmlu_social_sciences|0": null, "harness|mmlu_econometrics|0": 0.0, "harness|mmlu_high_school_geography|0": 0.0, "harness|mmlu_high_school_government_and_politics|0": 0.0, "harness|mmlu_high_school_macroeconomics|0": 0.0, "harness|mmlu_high_school_microeconomics|0": 0.0, "harness|mmlu_high_school_psychology|0": 0.0, "harness|mmlu_human_sexuality|0": 0.0, "harness|mmlu_professional_psychology|0": 0.0, "harness|mmlu_public_relations|0": 0.0, "harness|mmlu_security_studies|0": 0.0, "harness|mmlu_sociology|0": 0.0, "harness|mmlu_us_foreign_policy|0": 0.0, "harness|mmlu_stem|0": null, "harness|mmlu_abstract_algebra|0": 0.0, "harness|mmlu_anatomy|0": 0.0, "harness|mmlu_astronomy|0": 0.0, "harness|mmlu_college_biology|0": 0.0, "harness|mmlu_college_chemistry|0": 0.0, "harness|mmlu_college_computer_science|0": 0.0, "harness|mmlu_college_mathematics|0": 0.0, "harness|mmlu_college_physics|0": 0.0, "harness|mmlu_computer_security|0": 0.0, "harness|mmlu_conceptual_physics|0": 0.0, 
"harness|mmlu_electrical_engineering|0": 0.0, "harness|mmlu_elementary_mathematics|0": 0.0, "harness|mmlu_high_school_biology|0": 0.0, "harness|mmlu_high_school_chemistry|0": 0.0, "harness|mmlu_high_school_computer_science|0": 0.0, "harness|mmlu_high_school_mathematics|0": 0.0, "harness|mmlu_high_school_physics|0": 0.0, "harness|mmlu_high_school_statistics|0": 0.0, "harness|mmlu_machine_learning|0": 0.0, "harness|arc:challenge|0": 1.0, "harness|winogrande|0": 1.0, "harness|boolq|0": 2.0, "harness|lambada:openai|0": 1.0 }, "n-shot": { "arc_challenge": 0, "arc_easy": 0, "boolq": 0, "hellaswag": 0, "lambada_openai": 0, "mmlu": 0, "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0, "mmlu_business_ethics": 0, "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0, "mmlu_college_chemistry": 0, "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0, "mmlu_college_medicine": 0, "mmlu_college_physics": 0, "mmlu_computer_security": 0, "mmlu_conceptual_physics": 0, "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0, "mmlu_elementary_mathematics": 0, "mmlu_formal_logic": 0, "mmlu_global_facts": 0, "mmlu_high_school_biology": 0, "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0, "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0, "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0, "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0, "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0, "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0, "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0, "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0, "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0, "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0, "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0, "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0, "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0, "mmlu_sociology": 0, "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0, "mmlu_world_religions": 0, "openbookqa": 0, "piqa": 0, "truthfulqa_mc1": 0, "truthfulqa_mc2": 0, "winogrande": 0 }, "date": 1714442777.7010922, "config": { "model": "hf", "model_args": "pretrained=leliuga/Phi-3-mini-4k-instruct-bnb-4bit,trust_remote_code=True,dtype=float16,_commit_hash=main", "batch_size": 2, "batch_sizes": [], "device": "cuda", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null } }