{ "config_general": { "lighteval_sha": "1.4", "num_few_shot_default": null, "num_fewshot_seeds": null, "override_batch_size": null, "max_samples": null, "job_id": -1, "start_time": null, "end_time": "2024-04-27-01-16-55", "total_evaluation_time_secondes": "", "model_name": "unsloth/llama-2-7b-chat-bnb-4bit", "model_sha": "", "model_dtype": "4bit", "model_size": 3.87, "model_params": 6.68, "quant_type": "bitsandbytes", "precision": "4bit" }, "results": { "harness|piqa|0": { "acc,none": 0.764417845484222, "acc_stderr,none": 0.009901067586473912, "acc_norm,none": 0.76550598476605, "acc_norm_stderr,none": 0.00988520314324055, "alias": "piqa" }, "harness|truthfulqa:mc1|0": { "acc,none": 0.2937576499388005, "acc_stderr,none": 0.015945068581236614, "alias": "truthfulqa_mc1" }, "harness|winogrande|0": { "acc,none": 0.6669297553275454, "acc_stderr,none": 0.01324619402807065, "alias": "winogrande" }, "harness|truthfulqa:mc2|0": { "acc,none": 0.45035586657845295, "acc_stderr,none": 0.01565376338587469, "alias": "truthfulqa_mc2" }, "harness|arc:easy|0": { "acc,none": 0.7369528619528619, "acc_stderr,none": 0.009034514898865822, "acc_norm,none": 0.6982323232323232, "acc_norm_stderr,none": 0.009418994158522528, "alias": "arc_easy" }, "harness|mmlu|0": { "acc,none": 0.45107534539239424, "acc_stderr,none": 0.004041496994837459, "alias": "mmlu" }, "harness|mmlu_humanities|0": { "alias": " - humanities", "acc,none": 0.42422954303931987, "acc_stderr,none": 0.006916469882448509 }, "harness|mmlu_formal_logic|0": { "alias": " - formal_logic", "acc,none": 0.2619047619047619, "acc_stderr,none": 0.039325376803928704 }, "harness|mmlu_high_school_european_history|0": { "alias": " - high_school_european_history", "acc,none": 0.5575757575757576, "acc_stderr,none": 0.03878372113711274 }, "harness|mmlu_high_school_us_history|0": { "alias": " - high_school_us_history", "acc,none": 0.6715686274509803, "acc_stderr,none": 0.03296245110172229 }, "harness|mmlu_high_school_world_history|0": { "alias": " - high_school_world_history", "acc,none": 0.6118143459915611, "acc_stderr,none": 0.031722950043323296 }, "harness|mmlu_international_law|0": { "alias": " - international_law", "acc,none": 0.5867768595041323, "acc_stderr,none": 0.04495087843548408 }, "harness|mmlu_jurisprudence|0": { "alias": " - jurisprudence", "acc,none": 0.5370370370370371, "acc_stderr,none": 0.04820403072760627 }, "harness|mmlu_logical_fallacies|0": { "alias": " - logical_fallacies", "acc,none": 0.5214723926380368, "acc_stderr,none": 0.0392474687675113 }, "harness|mmlu_moral_disputes|0": { "alias": " - moral_disputes", "acc,none": 0.4884393063583815, "acc_stderr,none": 0.026911898686377906 }, "harness|mmlu_moral_scenarios|0": { "alias": " - moral_scenarios", "acc,none": 0.2424581005586592, "acc_stderr,none": 0.014333522059217887 }, "harness|mmlu_philosophy|0": { "alias": " - philosophy", "acc,none": 0.5369774919614148, "acc_stderr,none": 0.02832032583010592 }, "harness|mmlu_prehistory|0": { "alias": " - prehistory", "acc,none": 0.5524691358024691, "acc_stderr,none": 0.027667138569422708 }, "harness|mmlu_professional_law|0": { "alias": " - professional_law", "acc,none": 0.34485006518904826, "acc_stderr,none": 0.012139881006287049 }, "harness|mmlu_world_religions|0": { "alias": " - world_religions", "acc,none": 0.6666666666666666, "acc_stderr,none": 0.036155076303109344 }, "harness|mmlu_other|0": { "alias": " - other", "acc,none": 0.5352429996781461, "acc_stderr,none": 0.008705350844769753 }, "harness|mmlu_business_ethics|0": { "alias": " - business_ethics", 
"acc,none": 0.51, "acc_stderr,none": 0.05024183937956912 }, "harness|mmlu_clinical_knowledge|0": { "alias": " - clinical_knowledge", "acc,none": 0.5358490566037736, "acc_stderr,none": 0.030693675018458006 }, "harness|mmlu_college_medicine|0": { "alias": " - college_medicine", "acc,none": 0.3872832369942196, "acc_stderr,none": 0.037143259063020656 }, "harness|mmlu_global_facts|0": { "alias": " - global_facts", "acc,none": 0.38, "acc_stderr,none": 0.04878317312145632 }, "harness|mmlu_human_aging|0": { "alias": " - human_aging", "acc,none": 0.5964125560538116, "acc_stderr,none": 0.03292802819330313 }, "harness|mmlu_management|0": { "alias": " - management", "acc,none": 0.6213592233009708, "acc_stderr,none": 0.04802694698258975 }, "harness|mmlu_marketing|0": { "alias": " - marketing", "acc,none": 0.7051282051282052, "acc_stderr,none": 0.02987257770889118 }, "harness|mmlu_medical_genetics|0": { "alias": " - medical_genetics", "acc,none": 0.46, "acc_stderr,none": 0.05009082659620332 }, "harness|mmlu_miscellaneous|0": { "alias": " - miscellaneous", "acc,none": 0.6666666666666666, "acc_stderr,none": 0.016857391247472552 }, "harness|mmlu_nutrition|0": { "alias": " - nutrition", "acc,none": 0.4934640522875817, "acc_stderr,none": 0.02862747055055606 }, "harness|mmlu_professional_accounting|0": { "alias": " - professional_accounting", "acc,none": 0.35106382978723405, "acc_stderr,none": 0.028473501272963758 }, "harness|mmlu_professional_medicine|0": { "alias": " - professional_medicine", "acc,none": 0.4227941176470588, "acc_stderr,none": 0.030008562845003476 }, "harness|mmlu_virology|0": { "alias": " - virology", "acc,none": 0.42168674698795183, "acc_stderr,none": 0.03844453181770918 }, "harness|mmlu_social_sciences|0": { "alias": " - social_sciences", "acc,none": 0.5079623009424764, "acc_stderr,none": 0.008792891268502986 }, "harness|mmlu_econometrics|0": { "alias": " - econometrics", "acc,none": 0.3157894736842105, "acc_stderr,none": 0.04372748290278007 }, "harness|mmlu_high_school_geography|0": { "alias": " - high_school_geography", "acc,none": 0.5808080808080808, "acc_stderr,none": 0.03515520728670417 }, "harness|mmlu_high_school_government_and_politics|0": { "alias": " - high_school_government_and_politics", "acc,none": 0.6476683937823834, "acc_stderr,none": 0.03447478286414357 }, "harness|mmlu_high_school_macroeconomics|0": { "alias": " - high_school_macroeconomics", "acc,none": 0.37948717948717947, "acc_stderr,none": 0.024603626924097417 }, "harness|mmlu_high_school_microeconomics|0": { "alias": " - high_school_microeconomics", "acc,none": 0.37815126050420167, "acc_stderr,none": 0.031499305777849054 }, "harness|mmlu_high_school_psychology|0": { "alias": " - high_school_psychology", "acc,none": 0.5926605504587156, "acc_stderr,none": 0.021065986244412888 }, "harness|mmlu_human_sexuality|0": { "alias": " - human_sexuality", "acc,none": 0.5419847328244275, "acc_stderr,none": 0.04369802690578757 }, "harness|mmlu_professional_psychology|0": { "alias": " - professional_psychology", "acc,none": 0.4362745098039216, "acc_stderr,none": 0.02006287424353913 }, "harness|mmlu_public_relations|0": { "alias": " - public_relations", "acc,none": 0.5181818181818182, "acc_stderr,none": 0.04785964010794916 }, "harness|mmlu_security_studies|0": { "alias": " - security_studies", "acc,none": 0.47346938775510206, "acc_stderr,none": 0.03196412734523272 }, "harness|mmlu_sociology|0": { "alias": " - sociology", "acc,none": 0.7014925373134329, "acc_stderr,none": 0.032357437893550424 }, "harness|mmlu_us_foreign_policy|0": { 
"alias": " - us_foreign_policy", "acc,none": 0.74, "acc_stderr,none": 0.0440844002276808 }, "harness|mmlu_stem|0": { "alias": " - stem", "acc,none": 0.3526799873136695, "acc_stderr,none": 0.008379717025521196 }, "harness|mmlu_abstract_algebra|0": { "alias": " - abstract_algebra", "acc,none": 0.27, "acc_stderr,none": 0.0446196043338474 }, "harness|mmlu_anatomy|0": { "alias": " - anatomy", "acc,none": 0.42962962962962964, "acc_stderr,none": 0.04276349494376599 }, "harness|mmlu_astronomy|0": { "alias": " - astronomy", "acc,none": 0.4605263157894737, "acc_stderr,none": 0.04056242252249033 }, "harness|mmlu_college_biology|0": { "alias": " - college_biology", "acc,none": 0.4861111111111111, "acc_stderr,none": 0.04179596617581 }, "harness|mmlu_college_chemistry|0": { "alias": " - college_chemistry", "acc,none": 0.25, "acc_stderr,none": 0.04351941398892446 }, "harness|mmlu_college_computer_science|0": { "alias": " - college_computer_science", "acc,none": 0.28, "acc_stderr,none": 0.04512608598542127 }, "harness|mmlu_college_mathematics|0": { "alias": " - college_mathematics", "acc,none": 0.3, "acc_stderr,none": 0.046056618647183814 }, "harness|mmlu_college_physics|0": { "alias": " - college_physics", "acc,none": 0.21568627450980393, "acc_stderr,none": 0.040925639582376536 }, "harness|mmlu_computer_security|0": { "alias": " - computer_security", "acc,none": 0.55, "acc_stderr,none": 0.05 }, "harness|mmlu_conceptual_physics|0": { "alias": " - conceptual_physics", "acc,none": 0.3829787234042553, "acc_stderr,none": 0.031778212502369216 }, "harness|mmlu_electrical_engineering|0": { "alias": " - electrical_engineering", "acc,none": 0.42758620689655175, "acc_stderr,none": 0.04122737111370331 }, "harness|mmlu_elementary_mathematics|0": { "alias": " - elementary_mathematics", "acc,none": 0.2830687830687831, "acc_stderr,none": 0.023201392938194978 }, "harness|mmlu_high_school_biology|0": { "alias": " - high_school_biology", "acc,none": 0.47096774193548385, "acc_stderr,none": 0.028396016402761 }, "harness|mmlu_high_school_chemistry|0": { "alias": " - high_school_chemistry", "acc,none": 0.33497536945812806, "acc_stderr,none": 0.033208527423483104 }, "harness|mmlu_high_school_computer_science|0": { "alias": " - high_school_computer_science", "acc,none": 0.43, "acc_stderr,none": 0.049756985195624284 }, "harness|mmlu_high_school_mathematics|0": { "alias": " - high_school_mathematics", "acc,none": 0.2814814814814815, "acc_stderr,none": 0.027420019350945277 }, "harness|mmlu_high_school_physics|0": { "alias": " - high_school_physics", "acc,none": 0.2582781456953642, "acc_stderr,none": 0.035737053147634576 }, "harness|mmlu_high_school_statistics|0": { "alias": " - high_school_statistics", "acc,none": 0.25925925925925924, "acc_stderr,none": 0.029886910547626964 }, "harness|mmlu_machine_learning|0": { "alias": " - machine_learning", "acc,none": 0.35714285714285715, "acc_stderr,none": 0.04547960999764376 }, "harness|openbookqa|0": { "acc,none": 0.34, "acc_stderr,none": 0.021206117013673063, "acc_norm,none": 0.424, "acc_norm_stderr,none": 0.022122993778135404, "alias": "openbookqa" }, "harness|lambada:openai|0": { "perplexity,none": 3.3804927836594665, "perplexity_stderr,none": 0.09041913950351517, "acc,none": 0.7023093343683291, "acc_stderr,none": 0.006370285573012031, "alias": "lambada_openai" }, "harness|hellaswag|0": { "acc,none": 0.5731925911173074, "acc_stderr,none": 0.004936029827672042, "acc_norm,none": 0.7499502091216889, "acc_norm_stderr,none": 0.004321564303822486, "alias": "hellaswag" }, "harness|boolq|0": { 
"acc,none": 0.7948012232415902, "acc_stderr,none": 0.007063324955682799, "alias": "boolq" }, "harness|arc:challenge|0": { "acc,none": 0.4257679180887372, "acc_stderr,none": 0.014449464278868802, "acc_norm,none": 0.4257679180887372, "acc_norm_stderr,none": 0.014449464278868809, "alias": "arc_challenge" } }, "task_info": { "model": "unsloth/llama-2-7b-chat-bnb-4bit", "revision": "main", "private": false, "params": 14.408, "architectures": "LlamaForCausalLM", "quant_type": "bitsandbytes", "precision": "4bit", "model_params": 28.816, "model_size": 14.408, "weight_dtype": "int4", "compute_dtype": "float16", "gguf_ftype": "*Q4_0.gguf", "hardware": "gpu", "status": "Pending", "submitted_time": "2024-04-26T15:24:50Z", "model_type": "quantization", "job_id": -1, "job_start_time": null, "scripts": "ITREX" }, "quantization_config": { "_load_in_4bit": true, "_load_in_8bit": false, "bnb_4bit_compute_dtype": "float16", "bnb_4bit_quant_type": "nf4", "bnb_4bit_use_double_quant": true, "llm_int8_enable_fp32_cpu_offload": false, "llm_int8_has_fp16_weight": false, "llm_int8_skip_modules": null, "llm_int8_threshold": 6.0, "load_in_4bit": true, "load_in_8bit": false, "quant_method": "bitsandbytes" }, "versions": { "harness|piqa|0": 1.0, "harness|truthfulqa:mc1|0": 2.0, "harness|winogrande|0": 1.0, "harness|truthfulqa:mc2|0": 2.0, "harness|arc:easy|0": 1.0, "harness|mmlu|0": null, "harness|mmlu_humanities|0": null, "harness|mmlu_formal_logic|0": 0.0, "harness|mmlu_high_school_european_history|0": 0.0, "harness|mmlu_high_school_us_history|0": 0.0, "harness|mmlu_high_school_world_history|0": 0.0, "harness|mmlu_international_law|0": 0.0, "harness|mmlu_jurisprudence|0": 0.0, "harness|mmlu_logical_fallacies|0": 0.0, "harness|mmlu_moral_disputes|0": 0.0, "harness|mmlu_moral_scenarios|0": 0.0, "harness|mmlu_philosophy|0": 0.0, "harness|mmlu_prehistory|0": 0.0, "harness|mmlu_professional_law|0": 0.0, "harness|mmlu_world_religions|0": 0.0, "harness|mmlu_other|0": null, "harness|mmlu_business_ethics|0": 0.0, "harness|mmlu_clinical_knowledge|0": 0.0, "harness|mmlu_college_medicine|0": 0.0, "harness|mmlu_global_facts|0": 0.0, "harness|mmlu_human_aging|0": 0.0, "harness|mmlu_management|0": 0.0, "harness|mmlu_marketing|0": 0.0, "harness|mmlu_medical_genetics|0": 0.0, "harness|mmlu_miscellaneous|0": 0.0, "harness|mmlu_nutrition|0": 0.0, "harness|mmlu_professional_accounting|0": 0.0, "harness|mmlu_professional_medicine|0": 0.0, "harness|mmlu_virology|0": 0.0, "harness|mmlu_social_sciences|0": null, "harness|mmlu_econometrics|0": 0.0, "harness|mmlu_high_school_geography|0": 0.0, "harness|mmlu_high_school_government_and_politics|0": 0.0, "harness|mmlu_high_school_macroeconomics|0": 0.0, "harness|mmlu_high_school_microeconomics|0": 0.0, "harness|mmlu_high_school_psychology|0": 0.0, "harness|mmlu_human_sexuality|0": 0.0, "harness|mmlu_professional_psychology|0": 0.0, "harness|mmlu_public_relations|0": 0.0, "harness|mmlu_security_studies|0": 0.0, "harness|mmlu_sociology|0": 0.0, "harness|mmlu_us_foreign_policy|0": 0.0, "harness|mmlu_stem|0": null, "harness|mmlu_abstract_algebra|0": 0.0, "harness|mmlu_anatomy|0": 0.0, "harness|mmlu_astronomy|0": 0.0, "harness|mmlu_college_biology|0": 0.0, "harness|mmlu_college_chemistry|0": 0.0, "harness|mmlu_college_computer_science|0": 0.0, "harness|mmlu_college_mathematics|0": 0.0, "harness|mmlu_college_physics|0": 0.0, "harness|mmlu_computer_security|0": 0.0, "harness|mmlu_conceptual_physics|0": 0.0, "harness|mmlu_electrical_engineering|0": 0.0, "harness|mmlu_elementary_mathematics|0": 0.0, 
"harness|mmlu_high_school_biology|0": 0.0, "harness|mmlu_high_school_chemistry|0": 0.0, "harness|mmlu_high_school_computer_science|0": 0.0, "harness|mmlu_high_school_mathematics|0": 0.0, "harness|mmlu_high_school_physics|0": 0.0, "harness|mmlu_high_school_statistics|0": 0.0, "harness|mmlu_machine_learning|0": 0.0, "harness|openbookqa|0": 1.0, "harness|lambada:openai|0": 1.0, "harness|hellaswag|0": 1.0, "harness|boolq|0": 2.0, "harness|arc:challenge|0": 1.0 }, "n-shot": { "arc_challenge": 0, "arc_easy": 0, "boolq": 0, "hellaswag": 0, "lambada_openai": 0, "mmlu": 0, "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0, "mmlu_business_ethics": 0, "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0, "mmlu_college_chemistry": 0, "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0, "mmlu_college_medicine": 0, "mmlu_college_physics": 0, "mmlu_computer_security": 0, "mmlu_conceptual_physics": 0, "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0, "mmlu_elementary_mathematics": 0, "mmlu_formal_logic": 0, "mmlu_global_facts": 0, "mmlu_high_school_biology": 0, "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0, "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0, "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0, "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0, "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0, "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0, "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0, "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0, "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0, "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0, "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0, "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0, "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0, "mmlu_sociology": 0, "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0, "mmlu_world_religions": 0, "openbookqa": 0, "piqa": 0, "truthfulqa_mc1": 0, "truthfulqa_mc2": 0, "winogrande": 0 }, "date": 1714145243.1145976, "config": { "model": "hf", "model_args": "pretrained=unsloth/llama-2-7b-chat-bnb-4bit,dtype=float16,_commit_hash=main", "batch_size": 2, "batch_sizes": [], "device": "cuda", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null } }