{ "config_general": { "lighteval_sha": "1.4", "num_few_shot_default": null, "num_fewshot_seeds": null, "override_batch_size": null, "max_samples": null, "job_id": -1, "start_time": null, "end_time": "2024-05-20-10-51-30", "total_evaluation_time_secondes": "", "model_name": "ISTA-DASLab/Llama-3-8B-Instruct-GPTQ-4bit", "model_sha": "", "model_dtype": "4bit", "model_size": 5.74, "model_params": 7.04, "quant_type": "GPTQ", "precision": "4bit" }, "results": { "harness|piqa|0": { "acc,none": 0.76550598476605, "acc_stderr,none": 0.009885203143240548, "acc_norm,none": 0.7742110990206746, "acc_norm_stderr,none": 0.009754980670917334, "alias": "piqa" }, "harness|hellaswag|0": { "acc,none": 0.5633339972117108, "acc_stderr,none": 0.00494958956767891, "acc_norm,none": 0.7466640111531567, "acc_norm_stderr,none": 0.004340328204135108, "alias": "hellaswag" }, "harness|arc:easy|0": { "acc,none": 0.7929292929292929, "acc_stderr,none": 0.008314665023956551, "acc_norm,none": 0.7643097643097643, "acc_norm_stderr,none": 0.008709108323214466, "alias": "arc_easy" }, "harness|winogrande|0": { "acc,none": 0.7348066298342542, "acc_stderr,none": 0.01240654946619286, "alias": "winogrande" }, "harness|openbookqa|0": { "acc,none": 0.334, "acc_stderr,none": 0.021113492347743738, "acc_norm,none": 0.428, "acc_norm_stderr,none": 0.02214979066386193, "alias": "openbookqa" }, "harness|boolq|0": { "acc,none": 0.8269113149847095, "acc_stderr,none": 0.006616927043886648, "alias": "boolq" }, "harness|lambada:openai|0": { "perplexity,none": 3.5061154188638777, "perplexity_stderr,none": 0.10109082998573839, "acc,none": 0.708131185717058, "acc_stderr,none": 0.006333777168216385, "alias": "lambada_openai" }, "harness|arc:challenge|0": { "acc,none": 0.5034129692832765, "acc_stderr,none": 0.014611050403244084, "acc_norm,none": 0.5238907849829352, "acc_norm_stderr,none": 0.014594701798071654, "alias": "arc_challenge" }, "harness|truthfulqa:mc1|0": { "acc,none": 0.3659730722154223, "acc_stderr,none": 0.016862941684088383, "alias": "truthfulqa_mc1" }, "harness|truthfulqa:mc2|0": { "acc,none": 0.5191254909055326, "acc_stderr,none": 0.015156374303657972, "alias": "truthfulqa_mc2" }, "harness|mmlu|0": { "acc,none": 0.6071072496795328, "acc_stderr,none": 0.003928160330453562, "alias": "mmlu" }, "harness|mmlu_humanities|0": { "alias": " - humanities", "acc,none": 0.5604675876726887, "acc_stderr,none": 0.006861969378650451 }, "harness|mmlu_formal_logic|0": { "alias": " - formal_logic", "acc,none": 0.4444444444444444, "acc_stderr,none": 0.04444444444444449 }, "harness|mmlu_high_school_european_history|0": { "alias": " - high_school_european_history", "acc,none": 0.7090909090909091, "acc_stderr,none": 0.03546563019624336 }, "harness|mmlu_high_school_us_history|0": { "alias": " - high_school_us_history", "acc,none": 0.8137254901960784, "acc_stderr,none": 0.027325470966716333 }, "harness|mmlu_high_school_world_history|0": { "alias": " - high_school_world_history", "acc,none": 0.8185654008438819, "acc_stderr,none": 0.025085961144579658 }, "harness|mmlu_international_law|0": { "alias": " - international_law", "acc,none": 0.7520661157024794, "acc_stderr,none": 0.03941897526516301 }, "harness|mmlu_jurisprudence|0": { "alias": " - jurisprudence", "acc,none": 0.75, "acc_stderr,none": 0.04186091791394607 }, "harness|mmlu_logical_fallacies|0": { "alias": " - logical_fallacies", "acc,none": 0.6932515337423313, "acc_stderr,none": 0.036230899157241474 }, "harness|mmlu_moral_disputes|0": { "alias": " - moral_disputes", "acc,none": 0.6676300578034682, 
"acc_stderr,none": 0.025361168749688235 }, "harness|mmlu_moral_scenarios|0": { "alias": " - moral_scenarios", "acc,none": 0.33854748603351953, "acc_stderr,none": 0.01582670009648135 }, "harness|mmlu_philosophy|0": { "alias": " - philosophy", "acc,none": 0.6688102893890675, "acc_stderr,none": 0.026730620728004924 }, "harness|mmlu_prehistory|0": { "alias": " - prehistory", "acc,none": 0.6882716049382716, "acc_stderr,none": 0.025773111169630433 }, "harness|mmlu_professional_law|0": { "alias": " - professional_law", "acc,none": 0.47392438070404175, "acc_stderr,none": 0.012752858346533133 }, "harness|mmlu_world_religions|0": { "alias": " - world_religions", "acc,none": 0.7426900584795322, "acc_stderr,none": 0.03352799844161865 }, "harness|mmlu_other|0": { "alias": " - other", "acc,none": 0.6810428065658192, "acc_stderr,none": 0.00813434488979903 }, "harness|mmlu_business_ethics|0": { "alias": " - business_ethics", "acc,none": 0.67, "acc_stderr,none": 0.04725815626252607 }, "harness|mmlu_clinical_knowledge|0": { "alias": " - clinical_knowledge", "acc,none": 0.6754716981132075, "acc_stderr,none": 0.028815615713432118 }, "harness|mmlu_college_medicine|0": { "alias": " - college_medicine", "acc,none": 0.630057803468208, "acc_stderr,none": 0.0368122963339432 }, "harness|mmlu_global_facts|0": { "alias": " - global_facts", "acc,none": 0.42, "acc_stderr,none": 0.049604496374885836 }, "harness|mmlu_human_aging|0": { "alias": " - human_aging", "acc,none": 0.6367713004484304, "acc_stderr,none": 0.032277904428505 }, "harness|mmlu_management|0": { "alias": " - management", "acc,none": 0.7766990291262136, "acc_stderr,none": 0.04123553189891431 }, "harness|mmlu_marketing|0": { "alias": " - marketing", "acc,none": 0.8675213675213675, "acc_stderr,none": 0.022209309073165606 }, "harness|mmlu_medical_genetics|0": { "alias": " - medical_genetics", "acc,none": 0.76, "acc_stderr,none": 0.04292346959909283 }, "harness|mmlu_miscellaneous|0": { "alias": " - miscellaneous", "acc,none": 0.7701149425287356, "acc_stderr,none": 0.015046301846691826 }, "harness|mmlu_nutrition|0": { "alias": " - nutrition", "acc,none": 0.6862745098039216, "acc_stderr,none": 0.026568921015457166 }, "harness|mmlu_professional_accounting|0": { "alias": " - professional_accounting", "acc,none": 0.4858156028368794, "acc_stderr,none": 0.02981549448368206 }, "harness|mmlu_professional_medicine|0": { "alias": " - professional_medicine", "acc,none": 0.6691176470588235, "acc_stderr,none": 0.028582709753898428 }, "harness|mmlu_virology|0": { "alias": " - virology", "acc,none": 0.5180722891566265, "acc_stderr,none": 0.03889951252827217 }, "harness|mmlu_social_sciences|0": { "alias": " - social_sciences", "acc,none": 0.7000324991875203, "acc_stderr,none": 0.00808605616864146 }, "harness|mmlu_econometrics|0": { "alias": " - econometrics", "acc,none": 0.4298245614035088, "acc_stderr,none": 0.04657047260594963 }, "harness|mmlu_high_school_geography|0": { "alias": " - high_school_geography", "acc,none": 0.7474747474747475, "acc_stderr,none": 0.030954055470365914 }, "harness|mmlu_high_school_government_and_politics|0": { "alias": " - high_school_government_and_politics", "acc,none": 0.8341968911917098, "acc_stderr,none": 0.026839845022314415 }, "harness|mmlu_high_school_macroeconomics|0": { "alias": " - high_school_macroeconomics", "acc,none": 0.5871794871794872, "acc_stderr,none": 0.024962683564331803 }, "harness|mmlu_high_school_microeconomics|0": { "alias": " - high_school_microeconomics", "acc,none": 0.6386554621848739, "acc_stderr,none": 
0.031204691225150016 }, "harness|mmlu_high_school_psychology|0": { "alias": " - high_school_psychology", "acc,none": 0.7871559633027523, "acc_stderr,none": 0.017549376389313694 }, "harness|mmlu_human_sexuality|0": { "alias": " - human_sexuality", "acc,none": 0.7404580152671756, "acc_stderr,none": 0.03844876139785271 }, "harness|mmlu_professional_psychology|0": { "alias": " - professional_psychology", "acc,none": 0.6388888888888888, "acc_stderr,none": 0.019431775677037317 }, "harness|mmlu_public_relations|0": { "alias": " - public_relations", "acc,none": 0.6818181818181818, "acc_stderr,none": 0.044612721759105085 }, "harness|mmlu_security_studies|0": { "alias": " - security_studies", "acc,none": 0.7142857142857143, "acc_stderr,none": 0.02892058322067558 }, "harness|mmlu_sociology|0": { "alias": " - sociology", "acc,none": 0.8159203980099502, "acc_stderr,none": 0.027403859410786838 }, "harness|mmlu_us_foreign_policy|0": { "alias": " - us_foreign_policy", "acc,none": 0.84, "acc_stderr,none": 0.03684529491774709 }, "harness|mmlu_stem|0": { "alias": " - stem", "acc,none": 0.513162067871868, "acc_stderr,none": 0.008641503326837477 }, "harness|mmlu_abstract_algebra|0": { "alias": " - abstract_algebra", "acc,none": 0.34, "acc_stderr,none": 0.04760952285695236 }, "harness|mmlu_anatomy|0": { "alias": " - anatomy", "acc,none": 0.6222222222222222, "acc_stderr,none": 0.04188307537595853 }, "harness|mmlu_astronomy|0": { "alias": " - astronomy", "acc,none": 0.6447368421052632, "acc_stderr,none": 0.03894734487013316 }, "harness|mmlu_college_biology|0": { "alias": " - college_biology", "acc,none": 0.6944444444444444, "acc_stderr,none": 0.03852084696008534 }, "harness|mmlu_college_chemistry|0": { "alias": " - college_chemistry", "acc,none": 0.33, "acc_stderr,none": 0.047258156262526045 }, "harness|mmlu_college_computer_science|0": { "alias": " - college_computer_science", "acc,none": 0.47, "acc_stderr,none": 0.05016135580465919 }, "harness|mmlu_college_mathematics|0": { "alias": " - college_mathematics", "acc,none": 0.31, "acc_stderr,none": 0.04648231987117316 }, "harness|mmlu_college_physics|0": { "alias": " - college_physics", "acc,none": 0.43137254901960786, "acc_stderr,none": 0.04928099597287534 }, "harness|mmlu_computer_security|0": { "alias": " - computer_security", "acc,none": 0.72, "acc_stderr,none": 0.04512608598542128 }, "harness|mmlu_conceptual_physics|0": { "alias": " - conceptual_physics", "acc,none": 0.48936170212765956, "acc_stderr,none": 0.03267862331014063 }, "harness|mmlu_electrical_engineering|0": { "alias": " - electrical_engineering", "acc,none": 0.6344827586206897, "acc_stderr,none": 0.04013124195424385 }, "harness|mmlu_elementary_mathematics|0": { "alias": " - elementary_mathematics", "acc,none": 0.42328042328042326, "acc_stderr,none": 0.025446365634406783 }, "harness|mmlu_high_school_biology|0": { "alias": " - high_school_biology", "acc,none": 0.7129032258064516, "acc_stderr,none": 0.02573654274559452 }, "harness|mmlu_high_school_chemistry|0": { "alias": " - high_school_chemistry", "acc,none": 0.4827586206896552, "acc_stderr,none": 0.035158955511657 }, "harness|mmlu_high_school_computer_science|0": { "alias": " - high_school_computer_science", "acc,none": 0.63, "acc_stderr,none": 0.04852365870939099 }, "harness|mmlu_high_school_mathematics|0": { "alias": " - high_school_mathematics", "acc,none": 0.3814814814814815, "acc_stderr,none": 0.029616718927497593 }, "harness|mmlu_high_school_physics|0": { "alias": " - high_school_physics", "acc,none": 0.45695364238410596, "acc_stderr,none": 
0.04067325174247443 }, "harness|mmlu_high_school_statistics|0": { "alias": " - high_school_statistics", "acc,none": 0.4861111111111111, "acc_stderr,none": 0.03408655867977749 }, "harness|mmlu_machine_learning|0": { "alias": " - machine_learning", "acc,none": 0.4375, "acc_stderr,none": 0.04708567521880525 } }, "task_info": { "model": "ISTA-DASLab/Llama-3-8B-Instruct-GPTQ-4bit", "revision": "main", "private": false, "params": 5.74, "architectures": "LlamaForCausalLM", "quant_type": "GPTQ", "precision": "4bit", "model_params": 7.04, "model_size": 5.74, "weight_dtype": "int4", "compute_dtype": "float16", "gguf_ftype": "*Q4_0.gguf", "hardware": "gpu", "status": "Waiting", "submitted_time": "2024-05-16T08:11:55Z", "model_type": "quantization", "job_id": -1, "job_start_time": null, "scripts": "ITREX" }, "quantization_config": { "bits": 4, "checkpoint_format": "gptq", "damp_percent": 0.01, "desc_act": true, "exllama_config": { "version": 2 }, "group_size": 128, "model_file_base_name": null, "model_name_or_path": null, "quant_method": "gptq", "static_groups": false, "sym": true, "true_sequential": true, "use_exllama": true }, "versions": { "harness|piqa|0": 1.0, "harness|hellaswag|0": 1.0, "harness|arc:easy|0": 1.0, "harness|winogrande|0": 1.0, "harness|openbookqa|0": 1.0, "harness|boolq|0": 2.0, "harness|lambada:openai|0": 1.0, "harness|arc:challenge|0": 1.0, "harness|truthfulqa:mc1|0": 2.0, "harness|truthfulqa:mc2|0": 2.0, "harness|mmlu|0": null, "harness|mmlu_humanities|0": null, "harness|mmlu_formal_logic|0": 0.0, "harness|mmlu_high_school_european_history|0": 0.0, "harness|mmlu_high_school_us_history|0": 0.0, "harness|mmlu_high_school_world_history|0": 0.0, "harness|mmlu_international_law|0": 0.0, "harness|mmlu_jurisprudence|0": 0.0, "harness|mmlu_logical_fallacies|0": 0.0, "harness|mmlu_moral_disputes|0": 0.0, "harness|mmlu_moral_scenarios|0": 0.0, "harness|mmlu_philosophy|0": 0.0, "harness|mmlu_prehistory|0": 0.0, "harness|mmlu_professional_law|0": 0.0, "harness|mmlu_world_religions|0": 0.0, "harness|mmlu_other|0": null, "harness|mmlu_business_ethics|0": 0.0, "harness|mmlu_clinical_knowledge|0": 0.0, "harness|mmlu_college_medicine|0": 0.0, "harness|mmlu_global_facts|0": 0.0, "harness|mmlu_human_aging|0": 0.0, "harness|mmlu_management|0": 0.0, "harness|mmlu_marketing|0": 0.0, "harness|mmlu_medical_genetics|0": 0.0, "harness|mmlu_miscellaneous|0": 0.0, "harness|mmlu_nutrition|0": 0.0, "harness|mmlu_professional_accounting|0": 0.0, "harness|mmlu_professional_medicine|0": 0.0, "harness|mmlu_virology|0": 0.0, "harness|mmlu_social_sciences|0": null, "harness|mmlu_econometrics|0": 0.0, "harness|mmlu_high_school_geography|0": 0.0, "harness|mmlu_high_school_government_and_politics|0": 0.0, "harness|mmlu_high_school_macroeconomics|0": 0.0, "harness|mmlu_high_school_microeconomics|0": 0.0, "harness|mmlu_high_school_psychology|0": 0.0, "harness|mmlu_human_sexuality|0": 0.0, "harness|mmlu_professional_psychology|0": 0.0, "harness|mmlu_public_relations|0": 0.0, "harness|mmlu_security_studies|0": 0.0, "harness|mmlu_sociology|0": 0.0, "harness|mmlu_us_foreign_policy|0": 0.0, "harness|mmlu_stem|0": null, "harness|mmlu_abstract_algebra|0": 0.0, "harness|mmlu_anatomy|0": 0.0, "harness|mmlu_astronomy|0": 0.0, "harness|mmlu_college_biology|0": 0.0, "harness|mmlu_college_chemistry|0": 0.0, "harness|mmlu_college_computer_science|0": 0.0, "harness|mmlu_college_mathematics|0": 0.0, "harness|mmlu_college_physics|0": 0.0, "harness|mmlu_computer_security|0": 0.0, "harness|mmlu_conceptual_physics|0": 0.0, 
"harness|mmlu_electrical_engineering|0": 0.0, "harness|mmlu_elementary_mathematics|0": 0.0, "harness|mmlu_high_school_biology|0": 0.0, "harness|mmlu_high_school_chemistry|0": 0.0, "harness|mmlu_high_school_computer_science|0": 0.0, "harness|mmlu_high_school_mathematics|0": 0.0, "harness|mmlu_high_school_physics|0": 0.0, "harness|mmlu_high_school_statistics|0": 0.0, "harness|mmlu_machine_learning|0": 0.0 }, "n-shot": { "arc_challenge": 0, "arc_easy": 0, "boolq": 0, "hellaswag": 0, "lambada_openai": 0, "mmlu": 0, "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0, "mmlu_business_ethics": 0, "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0, "mmlu_college_chemistry": 0, "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0, "mmlu_college_medicine": 0, "mmlu_college_physics": 0, "mmlu_computer_security": 0, "mmlu_conceptual_physics": 0, "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0, "mmlu_elementary_mathematics": 0, "mmlu_formal_logic": 0, "mmlu_global_facts": 0, "mmlu_high_school_biology": 0, "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0, "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0, "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0, "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0, "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0, "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0, "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0, "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0, "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0, "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0, "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0, "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0, "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0, "mmlu_sociology": 0, "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0, "mmlu_world_religions": 0, "openbookqa": 0, "piqa": 0, "truthfulqa_mc1": 0, "truthfulqa_mc2": 0, "winogrande": 0 }, "date": 1716167855.9616175, "config": { "model": "hf", "model_args": "pretrained=ISTA-DASLab/Llama-3-8B-Instruct-GPTQ-4bit,trust_remote_code=True,dtype=float16,_commit_hash=main", "batch_size": 2, "batch_sizes": [], "device": "cuda", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null } }