{ "config_general": { "lighteval_sha": "no", "num_few_shot_default": null, "num_fewshot_seeds": null, "override_batch_size": null, "max_samples": null, "job_id": -1, "start_time": null, "end_time": "2024-05-07-09-41-53", "total_evaluation_time_secondes": "", "model_name": "Qwen/Qwen1.5-7B-Chat-GGUF", "model_sha": "", "model_dtype": "4bit", "model_size": 4.505956352, "model_params": 7.721324544, "quant_type": "llama.cpp", "precision": "4bit" }, "results": { "harness|openbookqa|0": { "acc,none": 0.3, "acc_stderr,none": 0.020514426225628043, "acc_norm,none": 0.396, "acc_norm_stderr,none": 0.021893529941665817, "alias": "openbookqa" }, "harness|piqa|0": { "acc,none": 0.7399347116430903, "acc_stderr,none": 0.01023489324906131, "acc_norm,none": 0.7393906420021763, "acc_norm_stderr,none": 0.010241826155811618, "alias": "piqa" }, "harness|truthfulqa:mc2|0": { "acc,none": 0.5445532573203143, "acc_stderr,none": 0.01620516610689256, "alias": "truthfulqa_mc2" }, "harness|winogrande|0": { "acc,none": 0.6393054459352802, "acc_stderr,none": 0.01349606439423403, "alias": "winogrande" }, "harness|truthfulqa:mc1|0": { "acc,none": 0.3537331701346389, "acc_stderr,none": 0.016737814358846147, "alias": "truthfulqa_mc1" }, "harness|arc:challenge|0": { "acc,none": 0.4104095563139932, "acc_stderr,none": 0.014374922192642662, "acc_norm,none": 0.4232081911262799, "acc_norm_stderr,none": 0.014438036220848025, "alias": "arc_challenge" }, "harness|hellaswag|0": { "acc,none": 0.5438159729137622, "acc_stderr,none": 0.004970585328297622, "acc_norm,none": 0.6963752240589524, "acc_norm_stderr,none": 0.00458882795877513, "alias": "hellaswag" }, "harness|arc:easy|0": { "acc,none": 0.6136363636363636, "acc_stderr,none": 0.009991296778159622, "acc_norm,none": 0.6056397306397306, "acc_norm_stderr,none": 0.010028176038393002, "alias": "arc_easy" }, "harness|boolq|0": { "acc,none": 0.7981651376146789, "acc_stderr,none": 0.00701999832474464, "alias": "boolq" }, "harness|lambada:openai|0": { "perplexity,none": 8.629176527841318, "perplexity_stderr,none": 0.42869315340212266, "acc,none": 0.4244129633223365, "acc_stderr,none": 0.006885918770006381, "alias": "lambada_openai" }, "harness|mmlu|0": { "acc,none": 0.5183734510753454, "acc_stderr,none": 0.00400817537653083, "alias": "mmlu" }, "harness|mmlu_humanities|0": { "alias": " - humanities", "acc,none": 0.4359192348565356, "acc_stderr,none": 0.006934954327448632 }, "harness|mmlu_formal_logic|0": { "alias": " - formal_logic", "acc,none": 0.3968253968253968, "acc_stderr,none": 0.0437588849272706 }, "harness|mmlu_high_school_european_history|0": { "alias": " - high_school_european_history", "acc,none": 0.3333333333333333, "acc_stderr,none": 0.03681050869161549 }, "harness|mmlu_high_school_us_history|0": { "alias": " - high_school_us_history", "acc,none": 0.37254901960784315, "acc_stderr,none": 0.03393388584958406 }, "harness|mmlu_high_school_world_history|0": { "alias": " - high_school_world_history", "acc,none": 0.2742616033755274, "acc_stderr,none": 0.029041333510598046 }, "harness|mmlu_international_law|0": { "alias": " - international_law", "acc,none": 0.6528925619834711, "acc_stderr,none": 0.043457245702925335 }, "harness|mmlu_jurisprudence|0": { "alias": " - jurisprudence", "acc,none": 0.7777777777777778, "acc_stderr,none": 0.040191074725573483 }, "harness|mmlu_logical_fallacies|0": { "alias": " - logical_fallacies", "acc,none": 0.6134969325153374, "acc_stderr,none": 0.03825825548848607 }, "harness|mmlu_moral_disputes|0": { "alias": " - moral_disputes", "acc,none": 
0.5086705202312138, "acc_stderr,none": 0.02691504735536981 }, "harness|mmlu_moral_scenarios|0": { "alias": " - moral_scenarios", "acc,none": 0.3754189944134078, "acc_stderr,none": 0.01619510424846353 }, "harness|mmlu_philosophy|0": { "alias": " - philosophy", "acc,none": 0.639871382636656, "acc_stderr,none": 0.027264297599804015 }, "harness|mmlu_prehistory|0": { "alias": " - prehistory", "acc,none": 0.6234567901234568, "acc_stderr,none": 0.026959344518747787 }, "harness|mmlu_professional_law|0": { "alias": " - professional_law", "acc,none": 0.32790091264667537, "acc_stderr,none": 0.01198993664066653 }, "harness|mmlu_world_religions|0": { "alias": " - world_religions", "acc,none": 0.7368421052631579, "acc_stderr,none": 0.03377310252209205 }, "harness|mmlu_other|0": { "alias": " - other", "acc,none": 0.5616350177019633, "acc_stderr,none": 0.008371099697022879 }, "harness|mmlu_business_ethics|0": { "alias": " - business_ethics", "acc,none": 0.61, "acc_stderr,none": 0.04902071300001974 }, "harness|mmlu_clinical_knowledge|0": { "alias": " - clinical_knowledge", "acc,none": 0.5962264150943396, "acc_stderr,none": 0.03019761160019795 }, "harness|mmlu_college_medicine|0": { "alias": " - college_medicine", "acc,none": 0.4624277456647399, "acc_stderr,none": 0.0380168510452446 }, "harness|mmlu_global_facts|0": { "alias": " - global_facts", "acc,none": 0.37, "acc_stderr,none": 0.04852365870939099 }, "harness|mmlu_human_aging|0": { "alias": " - human_aging", "acc,none": 0.5336322869955157, "acc_stderr,none": 0.033481800170603065 }, "harness|mmlu_management|0": { "alias": " - management", "acc,none": 0.7669902912621359, "acc_stderr,none": 0.04185832598928315 }, "harness|mmlu_marketing|0": { "alias": " - marketing", "acc,none": 0.8205128205128205, "acc_stderr,none": 0.02514093595033544 }, "harness|mmlu_medical_genetics|0": { "alias": " - medical_genetics", "acc,none": 0.54, "acc_stderr,none": 0.05009082659620332 }, "harness|mmlu_miscellaneous|0": { "alias": " - miscellaneous", "acc,none": 0.6998722860791826, "acc_stderr,none": 0.016389249691317425 }, "harness|mmlu_nutrition|0": { "alias": " - nutrition", "acc,none": 0.5915032679738562, "acc_stderr,none": 0.028146405993096358 }, "harness|mmlu_professional_accounting|0": { "alias": " - professional_accounting", "acc,none": 0.39361702127659576, "acc_stderr,none": 0.029144544781596154 }, "harness|mmlu_professional_medicine|0": { "alias": " - professional_medicine", "acc,none": 0.17647058823529413, "acc_stderr,none": 0.023157468308559366 }, "harness|mmlu_virology|0": { "alias": " - virology", "acc,none": 0.463855421686747, "acc_stderr,none": 0.03882310850890594 }, "harness|mmlu_social_sciences|0": { "alias": " - social_sciences", "acc,none": 0.6301592460188495, "acc_stderr,none": 0.008418371310872776 }, "harness|mmlu_econometrics|0": { "alias": " - econometrics", "acc,none": 0.34210526315789475, "acc_stderr,none": 0.04462917535336936 }, "harness|mmlu_high_school_geography|0": { "alias": " - high_school_geography", "acc,none": 0.7727272727272727, "acc_stderr,none": 0.029857515673386414 }, "harness|mmlu_high_school_government_and_politics|0": { "alias": " - high_school_government_and_politics", "acc,none": 0.7823834196891192, "acc_stderr,none": 0.02977866303775295 }, "harness|mmlu_high_school_macroeconomics|0": { "alias": " - high_school_macroeconomics", "acc,none": 0.5846153846153846, "acc_stderr,none": 0.02498535492310233 }, "harness|mmlu_high_school_microeconomics|0": { "alias": " - high_school_microeconomics", "acc,none": 0.6302521008403361, 
"acc_stderr,none": 0.03135709599613591 }, "harness|mmlu_high_school_psychology|0": { "alias": " - high_school_psychology", "acc,none": 0.7761467889908257, "acc_stderr,none": 0.017871217767790236 }, "harness|mmlu_human_sexuality|0": { "alias": " - human_sexuality", "acc,none": 0.6717557251908397, "acc_stderr,none": 0.04118438565806299 }, "harness|mmlu_professional_psychology|0": { "alias": " - professional_psychology", "acc,none": 0.4820261437908497, "acc_stderr,none": 0.020214761037872408 }, "harness|mmlu_public_relations|0": { "alias": " - public_relations", "acc,none": 0.5818181818181818, "acc_stderr,none": 0.04724577405731572 }, "harness|mmlu_security_studies|0": { "alias": " - security_studies", "acc,none": 0.5428571428571428, "acc_stderr,none": 0.031891418324213966 }, "harness|mmlu_sociology|0": { "alias": " - sociology", "acc,none": 0.6865671641791045, "acc_stderr,none": 0.032801882053486435 }, "harness|mmlu_us_foreign_policy|0": { "alias": " - us_foreign_policy", "acc,none": 0.77, "acc_stderr,none": 0.04229525846816505 }, "harness|mmlu_stem|0": { "alias": " - stem", "acc,none": 0.48969235648588644, "acc_stderr,none": 0.008718408268467998 }, "harness|mmlu_abstract_algebra|0": { "alias": " - abstract_algebra", "acc,none": 0.42, "acc_stderr,none": 0.04960449637488584 }, "harness|mmlu_anatomy|0": { "alias": " - anatomy", "acc,none": 0.4888888888888889, "acc_stderr,none": 0.04318275491977976 }, "harness|mmlu_astronomy|0": { "alias": " - astronomy", "acc,none": 0.5855263157894737, "acc_stderr,none": 0.04008973785779206 }, "harness|mmlu_college_biology|0": { "alias": " - college_biology", "acc,none": 0.5694444444444444, "acc_stderr,none": 0.04140685639111503 }, "harness|mmlu_college_chemistry|0": { "alias": " - college_chemistry", "acc,none": 0.4, "acc_stderr,none": 0.04923659639173309 }, "harness|mmlu_college_computer_science|0": { "alias": " - college_computer_science", "acc,none": 0.41, "acc_stderr,none": 0.04943110704237101 }, "harness|mmlu_college_mathematics|0": { "alias": " - college_mathematics", "acc,none": 0.33, "acc_stderr,none": 0.04725815626252604 }, "harness|mmlu_college_physics|0": { "alias": " - college_physics", "acc,none": 0.4019607843137255, "acc_stderr,none": 0.04878608714466996 }, "harness|mmlu_computer_security|0": { "alias": " - computer_security", "acc,none": 0.73, "acc_stderr,none": 0.044619604333847394 }, "harness|mmlu_conceptual_physics|0": { "alias": " - conceptual_physics", "acc,none": 0.5531914893617021, "acc_stderr,none": 0.0325005368436584 }, "harness|mmlu_electrical_engineering|0": { "alias": " - electrical_engineering", "acc,none": 0.5310344827586206, "acc_stderr,none": 0.04158632762097828 }, "harness|mmlu_elementary_mathematics|0": { "alias": " - elementary_mathematics", "acc,none": 0.4523809523809524, "acc_stderr,none": 0.025634258115554958 }, "harness|mmlu_high_school_biology|0": { "alias": " - high_school_biology", "acc,none": 0.6709677419354839, "acc_stderr,none": 0.026729499068349958 }, "harness|mmlu_high_school_chemistry|0": { "alias": " - high_school_chemistry", "acc,none": 0.5073891625615764, "acc_stderr,none": 0.035176035403610105 }, "harness|mmlu_high_school_computer_science|0": { "alias": " - high_school_computer_science", "acc,none": 0.6, "acc_stderr,none": 0.04923659639173309 }, "harness|mmlu_high_school_mathematics|0": { "alias": " - high_school_mathematics", "acc,none": 0.3333333333333333, "acc_stderr,none": 0.028742040903948496 }, "harness|mmlu_high_school_physics|0": { "alias": " - high_school_physics", "acc,none": 0.3509933774834437, 
"acc_stderr,none": 0.03896981964257375 }, "harness|mmlu_high_school_statistics|0": { "alias": " - high_school_statistics", "acc,none": 0.4351851851851852, "acc_stderr,none": 0.03381200005643525 }, "harness|mmlu_machine_learning|0": { "alias": " - machine_learning", "acc,none": 0.45535714285714285, "acc_stderr,none": 0.04726835553719099 } }, "task_info": { "model": "Qwen/Qwen1.5-7B-Chat-GGUF", "revision": "main", "private": false, "params": 28.0, "architectures": "?", "quant_type": "llama.cpp", "precision": "4bit", "model_params": 56.0, "model_size": 28.0, "weight_dtype": "int4", "compute_dtype": "float16", "gguf_ftype": "*q4_0.gguf", "hardware": "cpu", "status": "Pending", "submitted_time": "2024-05-01T16:23:43Z", "model_type": "quantization", "job_id": -1, "job_start_time": null, "scripts": "llama_cpp" }, "quantization_config": { "quant_method": "llama.cpp", "ftype": "*q4_0.gguf" }, "versions": { "harness|openbookqa|0": 1.0, "harness|piqa|0": 1.0, "harness|truthfulqa:mc2|0": 2.0, "harness|winogrande|0": 1.0, "harness|truthfulqa:mc1|0": 2.0, "harness|arc:challenge|0": 1.0, "harness|hellaswag|0": 1.0, "harness|arc:easy|0": 1.0, "harness|boolq|0": 2.0, "harness|lambada:openai|0": 1.0, "harness|mmlu|0": null, "harness|mmlu_humanities|0": null, "harness|mmlu_formal_logic|0": 0.0, "harness|mmlu_high_school_european_history|0": 0.0, "harness|mmlu_high_school_us_history|0": 0.0, "harness|mmlu_high_school_world_history|0": 0.0, "harness|mmlu_international_law|0": 0.0, "harness|mmlu_jurisprudence|0": 0.0, "harness|mmlu_logical_fallacies|0": 0.0, "harness|mmlu_moral_disputes|0": 0.0, "harness|mmlu_moral_scenarios|0": 0.0, "harness|mmlu_philosophy|0": 0.0, "harness|mmlu_prehistory|0": 0.0, "harness|mmlu_professional_law|0": 0.0, "harness|mmlu_world_religions|0": 0.0, "harness|mmlu_other|0": null, "harness|mmlu_business_ethics|0": 0.0, "harness|mmlu_clinical_knowledge|0": 0.0, "harness|mmlu_college_medicine|0": 0.0, "harness|mmlu_global_facts|0": 0.0, "harness|mmlu_human_aging|0": 0.0, "harness|mmlu_management|0": 0.0, "harness|mmlu_marketing|0": 0.0, "harness|mmlu_medical_genetics|0": 0.0, "harness|mmlu_miscellaneous|0": 0.0, "harness|mmlu_nutrition|0": 0.0, "harness|mmlu_professional_accounting|0": 0.0, "harness|mmlu_professional_medicine|0": 0.0, "harness|mmlu_virology|0": 0.0, "harness|mmlu_social_sciences|0": null, "harness|mmlu_econometrics|0": 0.0, "harness|mmlu_high_school_geography|0": 0.0, "harness|mmlu_high_school_government_and_politics|0": 0.0, "harness|mmlu_high_school_macroeconomics|0": 0.0, "harness|mmlu_high_school_microeconomics|0": 0.0, "harness|mmlu_high_school_psychology|0": 0.0, "harness|mmlu_human_sexuality|0": 0.0, "harness|mmlu_professional_psychology|0": 0.0, "harness|mmlu_public_relations|0": 0.0, "harness|mmlu_security_studies|0": 0.0, "harness|mmlu_sociology|0": 0.0, "harness|mmlu_us_foreign_policy|0": 0.0, "harness|mmlu_stem|0": null, "harness|mmlu_abstract_algebra|0": 0.0, "harness|mmlu_anatomy|0": 0.0, "harness|mmlu_astronomy|0": 0.0, "harness|mmlu_college_biology|0": 0.0, "harness|mmlu_college_chemistry|0": 0.0, "harness|mmlu_college_computer_science|0": 0.0, "harness|mmlu_college_mathematics|0": 0.0, "harness|mmlu_college_physics|0": 0.0, "harness|mmlu_computer_security|0": 0.0, "harness|mmlu_conceptual_physics|0": 0.0, "harness|mmlu_electrical_engineering|0": 0.0, "harness|mmlu_elementary_mathematics|0": 0.0, "harness|mmlu_high_school_biology|0": 0.0, "harness|mmlu_high_school_chemistry|0": 0.0, "harness|mmlu_high_school_computer_science|0": 0.0, 
"harness|mmlu_high_school_mathematics|0": 0.0, "harness|mmlu_high_school_physics|0": 0.0, "harness|mmlu_high_school_statistics|0": 0.0, "harness|mmlu_machine_learning|0": 0.0 }, "n-shot": { "arc_challenge": 0, "arc_easy": 0, "boolq": 0, "hellaswag": 0, "lambada_openai": 0, "mmlu": 0, "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0, "mmlu_business_ethics": 0, "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0, "mmlu_college_chemistry": 0, "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0, "mmlu_college_medicine": 0, "mmlu_college_physics": 0, "mmlu_computer_security": 0, "mmlu_conceptual_physics": 0, "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0, "mmlu_elementary_mathematics": 0, "mmlu_formal_logic": 0, "mmlu_global_facts": 0, "mmlu_high_school_biology": 0, "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0, "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0, "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0, "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0, "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0, "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0, "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0, "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0, "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0, "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0, "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0, "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0, "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0, "mmlu_sociology": 0, "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0, "mmlu_world_religions": 0, "openbookqa": 0, "piqa": 0, "truthfulqa_mc1": 0, "truthfulqa_mc2": 0, "winogrande": 0 }, "date": 1714965259.0617445, "config": { "model": "WrapperGGUFLM", "model_args": "gguf_model=Qwen/Qwen1.5-7B-Chat-GGUF,ftype=*q4_0.gguf,dtype=float16,_commit_hash=main", "batch_size": 1, "batch_sizes": [], "device": "cuda", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null } }